diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a74101922..c3d9494de 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -b142b72bea6f30d8efb36dfa8c58e0d63ae5329b \ No newline at end of file +file:/home/hector.castejon/universe/bazel-bin/openapi/all-internal.json \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 706329a62..cafdf4d18 100755 --- a/.gitattributes +++ b/.gitattributes @@ -55,6 +55,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java linguist-generated=true @@ -92,6 +93,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolic databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudget.java linguist-generated=true @@ -199,11 +201,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -219,7 +220,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegis 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true @@ -230,12 +230,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Credentials databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true @@ -248,10 +242,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -262,12 +252,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true @@ -288,7 +277,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true @@ -320,15 +308,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java linguist-generated=true @@ -338,7 +323,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java linguist-generated=true @@ -356,12 +340,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java linguist-generated=true @@ -385,7 +368,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumes databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java linguist-generated=true @@ -412,7 +394,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true @@ -461,9 +442,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCred databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true @@ -495,12 +473,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true @@ -564,6 +540,7 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoo databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java linguist-generated=true @@ -639,6 +616,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStat databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java linguist-generated=true @@ -824,9 +802,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTyp 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true @@ -837,9 +813,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSc databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true @@ -857,6 +830,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGet databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java linguist-generated=true @@ -865,8 +840,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpa databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true @@ -889,16 +862,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true @@ -906,7 +871,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true @@ -914,10 +878,53 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Unpublis 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Converters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java linguist-generated=true @@ -981,6 +988,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserService.java linguist-generated=true @@ -1081,6 +1089,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssign databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnown.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AuthenticationMethod.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java linguist-generated=true @@ -1100,12 +1109,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Condition.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskOp.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true @@ -1289,6 +1303,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Consume databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Cost.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java linguist-generated=true @@ -1435,11 +1450,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java linguist-generated=true @@ -1508,10 +1521,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true @@ -1661,6 +1670,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ViewType.java li databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutput.java linguist-generated=true @@ -1720,6 +1730,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustom databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true @@ -1804,6 +1815,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCre databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequest.java linguist-generated=true @@ -1882,6 +1894,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/Converters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java linguist-generated=true @@ -1906,6 +1932,7 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessageRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java linguist-generated=true @@ -2030,6 +2057,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Compliance databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java linguist-generated=true @@ -2050,6 +2078,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java linguist-generated=true @@ -2195,7 +2225,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java linguist-generated=true @@ -2204,6 +2233,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotifi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true @@ -2228,6 +2258,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true @@ -2304,7 +2336,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAc databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true @@ -2322,6 +2354,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceN databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java linguist-generated=true @@ -2455,6 +2488,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoTypeName.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ComparisonOperator.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java linguist-generated=true @@ -2634,6 +2668,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessMessage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonType.java linguist-generated=true @@ -2670,6 +2706,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java lin databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTag.java linguist-generated=true @@ -2731,6 +2768,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Vector databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Converters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java linguist-generated=true diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..7b016a89f --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "java.compile.nullAnalysis.mode": "automatic" +} \ No newline at end of file diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index cdcea322f..0eed8fa0c 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -103,5 +103,16 @@ jackson-datatype-jsr310 ${jackson.version} + + + com.google.protobuf + protobuf-java + 3.25.1 + + + com.google.protobuf + protobuf-java-util + 3.25.1 + diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index d4c066a69..4e6f0e23c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -18,8 +18,6 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; -import com.databricks.sdk.service.catalog.DatabaseInstancesAPI; -import com.databricks.sdk.service.catalog.DatabaseInstancesService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import com.databricks.sdk.service.catalog.FunctionsAPI; @@ -83,8 +81,8 @@ import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI; import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService; import com.databricks.sdk.service.dashboards.LakeviewService; -import com.databricks.sdk.service.dashboards.QueryExecutionAPI; 
-import com.databricks.sdk.service.dashboards.QueryExecutionService; +import com.databricks.sdk.service.database.DatabaseAPI; +import com.databricks.sdk.service.database.DatabaseService; import com.databricks.sdk.service.files.DbfsService; import com.databricks.sdk.service.files.FilesAPI; import com.databricks.sdk.service.files.FilesService; @@ -140,6 +138,8 @@ import com.databricks.sdk.service.ml.ModelRegistryService; import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2Service; import com.databricks.sdk.service.serving.ServingEndpointsAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneService; @@ -243,7 +243,7 @@ public class WorkspaceClient { private DashboardWidgetsAPI dashboardWidgetsAPI; private DashboardsAPI dashboardsAPI; private DataSourcesAPI dataSourcesAPI; - private DatabaseInstancesAPI databaseInstancesAPI; + private DatabaseAPI databaseAPI; private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; @@ -281,10 +281,10 @@ public class WorkspaceClient { private ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboardsAPI; private ProviderProvidersAPI providerProvidersAPI; private ProvidersAPI providersAPI; + private QualityMonitorV2API qualityMonitorV2API; private QualityMonitorsAPI qualityMonitorsAPI; private QueriesAPI queriesAPI; private QueriesLegacyAPI queriesLegacyAPI; - private QueryExecutionAPI queryExecutionAPI; private QueryHistoryAPI queryHistoryAPI; private QueryVisualizationsAPI queryVisualizationsAPI; private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI; @@ -353,7 +353,7 @@ public WorkspaceClient(DatabricksConfig config) { dashboardWidgetsAPI = new 
DashboardWidgetsAPI(apiClient); dashboardsAPI = new DashboardsAPI(apiClient); dataSourcesAPI = new DataSourcesAPI(apiClient); - databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); + databaseAPI = new DatabaseAPI(apiClient); dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); @@ -391,10 +391,10 @@ public WorkspaceClient(DatabricksConfig config) { providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient); providerProvidersAPI = new ProviderProvidersAPI(apiClient); providersAPI = new ProvidersAPI(apiClient); + qualityMonitorV2API = new QualityMonitorV2API(apiClient); qualityMonitorsAPI = new QualityMonitorsAPI(apiClient); queriesAPI = new QueriesAPI(apiClient); queriesLegacyAPI = new QueriesLegacyAPI(apiClient); - queryExecutionAPI = new QueryExecutionAPI(apiClient); queryHistoryAPI = new QueryHistoryAPI(apiClient); queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); @@ -714,8 +714,8 @@ public DataSourcesAPI dataSources() { } /** Database Instances provide access to a database via REST API or direct SQL. */ - public DatabaseInstancesAPI databaseInstances() { - return databaseInstancesAPI; + public DatabaseAPI database() { + return databaseAPI; } /** @@ -796,6 +796,8 @@ public ExternalLocationsAPI externalLocations() { * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ public FilesAPI files() { @@ -1211,6 +1213,11 @@ public ProvidersAPI providers() { return providersAPI; } + /** Manage data quality of UC objects (currently support `schema`) */ + public QualityMonitorV2API qualityMonitorV2() { + return qualityMonitorV2API; + } + /** * A monitor computes and monitors data or model quality metrics for a table over time. It * generates metrics tables and a dashboard that you can use to monitor table health and set @@ -1249,11 +1256,6 @@ public QueriesLegacyAPI queriesLegacy() { return queriesLegacyAPI; } - /** Query execution APIs for AI / BI Dashboards */ - public QueryExecutionAPI queryExecution() { - return queryExecutionAPI; - } - /** * A service responsible for storing and retrieving the list of queries run against SQL endpoints * and serverless compute. @@ -2068,14 +2070,14 @@ public WorkspaceClient withDataSourcesAPI(DataSourcesAPI dataSources) { return this; } - /** Replace the default DatabaseInstancesService with a custom implementation. */ - public WorkspaceClient withDatabaseInstancesImpl(DatabaseInstancesService databaseInstances) { - return this.withDatabaseInstancesAPI(new DatabaseInstancesAPI(databaseInstances)); + /** Replace the default DatabaseService with a custom implementation. */ + public WorkspaceClient withDatabaseImpl(DatabaseService database) { + return this.withDatabaseAPI(new DatabaseAPI(database)); } - /** Replace the default DatabaseInstancesAPI with a custom implementation. */ - public WorkspaceClient withDatabaseInstancesAPI(DatabaseInstancesAPI databaseInstances) { - this.databaseInstancesAPI = databaseInstances; + /** Replace the default DatabaseAPI with a custom implementation. 
*/ + public WorkspaceClient withDatabaseAPI(DatabaseAPI database) { + this.databaseAPI = database; return this; } @@ -2507,6 +2509,17 @@ public WorkspaceClient withProvidersAPI(ProvidersAPI providers) { return this; } + /** Replace the default QualityMonitorV2Service with a custom implementation. */ + public WorkspaceClient withQualityMonitorV2Impl(QualityMonitorV2Service qualityMonitorV2) { + return this.withQualityMonitorV2API(new QualityMonitorV2API(qualityMonitorV2)); + } + + /** Replace the default QualityMonitorV2API with a custom implementation. */ + public WorkspaceClient withQualityMonitorV2API(QualityMonitorV2API qualityMonitorV2) { + this.qualityMonitorV2API = qualityMonitorV2; + return this; + } + /** Replace the default QualityMonitorsService with a custom implementation. */ public WorkspaceClient withQualityMonitorsImpl(QualityMonitorsService qualityMonitors) { return this.withQualityMonitorsAPI(new QualityMonitorsAPI(qualityMonitors)); @@ -2540,17 +2553,6 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) { return this; } - /** Replace the default QueryExecutionService with a custom implementation. */ - public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) { - return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution)); - } - - /** Replace the default QueryExecutionAPI with a custom implementation. */ - public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) { - this.queryExecutionAPI = queryExecution; - return this; - } - /** Replace the default QueryHistoryService with a custom implementation. 
*/ public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) { return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index de6548982..df16ebae3 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -410,13 +410,17 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** @deprecated Use {@link #getAzureUseMsi()} instead. */ + /** + * @deprecated Use {@link #getAzureUseMsi()} instead. + */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */ + /** + * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. + */ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationDeserializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationDeserializer.java new file mode 100644 index 000000000..765dccd66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationDeserializer.java @@ -0,0 +1,23 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.google.protobuf.Duration; +import com.google.protobuf.util.Durations; +import java.io.IOException; + +public class DurationDeserializer extends JsonDeserializer { + @Override + public Duration deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + String durationStr = 
p.getValueAsString(); + if (durationStr == null || durationStr.isEmpty()) { + return null; + } + try { + return Durations.parse(durationStr); // Parses duration format like "3.000s" + } catch (Exception e) { + throw new IOException("Failed to parse duration: " + durationStr, e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationSerializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationSerializer.java new file mode 100644 index 000000000..27fde56b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/DurationSerializer.java @@ -0,0 +1,21 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.google.protobuf.Duration; +import com.google.protobuf.util.Durations; +import java.io.IOException; + +public class DurationSerializer extends JsonSerializer { + @Override + public void serialize(Duration value, JsonGenerator gen, SerializerProvider serializers) + throws IOException { + if (value != null) { + String durationStr = Durations.toString(value); // Converts to "3.000s" + gen.writeString(durationStr); + } else { + gen.writeNull(); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskDeserializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskDeserializer.java new file mode 100644 index 000000000..5061f4b3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskDeserializer.java @@ -0,0 +1,23 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.google.protobuf.FieldMask; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; + +public class FieldMaskDeserializer extends JsonDeserializer { + @Override + public FieldMask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + String fieldMaskStr = p.getValueAsString(); + if (fieldMaskStr == null || fieldMaskStr.isEmpty()) { + return null; + } + try { + return FieldMaskUtil.fromJsonString(fieldMaskStr); // Parses JSON string format + } catch (Exception e) { + throw new IOException("Failed to parse field mask: " + fieldMaskStr, e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskSerializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskSerializer.java new file mode 100644 index 000000000..301f0b5bb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/FieldMaskSerializer.java @@ -0,0 +1,21 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.google.protobuf.FieldMask; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; + +public class FieldMaskSerializer extends JsonSerializer { + @Override + public void serialize(FieldMask value, JsonGenerator gen, SerializerProvider serializers) + throws IOException { + if (value != null) { + String fieldMaskStr = FieldMaskUtil.toJsonString(value); // Converts to JSON string format + gen.writeString(fieldMaskStr); + } else { + gen.writeNull(); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampDeserializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampDeserializer.java new file mode 100644 index 000000000..b5474bdc6 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampDeserializer.java @@ -0,0 +1,23 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; +import java.io.IOException; + +public class TimestampDeserializer extends JsonDeserializer { + @Override + public Timestamp deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + String timestampStr = p.getValueAsString(); + if (timestampStr == null || timestampStr.isEmpty()) { + return null; + } + try { + return Timestamps.parse(timestampStr); // Parses RFC 3339 format + } catch (Exception e) { + throw new IOException("Failed to parse timestamp: " + timestampStr, e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampSerializer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampSerializer.java new file mode 100644 index 000000000..a760b4749 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/serialization/TimestampSerializer.java @@ -0,0 +1,21 @@ +package com.databricks.sdk.core.serialization; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; +import java.io.IOException; + +public class TimestampSerializer extends JsonSerializer { + @Override + public void serialize(Timestamp value, JsonGenerator gen, SerializerProvider serializers) + throws IOException { + if (value != null) { + String timestampStr = Timestamps.toString(value); // Converts to RFC 3339 format + gen.writeString(timestampStr); + } else { + 
gen.writeNull(); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 97b6f3b19..c61103304 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -4,114 +4,102 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = App.AppSerializer.class) +@JsonDeserialize(using = App.AppDeserializer.class) public class App { /** * The active deployment of the app. A deployment is considered active when it has been deployed * to the app compute. */ - @JsonProperty("active_deployment") private AppDeployment activeDeployment; /** */ - @JsonProperty("app_status") private ApplicationStatus appStatus; /** */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** */ - @JsonProperty("compute_status") private ComputeStatus computeStatus; /** The creation time of the app. Formatted timestamp in ISO 6801. */ - @JsonProperty("create_time") private String createTime; /** The email of the user that created the app. 
*/ - @JsonProperty("creator") private String creator; /** * The default workspace file system path of the source code from which app deployment are * created. This field tracks the workspace source code path of the last active deployment. */ - @JsonProperty("default_source_code_path") private String defaultSourceCodePath; /** The description of the app. */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; /** The effective api scopes granted to the user access token. */ - @JsonProperty("effective_user_api_scopes") private Collection effectiveUserApiScopes; /** The unique identifier of the app. */ - @JsonProperty("id") private String id; /** * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. * It must be unique within the workspace. */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("oauth2_app_client_id") private String oauth2AppClientId; /** */ - @JsonProperty("oauth2_app_integration_id") private String oauth2AppIntegrationId; /** * The pending deployment of the app. A deployment is considered pending when it is being prepared * for deployment to the app compute. */ - @JsonProperty("pending_deployment") private AppDeployment pendingDeployment; /** Resources for the app. */ - @JsonProperty("resources") private Collection resources; /** */ - @JsonProperty("service_principal_client_id") private String servicePrincipalClientId; /** */ - @JsonProperty("service_principal_id") private Long servicePrincipalId; /** */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** The update time of the app. Formatted timestamp in ISO 6801. */ - @JsonProperty("update_time") private String updateTime; /** The email of the user that last updated the app. */ - @JsonProperty("updater") private String updater; /** The URL of the app once it is deployed. 
*/ - @JsonProperty("url") private String url; /** */ - @JsonProperty("user_api_scopes") private Collection userApiScopes; public App setActiveDeployment(AppDeployment activeDeployment) { @@ -407,4 +395,81 @@ public String toString() { .add("userApiScopes", userApiScopes) .toString(); } + + AppPb toPb() { + AppPb pb = new AppPb(); + pb.setActiveDeployment(activeDeployment); + pb.setAppStatus(appStatus); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setComputeStatus(computeStatus); + pb.setCreateTime(createTime); + pb.setCreator(creator); + pb.setDefaultSourceCodePath(defaultSourceCodePath); + pb.setDescription(description); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + pb.setEffectiveUserApiScopes(effectiveUserApiScopes); + pb.setId(id); + pb.setName(name); + pb.setOauth2AppClientId(oauth2AppClientId); + pb.setOauth2AppIntegrationId(oauth2AppIntegrationId); + pb.setPendingDeployment(pendingDeployment); + pb.setResources(resources); + pb.setServicePrincipalClientId(servicePrincipalClientId); + pb.setServicePrincipalId(servicePrincipalId); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUpdateTime(updateTime); + pb.setUpdater(updater); + pb.setUrl(url); + pb.setUserApiScopes(userApiScopes); + + return pb; + } + + static App fromPb(AppPb pb) { + App model = new App(); + model.setActiveDeployment(pb.getActiveDeployment()); + model.setAppStatus(pb.getAppStatus()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setComputeStatus(pb.getComputeStatus()); + model.setCreateTime(pb.getCreateTime()); + model.setCreator(pb.getCreator()); + model.setDefaultSourceCodePath(pb.getDefaultSourceCodePath()); + model.setDescription(pb.getDescription()); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + model.setEffectiveUserApiScopes(pb.getEffectiveUserApiScopes()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setOauth2AppClientId(pb.getOauth2AppClientId()); + 
model.setOauth2AppIntegrationId(pb.getOauth2AppIntegrationId()); + model.setPendingDeployment(pb.getPendingDeployment()); + model.setResources(pb.getResources()); + model.setServicePrincipalClientId(pb.getServicePrincipalClientId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUpdateTime(pb.getUpdateTime()); + model.setUpdater(pb.getUpdater()); + model.setUrl(pb.getUrl()); + model.setUserApiScopes(pb.getUserApiScopes()); + + return model; + } + + public static class AppSerializer extends JsonSerializer { + @Override + public void serialize(App value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppDeserializer extends JsonDeserializer { + @Override + public App deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppPb pb = mapper.readValue(p, AppPb.class); + return App.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java index 40dc96f30..d0f6a57dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppAccessControlRequest.AppAccessControlRequestSerializer.class) +@JsonDeserialize(using = AppAccessControlRequest.AppAccessControlRequestDeserializer.class) public class AppAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public AppAccessControlRequest setGroupName(String groupName) { @@ 
-86,4 +93,47 @@ public String toString() { .add("userName", userName) .toString(); } + + AppAccessControlRequestPb toPb() { + AppAccessControlRequestPb pb = new AppAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static AppAccessControlRequest fromPb(AppAccessControlRequestPb pb) { + AppAccessControlRequest model = new AppAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class AppAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + AppAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public AppAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppAccessControlRequestPb pb = mapper.readValue(p, AppAccessControlRequestPb.class); + return AppAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequestPb.java new file mode 100755 index 000000000..3ae8eba3f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private AppPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public AppAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public AppAccessControlRequestPb setPermissionLevel(AppPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public AppPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public AppAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public AppAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppAccessControlRequestPb that = (AppAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return 
new ToStringer(AppAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java index eb6a00b76..0faa582a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AppAccessControlResponse.AppAccessControlResponseSerializer.class) +@JsonDeserialize(using = AppAccessControlResponse.AppAccessControlResponseDeserializer.class) public class AppAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public AppAccessControlResponse setAllPermissions(Collection allPermissions) { @@ -102,4 +108,49 @@ public String toString() { .add("userName", userName) .toString(); } + + AppAccessControlResponsePb toPb() { + AppAccessControlResponsePb pb = new AppAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static AppAccessControlResponse fromPb(AppAccessControlResponsePb pb) { + AppAccessControlResponse model = new AppAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class AppAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + AppAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public AppAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppAccessControlResponsePb pb = mapper.readValue(p, AppAccessControlResponsePb.class); + return AppAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponsePb.java new file mode 100755 index 000000000..cd2ca6f45 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AppAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public AppAccessControlResponsePb setAllPermissions(Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public AppAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public AppAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public AppAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + 
this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public AppAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppAccessControlResponsePb that = (AppAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(AppAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java index 0961135b1..2cc9ef066 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppDeployment.AppDeploymentSerializer.class) +@JsonDeserialize(using = AppDeployment.AppDeploymentDeserializer.class) public class AppDeployment { /** The creation time of the deployment. Formatted timestamp in ISO 6801. */ - @JsonProperty("create_time") private String createTime; /** The email of the user creates the deployment. */ - @JsonProperty("creator") private String creator; /** The deployment artifacts for an app. */ - @JsonProperty("deployment_artifacts") private AppDeploymentArtifacts deploymentArtifacts; /** The unique id of the deployment. */ - @JsonProperty("deployment_id") private String deploymentId; /** The mode of which the deployment will manage the source code. */ - @JsonProperty("mode") private AppDeploymentMode mode; /** @@ -36,15 +42,12 @@ public class AppDeployment { * deployment creation, whereas the latter provides a system generated stable snapshotted source * code path used by the deployment. */ - @JsonProperty("source_code_path") private String sourceCodePath; /** Status and status message of the deployment */ - @JsonProperty("status") private AppDeploymentStatus status; /** The update time of the deployment. Formatted timestamp in ISO 6801. 
*/ - @JsonProperty("update_time") private String updateTime; public AppDeployment setCreateTime(String createTime) { @@ -160,4 +163,51 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + AppDeploymentPb toPb() { + AppDeploymentPb pb = new AppDeploymentPb(); + pb.setCreateTime(createTime); + pb.setCreator(creator); + pb.setDeploymentArtifacts(deploymentArtifacts); + pb.setDeploymentId(deploymentId); + pb.setMode(mode); + pb.setSourceCodePath(sourceCodePath); + pb.setStatus(status); + pb.setUpdateTime(updateTime); + + return pb; + } + + static AppDeployment fromPb(AppDeploymentPb pb) { + AppDeployment model = new AppDeployment(); + model.setCreateTime(pb.getCreateTime()); + model.setCreator(pb.getCreator()); + model.setDeploymentArtifacts(pb.getDeploymentArtifacts()); + model.setDeploymentId(pb.getDeploymentId()); + model.setMode(pb.getMode()); + model.setSourceCodePath(pb.getSourceCodePath()); + model.setStatus(pb.getStatus()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class AppDeploymentSerializer extends JsonSerializer { + @Override + public void serialize(AppDeployment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppDeploymentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppDeploymentDeserializer extends JsonDeserializer { + @Override + public AppDeployment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppDeploymentPb pb = mapper.readValue(p, AppDeploymentPb.class); + return AppDeployment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifacts.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifacts.java index 26666fed0..3221f04ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifacts.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifacts.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppDeploymentArtifacts.AppDeploymentArtifactsSerializer.class) +@JsonDeserialize(using = AppDeploymentArtifacts.AppDeploymentArtifactsDeserializer.class) public class AppDeploymentArtifacts { /** The snapshotted workspace file system path of the source code loaded by the deployed app. 
*/ - @JsonProperty("source_code_path") private String sourceCodePath; public AppDeploymentArtifacts setSourceCodePath(String sourceCodePath) { @@ -41,4 +51,41 @@ public String toString() { .add("sourceCodePath", sourceCodePath) .toString(); } + + AppDeploymentArtifactsPb toPb() { + AppDeploymentArtifactsPb pb = new AppDeploymentArtifactsPb(); + pb.setSourceCodePath(sourceCodePath); + + return pb; + } + + static AppDeploymentArtifacts fromPb(AppDeploymentArtifactsPb pb) { + AppDeploymentArtifacts model = new AppDeploymentArtifacts(); + model.setSourceCodePath(pb.getSourceCodePath()); + + return model; + } + + public static class AppDeploymentArtifactsSerializer + extends JsonSerializer { + @Override + public void serialize( + AppDeploymentArtifacts value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppDeploymentArtifactsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppDeploymentArtifactsDeserializer + extends JsonDeserializer { + @Override + public AppDeploymentArtifacts deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppDeploymentArtifactsPb pb = mapper.readValue(p, AppDeploymentArtifactsPb.class); + return AppDeploymentArtifacts.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifactsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifactsPb.java new file mode 100755 index 000000000..a501fd075 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentArtifactsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppDeploymentArtifactsPb { + @JsonProperty("source_code_path") + private String sourceCodePath; + + public AppDeploymentArtifactsPb setSourceCodePath(String sourceCodePath) { + this.sourceCodePath = sourceCodePath; + return this; + } + + public String getSourceCodePath() { + return sourceCodePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppDeploymentArtifactsPb that = (AppDeploymentArtifactsPb) o; + return Objects.equals(sourceCodePath, that.sourceCodePath); + } + + @Override + public int hashCode() { + return Objects.hash(sourceCodePath); + } + + @Override + public String toString() { + return new ToStringer(AppDeploymentArtifactsPb.class) + .add("sourceCodePath", sourceCodePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentPb.java new file mode 100755 index 000000000..cd26abd3f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppDeploymentPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("deployment_artifacts") + private AppDeploymentArtifacts deploymentArtifacts; + + @JsonProperty("deployment_id") + private String deploymentId; + + @JsonProperty("mode") + private AppDeploymentMode mode; + + @JsonProperty("source_code_path") + private String sourceCodePath; + + @JsonProperty("status") + private AppDeploymentStatus status; + + @JsonProperty("update_time") + private String updateTime; + + public AppDeploymentPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public AppDeploymentPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public AppDeploymentPb setDeploymentArtifacts(AppDeploymentArtifacts deploymentArtifacts) { + this.deploymentArtifacts = deploymentArtifacts; + return this; + } + + public AppDeploymentArtifacts getDeploymentArtifacts() { + return deploymentArtifacts; + } + + public AppDeploymentPb setDeploymentId(String deploymentId) { + this.deploymentId = deploymentId; + return this; + } + + public String getDeploymentId() { + return deploymentId; + } + + public AppDeploymentPb setMode(AppDeploymentMode mode) { + this.mode = mode; + return this; + } + + public AppDeploymentMode getMode() { + return mode; + } + + public AppDeploymentPb setSourceCodePath(String sourceCodePath) { + this.sourceCodePath = sourceCodePath; + return this; + } + + public String getSourceCodePath() { + return sourceCodePath; + } + + public AppDeploymentPb setStatus(AppDeploymentStatus status) { + 
this.status = status; + return this; + } + + public AppDeploymentStatus getStatus() { + return status; + } + + public AppDeploymentPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppDeploymentPb that = (AppDeploymentPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(creator, that.creator) + && Objects.equals(deploymentArtifacts, that.deploymentArtifacts) + && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(mode, that.mode) + && Objects.equals(sourceCodePath, that.sourceCodePath) + && Objects.equals(status, that.status) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + creator, + deploymentArtifacts, + deploymentId, + mode, + sourceCodePath, + status, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(AppDeploymentPb.class) + .add("createTime", createTime) + .add("creator", creator) + .add("deploymentArtifacts", deploymentArtifacts) + .add("deploymentId", deploymentId) + .add("mode", mode) + .add("sourceCodePath", sourceCodePath) + .add("status", status) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java index 095be8232..9c91c4347 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppDeploymentStatus.AppDeploymentStatusSerializer.class) +@JsonDeserialize(using = AppDeploymentStatus.AppDeploymentStatusDeserializer.class) public class AppDeploymentStatus { /** Message corresponding with the deployment state. */ - @JsonProperty("message") private String message; /** State of the deployment. */ - @JsonProperty("state") private AppDeploymentState state; public AppDeploymentStatus setMessage(String message) { @@ -55,4 +64,41 @@ public String toString() { .add("state", state) .toString(); } + + AppDeploymentStatusPb toPb() { + AppDeploymentStatusPb pb = new AppDeploymentStatusPb(); + pb.setMessage(message); + pb.setState(state); + + return pb; + } + + static AppDeploymentStatus fromPb(AppDeploymentStatusPb pb) { + AppDeploymentStatus model = new AppDeploymentStatus(); + model.setMessage(pb.getMessage()); + model.setState(pb.getState()); + + return model; + } + + public static class AppDeploymentStatusSerializer extends JsonSerializer { + @Override + public void serialize(AppDeploymentStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppDeploymentStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppDeploymentStatusDeserializer + extends JsonDeserializer { + @Override + public AppDeploymentStatus deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppDeploymentStatusPb pb = mapper.readValue(p, AppDeploymentStatusPb.class); + return AppDeploymentStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatusPb.java new file mode 100755 index 000000000..cacc7e475 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatusPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppDeploymentStatusPb { + @JsonProperty("message") + private String message; + + @JsonProperty("state") + private AppDeploymentState state; + + public AppDeploymentStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public AppDeploymentStatusPb setState(AppDeploymentState state) { + this.state = state; + return this; + } + + public AppDeploymentState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppDeploymentStatusPb that = (AppDeploymentStatusPb) o; + return Objects.equals(message, that.message) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(message, state); + } + + @Override + public String toString() { + return new ToStringer(AppDeploymentStatusPb.class) + .add("message", message) + 
.add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPb.java new file mode 100755 index 000000000..678dbc330 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPb.java @@ -0,0 +1,375 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AppPb { + @JsonProperty("active_deployment") + private AppDeployment activeDeployment; + + @JsonProperty("app_status") + private ApplicationStatus appStatus; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("compute_status") + private ComputeStatus computeStatus; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("default_source_code_path") + private String defaultSourceCodePath; + + @JsonProperty("description") + private String description; + + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + @JsonProperty("effective_user_api_scopes") + private Collection effectiveUserApiScopes; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("oauth2_app_client_id") + private String oauth2AppClientId; + + @JsonProperty("oauth2_app_integration_id") + private String oauth2AppIntegrationId; + + @JsonProperty("pending_deployment") + private AppDeployment pendingDeployment; + + @JsonProperty("resources") + private Collection resources; + + @JsonProperty("service_principal_client_id") + private String servicePrincipalClientId; + + 
@JsonProperty("service_principal_id") + private Long servicePrincipalId; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("updater") + private String updater; + + @JsonProperty("url") + private String url; + + @JsonProperty("user_api_scopes") + private Collection userApiScopes; + + public AppPb setActiveDeployment(AppDeployment activeDeployment) { + this.activeDeployment = activeDeployment; + return this; + } + + public AppDeployment getActiveDeployment() { + return activeDeployment; + } + + public AppPb setAppStatus(ApplicationStatus appStatus) { + this.appStatus = appStatus; + return this; + } + + public ApplicationStatus getAppStatus() { + return appStatus; + } + + public AppPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public AppPb setComputeStatus(ComputeStatus computeStatus) { + this.computeStatus = computeStatus; + return this; + } + + public ComputeStatus getComputeStatus() { + return computeStatus; + } + + public AppPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public AppPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public AppPb setDefaultSourceCodePath(String defaultSourceCodePath) { + this.defaultSourceCodePath = defaultSourceCodePath; + return this; + } + + public String getDefaultSourceCodePath() { + return defaultSourceCodePath; + } + + public AppPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AppPb setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = 
effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public AppPb setEffectiveUserApiScopes(Collection effectiveUserApiScopes) { + this.effectiveUserApiScopes = effectiveUserApiScopes; + return this; + } + + public Collection getEffectiveUserApiScopes() { + return effectiveUserApiScopes; + } + + public AppPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AppPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppPb setOauth2AppClientId(String oauth2AppClientId) { + this.oauth2AppClientId = oauth2AppClientId; + return this; + } + + public String getOauth2AppClientId() { + return oauth2AppClientId; + } + + public AppPb setOauth2AppIntegrationId(String oauth2AppIntegrationId) { + this.oauth2AppIntegrationId = oauth2AppIntegrationId; + return this; + } + + public String getOauth2AppIntegrationId() { + return oauth2AppIntegrationId; + } + + public AppPb setPendingDeployment(AppDeployment pendingDeployment) { + this.pendingDeployment = pendingDeployment; + return this; + } + + public AppDeployment getPendingDeployment() { + return pendingDeployment; + } + + public AppPb setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + public AppPb setServicePrincipalClientId(String servicePrincipalClientId) { + this.servicePrincipalClientId = servicePrincipalClientId; + return this; + } + + public String getServicePrincipalClientId() { + return servicePrincipalClientId; + } + + public AppPb setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + public AppPb setServicePrincipalName(String servicePrincipalName) { + 
this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public AppPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public AppPb setUpdater(String updater) { + this.updater = updater; + return this; + } + + public String getUpdater() { + return updater; + } + + public AppPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + public AppPb setUserApiScopes(Collection userApiScopes) { + this.userApiScopes = userApiScopes; + return this; + } + + public Collection getUserApiScopes() { + return userApiScopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppPb that = (AppPb) o; + return Objects.equals(activeDeployment, that.activeDeployment) + && Objects.equals(appStatus, that.appStatus) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(computeStatus, that.computeStatus) + && Objects.equals(createTime, that.createTime) + && Objects.equals(creator, that.creator) + && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) + && Objects.equals(description, that.description) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(oauth2AppClientId, that.oauth2AppClientId) + && Objects.equals(oauth2AppIntegrationId, that.oauth2AppIntegrationId) + && Objects.equals(pendingDeployment, that.pendingDeployment) + && Objects.equals(resources, that.resources) + && Objects.equals(servicePrincipalClientId, that.servicePrincipalClientId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId) + && 
Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(updater, that.updater) + && Objects.equals(url, that.url) + && Objects.equals(userApiScopes, that.userApiScopes); + } + + @Override + public int hashCode() { + return Objects.hash( + activeDeployment, + appStatus, + budgetPolicyId, + computeStatus, + createTime, + creator, + defaultSourceCodePath, + description, + effectiveBudgetPolicyId, + effectiveUserApiScopes, + id, + name, + oauth2AppClientId, + oauth2AppIntegrationId, + pendingDeployment, + resources, + servicePrincipalClientId, + servicePrincipalId, + servicePrincipalName, + updateTime, + updater, + url, + userApiScopes); + } + + @Override + public String toString() { + return new ToStringer(AppPb.class) + .add("activeDeployment", activeDeployment) + .add("appStatus", appStatus) + .add("budgetPolicyId", budgetPolicyId) + .add("computeStatus", computeStatus) + .add("createTime", createTime) + .add("creator", creator) + .add("defaultSourceCodePath", defaultSourceCodePath) + .add("description", description) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUserApiScopes", effectiveUserApiScopes) + .add("id", id) + .add("name", name) + .add("oauth2AppClientId", oauth2AppClientId) + .add("oauth2AppIntegrationId", oauth2AppIntegrationId) + .add("pendingDeployment", pendingDeployment) + .add("resources", resources) + .add("servicePrincipalClientId", servicePrincipalClientId) + .add("servicePrincipalId", servicePrincipalId) + .add("servicePrincipalName", servicePrincipalName) + .add("updateTime", updateTime) + .add("updater", updater) + .add("url", url) + .add("userApiScopes", userApiScopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java index 39fcd6726..90a6db628 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AppPermission.AppPermissionSerializer.class) +@JsonDeserialize(using = AppPermission.AppPermissionDeserializer.class) public class AppPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; public AppPermission setInherited(Boolean inherited) { @@ -72,4 +80,41 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + AppPermissionPb toPb() { + AppPermissionPb pb = new AppPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static AppPermission fromPb(AppPermissionPb pb) { + AppPermission model = new AppPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + 
model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class AppPermissionSerializer extends JsonSerializer { + @Override + public void serialize(AppPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppPermissionDeserializer extends JsonDeserializer { + @Override + public AppPermission deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppPermissionPb pb = mapper.readValue(p, AppPermissionPb.class); + return AppPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionPb.java new file mode 100755 index 000000000..41165f2d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AppPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private AppPermissionLevel permissionLevel; + + public AppPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public AppPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public AppPermissionPb setPermissionLevel(AppPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public AppPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppPermissionPb that = (AppPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(AppPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java index 06a281e13..939533ff7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AppPermissions.AppPermissionsSerializer.class) +@JsonDeserialize(using = AppPermissions.AppPermissionsDeserializer.class) public class AppPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public AppPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + AppPermissionsPb toPb() { + AppPermissionsPb pb = new AppPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static AppPermissions fromPb(AppPermissionsPb pb) { + AppPermissions model = new AppPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + 
model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class AppPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(AppPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppPermissionsDeserializer extends JsonDeserializer { + @Override + public AppPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppPermissionsPb pb = mapper.readValue(p, AppPermissionsPb.class); + return AppPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java index 94fb8781f..0ac838279 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = AppPermissionsDescription.AppPermissionsDescriptionSerializer.class) +@JsonDeserialize(using = AppPermissionsDescription.AppPermissionsDescriptionDeserializer.class) public class AppPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; public AppPermissionsDescription setDescription(String description) { @@ -56,4 +65,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + AppPermissionsDescriptionPb toPb() { + AppPermissionsDescriptionPb pb = new AppPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static AppPermissionsDescription fromPb(AppPermissionsDescriptionPb pb) { + AppPermissionsDescription model = new AppPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class AppPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + AppPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public AppPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppPermissionsDescriptionPb pb = mapper.readValue(p, AppPermissionsDescriptionPb.class); + return AppPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescriptionPb.java new file mode 100755 index 000000000..163761b60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private AppPermissionLevel permissionLevel; + + public AppPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AppPermissionsDescriptionPb setPermissionLevel(AppPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public AppPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppPermissionsDescriptionPb that = (AppPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + 
return new ToStringer(AppPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsPb.java new file mode 100755 index 000000000..ca01d8ee3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AppPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public AppPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public AppPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public AppPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppPermissionsPb that = (AppPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, 
that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(AppPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java index a2733ecd9..44191ce8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AppPermissionsRequest.AppPermissionsRequestSerializer.class) +@JsonDeserialize(using = AppPermissionsRequest.AppPermissionsRequestDeserializer.class) public class AppPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The app for which to get or manage permissions. 
*/ - @JsonIgnore private String appName; + private String appName; public AppPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("appName", appName) .toString(); } + + AppPermissionsRequestPb toPb() { + AppPermissionsRequestPb pb = new AppPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setAppName(appName); + + return pb; + } + + static AppPermissionsRequest fromPb(AppPermissionsRequestPb pb) { + AppPermissionsRequest model = new AppPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setAppName(pb.getAppName()); + + return model; + } + + public static class AppPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + AppPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public AppPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppPermissionsRequestPb pb = mapper.readValue(p, AppPermissionsRequestPb.class); + return AppPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequestPb.java new file mode 100755 index 000000000..a1abe7d83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AppPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String appName; + + public AppPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public AppPermissionsRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppPermissionsRequestPb that = (AppPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(appName, that.appName); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, appName); + } + + @Override + public String toString() { + return new ToStringer(AppPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("appName", appName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java index 727e187f3..23127fe00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResource.AppResourceSerializer.class) +@JsonDeserialize(using = AppResource.AppResourceDeserializer.class) public class AppResource { /** Description of the App Resource. */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("job") private AppResourceJob job; /** Name of the App Resource. */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("secret") private AppResourceSecret secret; /** */ - @JsonProperty("serving_endpoint") private AppResourceServingEndpoint servingEndpoint; /** */ - @JsonProperty("sql_warehouse") private AppResourceSqlWarehouse sqlWarehouse; /** */ - @JsonProperty("uc_securable") private AppResourceUcSecurable ucSecurable; public AppResource setDescription(String description) { @@ -131,4 +135,49 @@ public String toString() { .add("ucSecurable", ucSecurable) .toString(); } + + AppResourcePb toPb() { + AppResourcePb pb = new AppResourcePb(); + pb.setDescription(description); + pb.setJob(job); + pb.setName(name); + pb.setSecret(secret); + pb.setServingEndpoint(servingEndpoint); + pb.setSqlWarehouse(sqlWarehouse); + pb.setUcSecurable(ucSecurable); + + return pb; + } + + static AppResource fromPb(AppResourcePb pb) { + AppResource model = new AppResource(); + model.setDescription(pb.getDescription()); + model.setJob(pb.getJob()); + model.setName(pb.getName()); 
+ model.setSecret(pb.getSecret()); + model.setServingEndpoint(pb.getServingEndpoint()); + model.setSqlWarehouse(pb.getSqlWarehouse()); + model.setUcSecurable(pb.getUcSecurable()); + + return model; + } + + public static class AppResourceSerializer extends JsonSerializer { + @Override + public void serialize(AppResource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppResourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceDeserializer extends JsonDeserializer { + @Override + public AppResource deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourcePb pb = mapper.readValue(p, AppResourcePb.class); + return AppResource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java index e9f546520..9bcab7fe5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResourceJob.AppResourceJobSerializer.class) +@JsonDeserialize(using = AppResourceJob.AppResourceJobDeserializer.class) public class AppResourceJob { /** Id of the job to grant permission on. */ - @JsonProperty("id") private String id; /** * Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", * "CAN_MANAGE_RUN", "CAN_VIEW". */ - @JsonProperty("permission") private AppResourceJobJobPermission permission; public AppResourceJob setId(String id) { @@ -58,4 +67,40 @@ public String toString() { .add("permission", permission) .toString(); } + + AppResourceJobPb toPb() { + AppResourceJobPb pb = new AppResourceJobPb(); + pb.setId(id); + pb.setPermission(permission); + + return pb; + } + + static AppResourceJob fromPb(AppResourceJobPb pb) { + AppResourceJob model = new AppResourceJob(); + model.setId(pb.getId()); + model.setPermission(pb.getPermission()); + + return model; + } + + public static class AppResourceJobSerializer extends JsonSerializer { + @Override + public void serialize(AppResourceJob value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppResourceJobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceJobDeserializer extends JsonDeserializer { + @Override + public AppResourceJob deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourceJobPb pb = mapper.readValue(p, AppResourceJobPb.class); + return AppResourceJob.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobPb.java new file mode 100755 index 000000000..e5c9a2930 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourceJobPb { + @JsonProperty("id") + private String id; + + @JsonProperty("permission") + private AppResourceJobJobPermission permission; + + public AppResourceJobPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AppResourceJobPb setPermission(AppResourceJobJobPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceJobJobPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceJobPb that = (AppResourceJobPb) o; + return Objects.equals(id, that.id) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(id, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceJobPb.class) + .add("id", id) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourcePb.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourcePb.java new file mode 100755 index 000000000..26e159074 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourcePb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourcePb { + @JsonProperty("description") + private String description; + + @JsonProperty("job") + private AppResourceJob job; + + @JsonProperty("name") + private String name; + + @JsonProperty("secret") + private AppResourceSecret secret; + + @JsonProperty("serving_endpoint") + private AppResourceServingEndpoint servingEndpoint; + + @JsonProperty("sql_warehouse") + private AppResourceSqlWarehouse sqlWarehouse; + + @JsonProperty("uc_securable") + private AppResourceUcSecurable ucSecurable; + + public AppResourcePb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AppResourcePb setJob(AppResourceJob job) { + this.job = job; + return this; + } + + public AppResourceJob getJob() { + return job; + } + + public AppResourcePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResourcePb setSecret(AppResourceSecret secret) { + this.secret = secret; + return this; + } + + public AppResourceSecret getSecret() { + return secret; + } + + public AppResourcePb setServingEndpoint(AppResourceServingEndpoint servingEndpoint) { + this.servingEndpoint = servingEndpoint; + return this; + } + + public AppResourceServingEndpoint getServingEndpoint() { + return servingEndpoint; + } + + public AppResourcePb 
setSqlWarehouse(AppResourceSqlWarehouse sqlWarehouse) { + this.sqlWarehouse = sqlWarehouse; + return this; + } + + public AppResourceSqlWarehouse getSqlWarehouse() { + return sqlWarehouse; + } + + public AppResourcePb setUcSecurable(AppResourceUcSecurable ucSecurable) { + this.ucSecurable = ucSecurable; + return this; + } + + public AppResourceUcSecurable getUcSecurable() { + return ucSecurable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourcePb that = (AppResourcePb) o; + return Objects.equals(description, that.description) + && Objects.equals(job, that.job) + && Objects.equals(name, that.name) + && Objects.equals(secret, that.secret) + && Objects.equals(servingEndpoint, that.servingEndpoint) + && Objects.equals(sqlWarehouse, that.sqlWarehouse) + && Objects.equals(ucSecurable, that.ucSecurable); + } + + @Override + public int hashCode() { + return Objects.hash(description, job, name, secret, servingEndpoint, sqlWarehouse, ucSecurable); + } + + @Override + public String toString() { + return new ToStringer(AppResourcePb.class) + .add("description", description) + .add("job", job) + .add("name", name) + .add("secret", secret) + .add("servingEndpoint", servingEndpoint) + .add("sqlWarehouse", sqlWarehouse) + .add("ucSecurable", ucSecurable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java index 4f32fb90a..1745960de 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResourceSecret.AppResourceSecretSerializer.class) +@JsonDeserialize(using = AppResourceSecret.AppResourceSecretDeserializer.class) public class AppResourceSecret { /** Key of the secret to grant permission on. */ - @JsonProperty("key") private String key; /** * Permission to grant on the secret scope. For secrets, only one permission is allowed. * Permission must be one of: "READ", "WRITE", "MANAGE". */ - @JsonProperty("permission") private AppResourceSecretSecretPermission permission; /** Scope of the secret to grant permission on. 
*/ - @JsonProperty("scope") private String scope; public AppResourceSecret setKey(String key) { @@ -74,4 +82,42 @@ public String toString() { .add("scope", scope) .toString(); } + + AppResourceSecretPb toPb() { + AppResourceSecretPb pb = new AppResourceSecretPb(); + pb.setKey(key); + pb.setPermission(permission); + pb.setScope(scope); + + return pb; + } + + static AppResourceSecret fromPb(AppResourceSecretPb pb) { + AppResourceSecret model = new AppResourceSecret(); + model.setKey(pb.getKey()); + model.setPermission(pb.getPermission()); + model.setScope(pb.getScope()); + + return model; + } + + public static class AppResourceSecretSerializer extends JsonSerializer { + @Override + public void serialize(AppResourceSecret value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppResourceSecretPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceSecretDeserializer extends JsonDeserializer { + @Override + public AppResourceSecret deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourceSecretPb pb = mapper.readValue(p, AppResourceSecretPb.class); + return AppResourceSecret.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretPb.java new file mode 100755 index 000000000..2a2c76151 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourceSecretPb { + @JsonProperty("key") + private String key; + + @JsonProperty("permission") + private AppResourceSecretSecretPermission permission; + + @JsonProperty("scope") + private String scope; + + public AppResourceSecretPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public AppResourceSecretPb setPermission(AppResourceSecretSecretPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceSecretSecretPermission getPermission() { + return permission; + } + + public AppResourceSecretPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceSecretPb that = (AppResourceSecretPb) o; + return Objects.equals(key, that.key) + && Objects.equals(permission, that.permission) + && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(key, permission, scope); + } + + @Override + public String toString() { + return new ToStringer(AppResourceSecretPb.class) + .add("key", key) + .add("permission", permission) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java index f52937c59..e850b202f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResourceServingEndpoint.AppResourceServingEndpointSerializer.class) +@JsonDeserialize(using = AppResourceServingEndpoint.AppResourceServingEndpointDeserializer.class) public class AppResourceServingEndpoint { /** Name of the serving endpoint to grant permission on. */ - @JsonProperty("name") private String name; /** * Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", * "CAN_QUERY", "CAN_VIEW". 
*/ - @JsonProperty("permission") private AppResourceServingEndpointServingEndpointPermission permission; public AppResourceServingEndpoint setName(String name) { @@ -59,4 +68,43 @@ public String toString() { .add("permission", permission) .toString(); } + + AppResourceServingEndpointPb toPb() { + AppResourceServingEndpointPb pb = new AppResourceServingEndpointPb(); + pb.setName(name); + pb.setPermission(permission); + + return pb; + } + + static AppResourceServingEndpoint fromPb(AppResourceServingEndpointPb pb) { + AppResourceServingEndpoint model = new AppResourceServingEndpoint(); + model.setName(pb.getName()); + model.setPermission(pb.getPermission()); + + return model; + } + + public static class AppResourceServingEndpointSerializer + extends JsonSerializer { + @Override + public void serialize( + AppResourceServingEndpoint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppResourceServingEndpointPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceServingEndpointDeserializer + extends JsonDeserializer { + @Override + public AppResourceServingEndpoint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourceServingEndpointPb pb = mapper.readValue(p, AppResourceServingEndpointPb.class); + return AppResourceServingEndpoint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointPb.java new file mode 100755 index 000000000..318db9104 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourceServingEndpointPb { + @JsonProperty("name") + private String name; + + @JsonProperty("permission") + private AppResourceServingEndpointServingEndpointPermission permission; + + public AppResourceServingEndpointPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResourceServingEndpointPb setPermission( + AppResourceServingEndpointServingEndpointPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceServingEndpointServingEndpointPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceServingEndpointPb that = (AppResourceServingEndpointPb) o; + return Objects.equals(name, that.name) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(name, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceServingEndpointPb.class) + .add("name", name) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java index f654d2285..60ebb2311 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResourceSqlWarehouse.AppResourceSqlWarehouseSerializer.class) +@JsonDeserialize(using = AppResourceSqlWarehouse.AppResourceSqlWarehouseDeserializer.class) public class AppResourceSqlWarehouse { /** Id of the SQL warehouse to grant permission on. */ - @JsonProperty("id") private String id; /** * Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE", * "IS_OWNER". 
*/ - @JsonProperty("permission") private AppResourceSqlWarehouseSqlWarehousePermission permission; public AppResourceSqlWarehouse setId(String id) { @@ -59,4 +68,43 @@ public String toString() { .add("permission", permission) .toString(); } + + AppResourceSqlWarehousePb toPb() { + AppResourceSqlWarehousePb pb = new AppResourceSqlWarehousePb(); + pb.setId(id); + pb.setPermission(permission); + + return pb; + } + + static AppResourceSqlWarehouse fromPb(AppResourceSqlWarehousePb pb) { + AppResourceSqlWarehouse model = new AppResourceSqlWarehouse(); + model.setId(pb.getId()); + model.setPermission(pb.getPermission()); + + return model; + } + + public static class AppResourceSqlWarehouseSerializer + extends JsonSerializer { + @Override + public void serialize( + AppResourceSqlWarehouse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AppResourceSqlWarehousePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceSqlWarehouseDeserializer + extends JsonDeserializer { + @Override + public AppResourceSqlWarehouse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourceSqlWarehousePb pb = mapper.readValue(p, AppResourceSqlWarehousePb.class); + return AppResourceSqlWarehouse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehousePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehousePb.java new file mode 100755 index 000000000..b8371ed72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehousePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourceSqlWarehousePb { + @JsonProperty("id") + private String id; + + @JsonProperty("permission") + private AppResourceSqlWarehouseSqlWarehousePermission permission; + + public AppResourceSqlWarehousePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AppResourceSqlWarehousePb setPermission( + AppResourceSqlWarehouseSqlWarehousePermission permission) { + this.permission = permission; + return this; + } + + public AppResourceSqlWarehouseSqlWarehousePermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceSqlWarehousePb that = (AppResourceSqlWarehousePb) o; + return Objects.equals(id, that.id) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(id, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceSqlWarehousePb.class) + .add("id", id) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java index 8944df179..63e11dacc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AppResourceUcSecurable.AppResourceUcSecurableSerializer.class) +@JsonDeserialize(using = AppResourceUcSecurable.AppResourceUcSecurableDeserializer.class) public class AppResourceUcSecurable { /** */ - @JsonProperty("permission") private AppResourceUcSecurableUcSecurablePermission permission; /** */ - @JsonProperty("securable_full_name") private String securableFullName; /** */ - @JsonProperty("securable_type") private AppResourceUcSecurableUcSecurableType securableType; public AppResourceUcSecurable setPermission( @@ -73,4 +81,45 @@ public String toString() { .add("securableType", securableType) .toString(); } + + AppResourceUcSecurablePb toPb() { + AppResourceUcSecurablePb pb = new AppResourceUcSecurablePb(); + pb.setPermission(permission); + pb.setSecurableFullName(securableFullName); + pb.setSecurableType(securableType); + + return pb; + } + + static AppResourceUcSecurable fromPb(AppResourceUcSecurablePb pb) { + AppResourceUcSecurable model = new AppResourceUcSecurable(); + model.setPermission(pb.getPermission()); + model.setSecurableFullName(pb.getSecurableFullName()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class AppResourceUcSecurableSerializer + extends JsonSerializer { + @Override + public void serialize( + AppResourceUcSecurable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
AppResourceUcSecurablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AppResourceUcSecurableDeserializer + extends JsonDeserializer { + @Override + public AppResourceUcSecurable deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AppResourceUcSecurablePb pb = mapper.readValue(p, AppResourceUcSecurablePb.class); + return AppResourceUcSecurable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurablePb.java new file mode 100755 index 000000000..da1efbeb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurablePb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AppResourceUcSecurablePb { + @JsonProperty("permission") + private AppResourceUcSecurableUcSecurablePermission permission; + + @JsonProperty("securable_full_name") + private String securableFullName; + + @JsonProperty("securable_type") + private AppResourceUcSecurableUcSecurableType securableType; + + public AppResourceUcSecurablePb setPermission( + AppResourceUcSecurableUcSecurablePermission permission) { + this.permission = permission; + return this; + } + + public AppResourceUcSecurableUcSecurablePermission getPermission() { + return permission; + } + + public AppResourceUcSecurablePb setSecurableFullName(String securableFullName) { + this.securableFullName = securableFullName; + return this; + } + + public String getSecurableFullName() { + return securableFullName; + } + + public AppResourceUcSecurablePb setSecurableType( + AppResourceUcSecurableUcSecurableType securableType) { + this.securableType = securableType; + return this; + } + + public AppResourceUcSecurableUcSecurableType getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceUcSecurablePb that = (AppResourceUcSecurablePb) o; + return Objects.equals(permission, that.permission) + && Objects.equals(securableFullName, that.securableFullName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(permission, securableFullName, securableType); + } + + @Override + public String toString() { + return new ToStringer(AppResourceUcSecurablePb.class) + .add("permission", permission) + .add("securableFullName", securableFullName) + .add("securableType", 
securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java index e86a89b16..ca18e9b76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ApplicationStatus.ApplicationStatusSerializer.class) +@JsonDeserialize(using = ApplicationStatus.ApplicationStatusDeserializer.class) public class ApplicationStatus { /** Application status message */ - @JsonProperty("message") private String message; /** State of the application. 
*/ - @JsonProperty("state") private ApplicationState state; public ApplicationStatus setMessage(String message) { @@ -55,4 +64,40 @@ public String toString() { .add("state", state) .toString(); } + + ApplicationStatusPb toPb() { + ApplicationStatusPb pb = new ApplicationStatusPb(); + pb.setMessage(message); + pb.setState(state); + + return pb; + } + + static ApplicationStatus fromPb(ApplicationStatusPb pb) { + ApplicationStatus model = new ApplicationStatus(); + model.setMessage(pb.getMessage()); + model.setState(pb.getState()); + + return model; + } + + public static class ApplicationStatusSerializer extends JsonSerializer { + @Override + public void serialize(ApplicationStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ApplicationStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ApplicationStatusDeserializer extends JsonDeserializer { + @Override + public ApplicationStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ApplicationStatusPb pb = mapper.readValue(p, ApplicationStatusPb.class); + return ApplicationStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatusPb.java new file mode 100755 index 000000000..489647be8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatusPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ApplicationStatusPb { + @JsonProperty("message") + private String message; + + @JsonProperty("state") + private ApplicationState state; + + public ApplicationStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public ApplicationStatusPb setState(ApplicationState state) { + this.state = state; + return this; + } + + public ApplicationState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApplicationStatusPb that = (ApplicationStatusPb) o; + return Objects.equals(message, that.message) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(message, state); + } + + @Override + public String toString() { + return new ToStringer(ApplicationStatusPb.class) + .add("message", message) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index 0e6ec9ff6..f83e5cd8e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -21,7 +21,7 @@ public App create(CreateAppRequest request) { String path = "/api/2.0/apps"; try { Request req = new Request("POST", path, apiClient.serialize(request.getApp())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return 
apiClient.execute(req, App.class); @@ -35,7 +35,7 @@ public App delete(DeleteAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, App.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public AppDeployment deploy(CreateAppDeploymentRequest request) { String path = String.format("/api/2.0/apps/%s/deployments", request.getAppName()); try { Request req = new Request("POST", path, apiClient.serialize(request.getAppDeployment())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AppDeployment.class); @@ -62,7 +62,7 @@ public App get(GetAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, App.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public AppDeployment getDeployment(GetAppDeploymentRequest request) { "/api/2.0/apps/%s/deployments/%s", request.getAppName(), request.getDeploymentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AppDeployment.class); } catch (IOException e) { @@ -91,7 +91,7 @@ public GetAppPermissionLevelsResponse getPermissionLevels(GetAppPermissionLevels String.format("/api/2.0/permissions/apps/%s/permissionLevels", request.getAppName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetAppPermissionLevelsResponse.class); } catch (IOException e) { @@ -104,7 +104,7 @@ public AppPermissions getPermissions(GetAppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AppPermissions.class); } catch (IOException e) { @@ -117,7 +117,7 @@ public ListAppsResponse list(ListAppsRequest request) { String path = "/api/2.0/apps"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAppsResponse.class); } catch (IOException e) { @@ -130,7 +130,7 @@ public ListAppDeploymentsResponse listDeployments(ListAppDeploymentsRequest requ String path = String.format("/api/2.0/apps/%s/deployments", request.getAppName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAppDeploymentsResponse.class); } catch (IOException e) { @@ -143,7 +143,7 @@ public AppPermissions setPermissions(AppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AppPermissions.class); @@ -157,7 +157,7 @@ public App start(StartAppRequest request) { String path = String.format("/api/2.0/apps/%s/start", request.getName()); try { 
Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, App.class); @@ -171,7 +171,7 @@ public App stop(StopAppRequest request) { String path = String.format("/api/2.0/apps/%s/stop", request.getName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, App.class); @@ -185,7 +185,7 @@ public App update(UpdateAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getApp())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, App.class); @@ -199,7 +199,7 @@ public AppPermissions updatePermissions(AppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AppPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java index 0af20eb5e..d815ca777 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ComputeStatus.ComputeStatusSerializer.class) +@JsonDeserialize(using = ComputeStatus.ComputeStatusDeserializer.class) public class ComputeStatus { /** Compute status message */ - @JsonProperty("message") private String message; /** State of the app compute. 
*/ - @JsonProperty("state") private ComputeState state; public ComputeStatus setMessage(String message) { @@ -55,4 +64,39 @@ public String toString() { .add("state", state) .toString(); } + + ComputeStatusPb toPb() { + ComputeStatusPb pb = new ComputeStatusPb(); + pb.setMessage(message); + pb.setState(state); + + return pb; + } + + static ComputeStatus fromPb(ComputeStatusPb pb) { + ComputeStatus model = new ComputeStatus(); + model.setMessage(pb.getMessage()); + model.setState(pb.getState()); + + return model; + } + + public static class ComputeStatusSerializer extends JsonSerializer { + @Override + public void serialize(ComputeStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComputeStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComputeStatusDeserializer extends JsonDeserializer { + @Override + public ComputeStatus deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComputeStatusPb pb = mapper.readValue(p, ComputeStatusPb.class); + return ComputeStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatusPb.java new file mode 100755 index 000000000..011ffab84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatusPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ComputeStatusPb { + @JsonProperty("message") + private String message; + + @JsonProperty("state") + private ComputeState state; + + public ComputeStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public ComputeStatusPb setState(ComputeState state) { + this.state = state; + return this; + } + + public ComputeState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComputeStatusPb that = (ComputeStatusPb) o; + return Objects.equals(message, that.message) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(message, state); + } + + @Override + public String toString() { + return new ToStringer(ComputeStatusPb.class) + .add("message", message) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Converters.java new file mode 100755 index 000000000..8b46ec0be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.apps; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + 
Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that seconds are within the range representable by google.protobuf.Duration + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that seconds are within the supported range + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List<String> fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List<String> fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java index 1d0425673..66887bde0 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an app deployment */ @Generated +@JsonSerialize(using = CreateAppDeploymentRequest.CreateAppDeploymentRequestSerializer.class) +@JsonDeserialize(using = CreateAppDeploymentRequest.CreateAppDeploymentRequestDeserializer.class) public class CreateAppDeploymentRequest { /** */ - @JsonProperty("app_deployment") private AppDeployment appDeployment; /** The name of the app. 
*/ - @JsonIgnore private String appName; + private String appName; public CreateAppDeploymentRequest setAppDeployment(AppDeployment appDeployment) { this.appDeployment = appDeployment; @@ -57,4 +66,43 @@ public String toString() { .add("appName", appName) .toString(); } + + CreateAppDeploymentRequestPb toPb() { + CreateAppDeploymentRequestPb pb = new CreateAppDeploymentRequestPb(); + pb.setAppDeployment(appDeployment); + pb.setAppName(appName); + + return pb; + } + + static CreateAppDeploymentRequest fromPb(CreateAppDeploymentRequestPb pb) { + CreateAppDeploymentRequest model = new CreateAppDeploymentRequest(); + model.setAppDeployment(pb.getAppDeployment()); + model.setAppName(pb.getAppName()); + + return model; + } + + public static class CreateAppDeploymentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateAppDeploymentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAppDeploymentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAppDeploymentRequestDeserializer + extends JsonDeserializer { + @Override + public CreateAppDeploymentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAppDeploymentRequestPb pb = mapper.readValue(p, CreateAppDeploymentRequestPb.class); + return CreateAppDeploymentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequestPb.java new file mode 100755 index 000000000..6e563bc0a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an app deployment */ +@Generated +class CreateAppDeploymentRequestPb { + @JsonProperty("app_deployment") + private AppDeployment appDeployment; + + @JsonIgnore private String appName; + + public CreateAppDeploymentRequestPb setAppDeployment(AppDeployment appDeployment) { + this.appDeployment = appDeployment; + return this; + } + + public AppDeployment getAppDeployment() { + return appDeployment; + } + + public CreateAppDeploymentRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAppDeploymentRequestPb that = (CreateAppDeploymentRequestPb) o; + return Objects.equals(appDeployment, that.appDeployment) + && Objects.equals(appName, that.appName); + } + + @Override + public int hashCode() { + return Objects.hash(appDeployment, appName); + } + + @Override + public String toString() { + 
return new ToStringer(CreateAppDeploymentRequestPb.class) + .add("appDeployment", appDeployment) + .add("appName", appName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java index 0a2d2eb59..7db01e4bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.apps; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an app */ @Generated +@JsonSerialize(using = CreateAppRequest.CreateAppRequestSerializer.class) +@JsonDeserialize(using = CreateAppRequest.CreateAppRequestDeserializer.class) public class CreateAppRequest { /** */ - @JsonProperty("app") private App app; /** If true, the app will not be started after creation. 
*/ - @JsonIgnore - @QueryParam("no_compute") private Boolean noCompute; public CreateAppRequest setApp(App app) { @@ -59,4 +65,40 @@ public String toString() { .add("noCompute", noCompute) .toString(); } + + CreateAppRequestPb toPb() { + CreateAppRequestPb pb = new CreateAppRequestPb(); + pb.setApp(app); + pb.setNoCompute(noCompute); + + return pb; + } + + static CreateAppRequest fromPb(CreateAppRequestPb pb) { + CreateAppRequest model = new CreateAppRequest(); + model.setApp(pb.getApp()); + model.setNoCompute(pb.getNoCompute()); + + return model; + } + + public static class CreateAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAppRequestDeserializer extends JsonDeserializer { + @Override + public CreateAppRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAppRequestPb pb = mapper.readValue(p, CreateAppRequestPb.class); + return CreateAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequestPb.java new file mode 100755 index 000000000..5806b2f99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an app */ +@Generated +class CreateAppRequestPb { + @JsonProperty("app") + private App app; + + @JsonIgnore + @QueryParam("no_compute") + private Boolean noCompute; + + public CreateAppRequestPb setApp(App app) { + this.app = app; + return this; + } + + public App getApp() { + return app; + } + + public CreateAppRequestPb setNoCompute(Boolean noCompute) { + this.noCompute = noCompute; + return this; + } + + public Boolean getNoCompute() { + return noCompute; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAppRequestPb that = (CreateAppRequestPb) o; + return Objects.equals(app, that.app) && Objects.equals(noCompute, that.noCompute); + } + + @Override + public int hashCode() { + return Objects.hash(app, noCompute); + } + + @Override + public String toString() { + return new ToStringer(CreateAppRequestPb.class) + .add("app", app) + .add("noCompute", noCompute) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java index 16958972e..e38a1be2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an app */ @Generated +@JsonSerialize(using = DeleteAppRequest.DeleteAppRequestSerializer.class) +@JsonDeserialize(using = DeleteAppRequest.DeleteAppRequestDeserializer.class) public class DeleteAppRequest { /** The name of the app. */ - @JsonIgnore private String name; + private String name; public DeleteAppRequest setName(String name) { this.name = name; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAppRequest.class).add("name", name).toString(); } + + DeleteAppRequestPb toPb() { + DeleteAppRequestPb pb = new DeleteAppRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteAppRequest fromPb(DeleteAppRequestPb pb) { + DeleteAppRequest model = new DeleteAppRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAppRequestDeserializer extends JsonDeserializer { + @Override + public DeleteAppRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAppRequestPb pb = mapper.readValue(p, DeleteAppRequestPb.class); + return DeleteAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequestPb.java new file mode 100755 index 000000000..5d78aa7d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an app */ +@Generated +class DeleteAppRequestPb { + @JsonIgnore private String name; + + public DeleteAppRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAppRequestPb that = (DeleteAppRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteAppRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java index 6f34447d1..6632c1540 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java @@ -4,17 +4,28 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an app deployment */ @Generated +@JsonSerialize(using = GetAppDeploymentRequest.GetAppDeploymentRequestSerializer.class) +@JsonDeserialize(using = GetAppDeploymentRequest.GetAppDeploymentRequestDeserializer.class) public class GetAppDeploymentRequest { /** The name of the app. */ - @JsonIgnore private String appName; + private String appName; /** The unique id of the deployment. 
*/ - @JsonIgnore private String deploymentId; + private String deploymentId; public GetAppDeploymentRequest setAppName(String appName) { this.appName = appName; @@ -54,4 +65,43 @@ public String toString() { .add("deploymentId", deploymentId) .toString(); } + + GetAppDeploymentRequestPb toPb() { + GetAppDeploymentRequestPb pb = new GetAppDeploymentRequestPb(); + pb.setAppName(appName); + pb.setDeploymentId(deploymentId); + + return pb; + } + + static GetAppDeploymentRequest fromPb(GetAppDeploymentRequestPb pb) { + GetAppDeploymentRequest model = new GetAppDeploymentRequest(); + model.setAppName(pb.getAppName()); + model.setDeploymentId(pb.getDeploymentId()); + + return model; + } + + public static class GetAppDeploymentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAppDeploymentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAppDeploymentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAppDeploymentRequestDeserializer + extends JsonDeserializer { + @Override + public GetAppDeploymentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAppDeploymentRequestPb pb = mapper.readValue(p, GetAppDeploymentRequestPb.class); + return GetAppDeploymentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequestPb.java new file mode 100755 index 000000000..58c88cd37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an app deployment */ +@Generated +class GetAppDeploymentRequestPb { + @JsonIgnore private String appName; + + @JsonIgnore private String deploymentId; + + public GetAppDeploymentRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + public GetAppDeploymentRequestPb setDeploymentId(String deploymentId) { + this.deploymentId = deploymentId; + return this; + } + + public String getDeploymentId() { + return deploymentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppDeploymentRequestPb that = (GetAppDeploymentRequestPb) o; + return Objects.equals(appName, that.appName) && Objects.equals(deploymentId, that.deploymentId); + } + + @Override + public int hashCode() { + return Objects.hash(appName, deploymentId); + } + + @Override + public String toString() { + return new ToStringer(GetAppDeploymentRequestPb.class) + .add("appName", appName) + .add("deploymentId", deploymentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java index bb2651075..20ff9b5fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get app permission levels */ @Generated +@JsonSerialize(using = GetAppPermissionLevelsRequest.GetAppPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = GetAppPermissionLevelsRequest.GetAppPermissionLevelsRequestDeserializer.class) public class GetAppPermissionLevelsRequest { /** The app for which to get or manage permissions. */ - @JsonIgnore private String appName; + private String appName; public GetAppPermissionLevelsRequest setAppName(String appName) { this.appName = appName; @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetAppPermissionLevelsRequest.class).add("appName", appName).toString(); } + + GetAppPermissionLevelsRequestPb toPb() { + GetAppPermissionLevelsRequestPb pb = new GetAppPermissionLevelsRequestPb(); + pb.setAppName(appName); + + return pb; + } + + static GetAppPermissionLevelsRequest fromPb(GetAppPermissionLevelsRequestPb pb) { + GetAppPermissionLevelsRequest model = new GetAppPermissionLevelsRequest(); + model.setAppName(pb.getAppName()); + + return model; + } + + public static class GetAppPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAppPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAppPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GetAppPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetAppPermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAppPermissionLevelsRequestPb pb = + mapper.readValue(p, GetAppPermissionLevelsRequestPb.class); + return GetAppPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequestPb.java new file mode 100755 index 000000000..c01a9b800 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get app permission levels */ +@Generated +class GetAppPermissionLevelsRequestPb { + @JsonIgnore private String appName; + + public GetAppPermissionLevelsRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppPermissionLevelsRequestPb that = (GetAppPermissionLevelsRequestPb) o; + return Objects.equals(appName, that.appName); + } + + @Override + public int hashCode() { + return Objects.hash(appName); + } + + @Override + public String toString() { + return new ToStringer(GetAppPermissionLevelsRequestPb.class).add("appName", appName).toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java index a8a41f7da..83c7e8877 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetAppPermissionLevelsResponse.GetAppPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = GetAppPermissionLevelsResponse.GetAppPermissionLevelsResponseDeserializer.class) public class GetAppPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetAppPermissionLevelsResponse setPermissionLevels( @@ -43,4 +55,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetAppPermissionLevelsResponsePb toPb() { + GetAppPermissionLevelsResponsePb pb = new GetAppPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static 
GetAppPermissionLevelsResponse fromPb(GetAppPermissionLevelsResponsePb pb) { + GetAppPermissionLevelsResponse model = new GetAppPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetAppPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAppPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAppPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAppPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetAppPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAppPermissionLevelsResponsePb pb = + mapper.readValue(p, GetAppPermissionLevelsResponsePb.class); + return GetAppPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponsePb.java new file mode 100755 index 000000000..a53bba528 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetAppPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetAppPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppPermissionLevelsResponsePb that = (GetAppPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetAppPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java index bc44a9252..ceae0dd1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get app permissions */ @Generated +@JsonSerialize(using = GetAppPermissionsRequest.GetAppPermissionsRequestSerializer.class) +@JsonDeserialize(using = GetAppPermissionsRequest.GetAppPermissionsRequestDeserializer.class) public class GetAppPermissionsRequest { /** The app for which to get or manage permissions. */ - @JsonIgnore private String appName; + private String appName; public GetAppPermissionsRequest setAppName(String appName) { this.appName = appName; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetAppPermissionsRequest.class).add("appName", appName).toString(); } + + GetAppPermissionsRequestPb toPb() { + GetAppPermissionsRequestPb pb = new GetAppPermissionsRequestPb(); + pb.setAppName(appName); + + return pb; + } + + static GetAppPermissionsRequest fromPb(GetAppPermissionsRequestPb pb) { + GetAppPermissionsRequest model = new GetAppPermissionsRequest(); + model.setAppName(pb.getAppName()); + + return model; + } + + public static class GetAppPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAppPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAppPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAppPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetAppPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAppPermissionsRequestPb pb = mapper.readValue(p, GetAppPermissionsRequestPb.class); + return GetAppPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequestPb.java new file mode 100755 index 000000000..9dbe40ce0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get app permissions */ +@Generated +class GetAppPermissionsRequestPb { + @JsonIgnore private String appName; + + public GetAppPermissionsRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppPermissionsRequestPb that = (GetAppPermissionsRequestPb) o; + return Objects.equals(appName, that.appName); + } + + @Override + public int hashCode() { + return Objects.hash(appName); + } + + @Override + public String toString() { + return new ToStringer(GetAppPermissionsRequestPb.class).add("appName", appName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java index ec758e4b7..5c9133e47 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an app */ @Generated +@JsonSerialize(using = GetAppRequest.GetAppRequestSerializer.class) +@JsonDeserialize(using = GetAppRequest.GetAppRequestDeserializer.class) public class GetAppRequest { /** The name of the app. 
*/ - @JsonIgnore private String name; + private String name; public GetAppRequest setName(String name) { this.name = name; @@ -39,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(GetAppRequest.class).add("name", name).toString(); } + + GetAppRequestPb toPb() { + GetAppRequestPb pb = new GetAppRequestPb(); + pb.setName(name); + + return pb; + } + + static GetAppRequest fromPb(GetAppRequestPb pb) { + GetAppRequest model = new GetAppRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAppRequestDeserializer extends JsonDeserializer { + @Override + public GetAppRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAppRequestPb pb = mapper.readValue(p, GetAppRequestPb.class); + return GetAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequestPb.java new file mode 100755 index 000000000..1ea4c67e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an app */ +@Generated +class GetAppRequestPb { + @JsonIgnore private String name; + + public GetAppRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppRequestPb that = (GetAppRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetAppRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java index 521799ca5..b1e0800a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.apps; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List app deployments */ @Generated +@JsonSerialize(using = ListAppDeploymentsRequest.ListAppDeploymentsRequestSerializer.class) +@JsonDeserialize(using = ListAppDeploymentsRequest.ListAppDeploymentsRequestDeserializer.class) public class ListAppDeploymentsRequest { /** The name of the app. */ - @JsonIgnore private String appName; + private String appName; /** Upper bound for items returned. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Pagination token to go to the next page of apps. Requests first page if absent. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAppDeploymentsRequest setAppName(String appName) { @@ -74,4 +80,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAppDeploymentsRequestPb toPb() { + ListAppDeploymentsRequestPb pb = new ListAppDeploymentsRequestPb(); + pb.setAppName(appName); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAppDeploymentsRequest fromPb(ListAppDeploymentsRequestPb pb) { + ListAppDeploymentsRequest model = new ListAppDeploymentsRequest(); + model.setAppName(pb.getAppName()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAppDeploymentsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAppDeploymentsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAppDeploymentsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAppDeploymentsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAppDeploymentsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + 
// The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAppDeploymentsRequestPb pb = mapper.readValue(p, ListAppDeploymentsRequestPb.class); + return ListAppDeploymentsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequestPb.java new file mode 100755 index 000000000..5eeb64c06 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List app deployments */ +@Generated +class ListAppDeploymentsRequestPb { + @JsonIgnore private String appName; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAppDeploymentsRequestPb setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + public ListAppDeploymentsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAppDeploymentsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAppDeploymentsRequestPb that = 
(ListAppDeploymentsRequestPb) o; + return Objects.equals(appName, that.appName) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(appName, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAppDeploymentsRequestPb.class) + .add("appName", appName) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java index 3feef0752..3f9581514 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAppDeploymentsResponse.ListAppDeploymentsResponseSerializer.class) +@JsonDeserialize(using = ListAppDeploymentsResponse.ListAppDeploymentsResponseDeserializer.class) public class ListAppDeploymentsResponse { /** Deployment history of the app. 
*/ - @JsonProperty("app_deployments") private Collection appDeployments; /** Pagination token to request the next page of apps. */ - @JsonProperty("next_page_token") private String nextPageToken; public ListAppDeploymentsResponse setAppDeployments(Collection appDeployments) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListAppDeploymentsResponsePb toPb() { + ListAppDeploymentsResponsePb pb = new ListAppDeploymentsResponsePb(); + pb.setAppDeployments(appDeployments); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListAppDeploymentsResponse fromPb(ListAppDeploymentsResponsePb pb) { + ListAppDeploymentsResponse model = new ListAppDeploymentsResponse(); + model.setAppDeployments(pb.getAppDeployments()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListAppDeploymentsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAppDeploymentsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAppDeploymentsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAppDeploymentsResponseDeserializer + extends JsonDeserializer { + @Override + public ListAppDeploymentsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAppDeploymentsResponsePb pb = mapper.readValue(p, ListAppDeploymentsResponsePb.class); + return ListAppDeploymentsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponsePb.java new file mode 100755 index 000000000..cd3a1242b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAppDeploymentsResponsePb { + @JsonProperty("app_deployments") + private Collection appDeployments; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListAppDeploymentsResponsePb setAppDeployments(Collection appDeployments) { + this.appDeployments = appDeployments; + return this; + } + + public Collection getAppDeployments() { + return appDeployments; + } + + public ListAppDeploymentsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAppDeploymentsResponsePb that = (ListAppDeploymentsResponsePb) o; + return Objects.equals(appDeployments, that.appDeployments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(appDeployments, nextPageToken); + } + + @Override + 
public String toString() { + return new ToStringer(ListAppDeploymentsResponsePb.class) + .add("appDeployments", appDeployments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java index be3a51190..c8d7f7662 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.apps; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List apps */ @Generated +@JsonSerialize(using = ListAppsRequest.ListAppsRequestSerializer.class) +@JsonDeserialize(using = ListAppsRequest.ListAppsRequestDeserializer.class) public class ListAppsRequest { /** Upper bound for items returned. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Pagination token to go to the next page of apps. Requests first page if absent. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAppsRequest setPageSize(Long pageSize) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAppsRequestPb toPb() { + ListAppsRequestPb pb = new ListAppsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAppsRequest fromPb(ListAppsRequestPb pb) { + ListAppsRequest model = new ListAppsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAppsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListAppsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAppsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAppsRequestDeserializer extends JsonDeserializer { + @Override + public ListAppsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAppsRequestPb pb = mapper.readValue(p, ListAppsRequestPb.class); + return ListAppsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequestPb.java new file mode 100755 index 000000000..a64a28eb5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List apps */ +@Generated +class ListAppsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAppsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAppsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAppsRequestPb that = (ListAppsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAppsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponse.java index 20887ad7f..a13df6b3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAppsResponse.ListAppsResponseSerializer.class) +@JsonDeserialize(using = ListAppsResponse.ListAppsResponseDeserializer.class) public class ListAppsResponse { /** */ - @JsonProperty("apps") private Collection apps; /** Pagination token to request the next page of apps. */ - @JsonProperty("next_page_token") private String nextPageToken; public ListAppsResponse setApps(Collection apps) { @@ -56,4 +65,40 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListAppsResponsePb toPb() { + ListAppsResponsePb pb = new ListAppsResponsePb(); + pb.setApps(apps); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListAppsResponse fromPb(ListAppsResponsePb pb) { + ListAppsResponse model = new ListAppsResponse(); + model.setApps(pb.getApps()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListAppsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListAppsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAppsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAppsResponseDeserializer extends JsonDeserializer { + @Override + public ListAppsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAppsResponsePb pb = mapper.readValue(p, ListAppsResponsePb.class); + return ListAppsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponsePb.java new file mode 100755 index 000000000..c589a9dcd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAppsResponsePb { + @JsonProperty("apps") + private Collection apps; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListAppsResponsePb setApps(Collection apps) { + this.apps = apps; + return this; + } + + public Collection getApps() { + return apps; + } + + public ListAppsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAppsResponsePb that = (ListAppsResponsePb) o; + return Objects.equals(apps, that.apps) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(apps, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAppsResponsePb.class) + .add("apps", apps) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java index 47d786b94..e2902d276 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StartAppRequest.StartAppRequestSerializer.class) +@JsonDeserialize(using = StartAppRequest.StartAppRequestDeserializer.class) public class StartAppRequest { /** The name of the app. 
*/ - @JsonIgnore private String name; + private String name; public StartAppRequest setName(String name) { this.name = name; @@ -38,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(StartAppRequest.class).add("name", name).toString(); } + + StartAppRequestPb toPb() { + StartAppRequestPb pb = new StartAppRequestPb(); + pb.setName(name); + + return pb; + } + + static StartAppRequest fromPb(StartAppRequestPb pb) { + StartAppRequest model = new StartAppRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class StartAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(StartAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartAppRequestDeserializer extends JsonDeserializer { + @Override + public StartAppRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartAppRequestPb pb = mapper.readValue(p, StartAppRequestPb.class); + return StartAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequestPb.java new file mode 100755 index 000000000..b5f24611a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequestPb.java @@ -0,0 +1,40 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +class StartAppRequestPb { + @JsonIgnore private String name; + + public StartAppRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StartAppRequestPb that = (StartAppRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(StartAppRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java index 3a898c1f0..dfa46d02e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = StopAppRequest.StopAppRequestSerializer.class) +@JsonDeserialize(using = StopAppRequest.StopAppRequestDeserializer.class) public class StopAppRequest { /** The name of the app. */ - @JsonIgnore private String name; + private String name; public StopAppRequest setName(String name) { this.name = name; @@ -38,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(StopAppRequest.class).add("name", name).toString(); } + + StopAppRequestPb toPb() { + StopAppRequestPb pb = new StopAppRequestPb(); + pb.setName(name); + + return pb; + } + + static StopAppRequest fromPb(StopAppRequestPb pb) { + StopAppRequest model = new StopAppRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class StopAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(StopAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StopAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StopAppRequestDeserializer extends JsonDeserializer { + @Override + public StopAppRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StopAppRequestPb pb = mapper.readValue(p, StopAppRequestPb.class); + return StopAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequestPb.java new file mode 100755 index 000000000..40946ae55 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequestPb.java @@ -0,0 +1,40 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +class StopAppRequestPb { + @JsonIgnore private String name; + + public StopAppRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StopAppRequestPb that = (StopAppRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(StopAppRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java index 09baf63ac..1ff6c6296 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update an app */ @Generated +@JsonSerialize(using = UpdateAppRequest.UpdateAppRequestSerializer.class) +@JsonDeserialize(using = UpdateAppRequest.UpdateAppRequestDeserializer.class) public class UpdateAppRequest { /** */ - @JsonProperty("app") private App app; /** * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. * It must be unique within the workspace. */ - @JsonIgnore private String name; + private String name; public UpdateAppRequest setApp(App app) { this.app = app; @@ -56,4 +65,40 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateAppRequest.class).add("app", app).add("name", name).toString(); } + + UpdateAppRequestPb toPb() { + UpdateAppRequestPb pb = new UpdateAppRequestPb(); + pb.setApp(app); + pb.setName(name); + + return pb; + } + + static UpdateAppRequest fromPb(UpdateAppRequestPb pb) { + UpdateAppRequest model = new UpdateAppRequest(); + model.setApp(pb.getApp()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateAppRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateAppRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAppRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAppRequestDeserializer extends JsonDeserializer { + @Override + public UpdateAppRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAppRequestPb pb = mapper.readValue(p, UpdateAppRequestPb.class); + return UpdateAppRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequestPb.java new file mode 100755 index 000000000..cd414f4c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequestPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update an app */ +@Generated +class UpdateAppRequestPb { + @JsonProperty("app") + private App app; + + @JsonIgnore private String name; + + public UpdateAppRequestPb setApp(App app) { + this.app = app; + return this; + } + + public App getApp() { + return app; + } + + public UpdateAppRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAppRequestPb that = (UpdateAppRequestPb) o; + return Objects.equals(app, that.app) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(app, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateAppRequestPb.class).add("app", app).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfiguration.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfiguration.java index 8ec44d018..543d1d1cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfiguration.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ActionConfiguration.ActionConfigurationSerializer.class) +@JsonDeserialize(using = ActionConfiguration.ActionConfigurationDeserializer.class) public class ActionConfiguration { /** Databricks action configuration ID. */ - @JsonProperty("action_configuration_id") private String actionConfigurationId; /** The type of the action. */ - @JsonProperty("action_type") private ActionConfigurationType actionType; /** Target for the action. For example, an email address. 
*/ - @JsonProperty("target") private String target; public ActionConfiguration setActionConfigurationId(String actionConfigurationId) { @@ -71,4 +79,43 @@ public String toString() { .add("target", target) .toString(); } + + ActionConfigurationPb toPb() { + ActionConfigurationPb pb = new ActionConfigurationPb(); + pb.setActionConfigurationId(actionConfigurationId); + pb.setActionType(actionType); + pb.setTarget(target); + + return pb; + } + + static ActionConfiguration fromPb(ActionConfigurationPb pb) { + ActionConfiguration model = new ActionConfiguration(); + model.setActionConfigurationId(pb.getActionConfigurationId()); + model.setActionType(pb.getActionType()); + model.setTarget(pb.getTarget()); + + return model; + } + + public static class ActionConfigurationSerializer extends JsonSerializer { + @Override + public void serialize(ActionConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ActionConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ActionConfigurationDeserializer + extends JsonDeserializer { + @Override + public ActionConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ActionConfigurationPb pb = mapper.readValue(p, ActionConfigurationPb.class); + return ActionConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfigurationPb.java new file mode 100755 index 000000000..1f2768770 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfigurationPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ActionConfigurationPb { + @JsonProperty("action_configuration_id") + private String actionConfigurationId; + + @JsonProperty("action_type") + private ActionConfigurationType actionType; + + @JsonProperty("target") + private String target; + + public ActionConfigurationPb setActionConfigurationId(String actionConfigurationId) { + this.actionConfigurationId = actionConfigurationId; + return this; + } + + public String getActionConfigurationId() { + return actionConfigurationId; + } + + public ActionConfigurationPb setActionType(ActionConfigurationType actionType) { + this.actionType = actionType; + return this; + } + + public ActionConfigurationType getActionType() { + return actionType; + } + + public ActionConfigurationPb setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ActionConfigurationPb that = (ActionConfigurationPb) o; + return Objects.equals(actionConfigurationId, that.actionConfigurationId) + && Objects.equals(actionType, that.actionType) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(actionConfigurationId, actionType, target); + } + + @Override + public String toString() { + return new ToStringer(ActionConfigurationPb.class) + .add("actionConfigurationId", actionConfigurationId) + .add("actionType", actionType) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfiguration.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfiguration.java index 918ec6d08..780b9b271 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfiguration.java @@ -4,43 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AlertConfiguration.AlertConfigurationSerializer.class) +@JsonDeserialize(using = AlertConfiguration.AlertConfigurationDeserializer.class) public class AlertConfiguration { /** * Configured actions for this alert. These define what happens when an alert enters a triggered * state. */ - @JsonProperty("action_configurations") private Collection actionConfigurations; /** Databricks alert configuration ID. */ - @JsonProperty("alert_configuration_id") private String alertConfigurationId; /** * The threshold for the budget alert to determine if it is in a triggered state. The number is * evaluated based on `quantity_type`. */ - @JsonProperty("quantity_threshold") private String quantityThreshold; /** * The way to calculate cost for this budget alert. This is what `quantity_threshold` is measured * in. 
*/ - @JsonProperty("quantity_type") private AlertConfigurationQuantityType quantityType; /** The time window of usage data for the budget. */ - @JsonProperty("time_period") private AlertConfigurationTimePeriod timePeriod; /** The evaluation method to determine when this budget alert is in a triggered state. */ - @JsonProperty("trigger_type") private AlertConfigurationTriggerType triggerType; public AlertConfiguration setActionConfigurations( @@ -133,4 +138,48 @@ public String toString() { .add("triggerType", triggerType) .toString(); } + + AlertConfigurationPb toPb() { + AlertConfigurationPb pb = new AlertConfigurationPb(); + pb.setActionConfigurations(actionConfigurations); + pb.setAlertConfigurationId(alertConfigurationId); + pb.setQuantityThreshold(quantityThreshold); + pb.setQuantityType(quantityType); + pb.setTimePeriod(timePeriod); + pb.setTriggerType(triggerType); + + return pb; + } + + static AlertConfiguration fromPb(AlertConfigurationPb pb) { + AlertConfiguration model = new AlertConfiguration(); + model.setActionConfigurations(pb.getActionConfigurations()); + model.setAlertConfigurationId(pb.getAlertConfigurationId()); + model.setQuantityThreshold(pb.getQuantityThreshold()); + model.setQuantityType(pb.getQuantityType()); + model.setTimePeriod(pb.getTimePeriod()); + model.setTriggerType(pb.getTriggerType()); + + return model; + } + + public static class AlertConfigurationSerializer extends JsonSerializer { + @Override + public void serialize(AlertConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertConfigurationDeserializer extends JsonDeserializer { + @Override + public AlertConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertConfigurationPb pb = mapper.readValue(p, AlertConfigurationPb.class); + return AlertConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfigurationPb.java new file mode 100755 index 000000000..6f49206bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/AlertConfigurationPb.java @@ -0,0 +1,121 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AlertConfigurationPb { + @JsonProperty("action_configurations") + private Collection actionConfigurations; + + @JsonProperty("alert_configuration_id") + private String alertConfigurationId; + + @JsonProperty("quantity_threshold") + private String quantityThreshold; + + @JsonProperty("quantity_type") + private AlertConfigurationQuantityType quantityType; + + @JsonProperty("time_period") + private AlertConfigurationTimePeriod timePeriod; + + @JsonProperty("trigger_type") + private AlertConfigurationTriggerType triggerType; + + public AlertConfigurationPb setActionConfigurations( + Collection actionConfigurations) { + this.actionConfigurations = actionConfigurations; + return this; + } + + public Collection getActionConfigurations() { + return actionConfigurations; + } + + public AlertConfigurationPb setAlertConfigurationId(String alertConfigurationId) { + this.alertConfigurationId = alertConfigurationId; + return this; + } + + public String getAlertConfigurationId() { + return alertConfigurationId; + } + + public AlertConfigurationPb setQuantityThreshold(String 
quantityThreshold) { + this.quantityThreshold = quantityThreshold; + return this; + } + + public String getQuantityThreshold() { + return quantityThreshold; + } + + public AlertConfigurationPb setQuantityType(AlertConfigurationQuantityType quantityType) { + this.quantityType = quantityType; + return this; + } + + public AlertConfigurationQuantityType getQuantityType() { + return quantityType; + } + + public AlertConfigurationPb setTimePeriod(AlertConfigurationTimePeriod timePeriod) { + this.timePeriod = timePeriod; + return this; + } + + public AlertConfigurationTimePeriod getTimePeriod() { + return timePeriod; + } + + public AlertConfigurationPb setTriggerType(AlertConfigurationTriggerType triggerType) { + this.triggerType = triggerType; + return this; + } + + public AlertConfigurationTriggerType getTriggerType() { + return triggerType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertConfigurationPb that = (AlertConfigurationPb) o; + return Objects.equals(actionConfigurations, that.actionConfigurations) + && Objects.equals(alertConfigurationId, that.alertConfigurationId) + && Objects.equals(quantityThreshold, that.quantityThreshold) + && Objects.equals(quantityType, that.quantityType) + && Objects.equals(timePeriod, that.timePeriod) + && Objects.equals(triggerType, that.triggerType); + } + + @Override + public int hashCode() { + return Objects.hash( + actionConfigurations, + alertConfigurationId, + quantityThreshold, + quantityType, + timePeriod, + triggerType); + } + + @Override + public String toString() { + return new ToStringer(AlertConfigurationPb.class) + .add("actionConfigurations", actionConfigurations) + .add("alertConfigurationId", alertConfigurationId) + .add("quantityThreshold", quantityThreshold) + .add("quantityType", quantityType) + .add("timePeriod", timePeriod) + .add("triggerType", triggerType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java index 47caf60ea..86be891d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java @@ -22,7 +22,7 @@ public DownloadResponse download(DownloadRequest request) { String.format("/api/2.0/accounts/%s/usage/download", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "text/plain"); return apiClient.execute(req, DownloadResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfiguration.java index ca11d4e52..ba5272d9b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfiguration.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = BudgetConfiguration.BudgetConfigurationSerializer.class) +@JsonDeserialize(using = BudgetConfiguration.BudgetConfigurationDeserializer.class) public class BudgetConfiguration { /** Databricks account ID. */ - @JsonProperty("account_id") private String accountId; /** * Alerts to configure when this budget is in a triggered state. Budgets must have exactly one * alert configuration. */ - @JsonProperty("alert_configurations") private Collection alertConfigurations; /** Databricks budget configuration ID. */ - @JsonProperty("budget_configuration_id") private String budgetConfigurationId; /** Creation time of this budget configuration. */ - @JsonProperty("create_time") private Long createTime; /** Human-readable name of budget configuration. Max Length: 128 */ - @JsonProperty("display_name") private String displayName; /** @@ -38,11 +44,9 @@ public class BudgetConfiguration { * scope of what is considered for this budget. Leave empty to include all usage for this account. * All provided filters must be matched for usage to be included. */ - @JsonProperty("filter") private BudgetConfigurationFilter filter; /** Update time of this budget configuration. 
*/ - @JsonProperty("update_time") private Long updateTime; public BudgetConfiguration setAccountId(String accountId) { @@ -147,4 +151,51 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + BudgetConfigurationPb toPb() { + BudgetConfigurationPb pb = new BudgetConfigurationPb(); + pb.setAccountId(accountId); + pb.setAlertConfigurations(alertConfigurations); + pb.setBudgetConfigurationId(budgetConfigurationId); + pb.setCreateTime(createTime); + pb.setDisplayName(displayName); + pb.setFilter(filter); + pb.setUpdateTime(updateTime); + + return pb; + } + + static BudgetConfiguration fromPb(BudgetConfigurationPb pb) { + BudgetConfiguration model = new BudgetConfiguration(); + model.setAccountId(pb.getAccountId()); + model.setAlertConfigurations(pb.getAlertConfigurations()); + model.setBudgetConfigurationId(pb.getBudgetConfigurationId()); + model.setCreateTime(pb.getCreateTime()); + model.setDisplayName(pb.getDisplayName()); + model.setFilter(pb.getFilter()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class BudgetConfigurationSerializer extends JsonSerializer { + @Override + public void serialize(BudgetConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BudgetConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetConfigurationDeserializer + extends JsonDeserializer { + @Override + public BudgetConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetConfigurationPb pb = mapper.readValue(p, BudgetConfigurationPb.class); + return BudgetConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilter.java index ad63fbeb8..840f6f494 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilter.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = BudgetConfigurationFilter.BudgetConfigurationFilterSerializer.class) +@JsonDeserialize(using = BudgetConfigurationFilter.BudgetConfigurationFilterDeserializer.class) public class BudgetConfigurationFilter { /** * A list of tag keys and values that will limit the budget to usage that includes those specific * custom tags. Tags are case-sensitive and should be entered exactly as they appear in your usage * data. */ - @JsonProperty("tags") private Collection tags; /** If provided, usage must match with the provided Databricks workspace IDs. 
*/ - @JsonProperty("workspace_id") private BudgetConfigurationFilterWorkspaceIdClause workspaceId; public BudgetConfigurationFilter setTags(Collection tags) { @@ -61,4 +70,43 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + BudgetConfigurationFilterPb toPb() { + BudgetConfigurationFilterPb pb = new BudgetConfigurationFilterPb(); + pb.setTags(tags); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static BudgetConfigurationFilter fromPb(BudgetConfigurationFilterPb pb) { + BudgetConfigurationFilter model = new BudgetConfigurationFilter(); + model.setTags(pb.getTags()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class BudgetConfigurationFilterSerializer + extends JsonSerializer { + @Override + public void serialize( + BudgetConfigurationFilter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BudgetConfigurationFilterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetConfigurationFilterDeserializer + extends JsonDeserializer { + @Override + public BudgetConfigurationFilter deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetConfigurationFilterPb pb = mapper.readValue(p, BudgetConfigurationFilterPb.class); + return BudgetConfigurationFilter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClause.java index d8bfa629f..f8529fc73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClause.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClause.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = BudgetConfigurationFilterClause.BudgetConfigurationFilterClauseSerializer.class) +@JsonDeserialize( + using = BudgetConfigurationFilterClause.BudgetConfigurationFilterClauseDeserializer.class) public class BudgetConfigurationFilterClause { /** */ - @JsonProperty("operator") private BudgetConfigurationFilterOperator operator; /** */ - @JsonProperty("values") private Collection values; public BudgetConfigurationFilterClause setOperator(BudgetConfigurationFilterOperator operator) { @@ -56,4 +67,44 @@ public 
String toString() { .add("values", values) .toString(); } + + BudgetConfigurationFilterClausePb toPb() { + BudgetConfigurationFilterClausePb pb = new BudgetConfigurationFilterClausePb(); + pb.setOperator(operator); + pb.setValues(values); + + return pb; + } + + static BudgetConfigurationFilterClause fromPb(BudgetConfigurationFilterClausePb pb) { + BudgetConfigurationFilterClause model = new BudgetConfigurationFilterClause(); + model.setOperator(pb.getOperator()); + model.setValues(pb.getValues()); + + return model; + } + + public static class BudgetConfigurationFilterClauseSerializer + extends JsonSerializer { + @Override + public void serialize( + BudgetConfigurationFilterClause value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BudgetConfigurationFilterClausePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetConfigurationFilterClauseDeserializer + extends JsonDeserializer { + @Override + public BudgetConfigurationFilterClause deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetConfigurationFilterClausePb pb = + mapper.readValue(p, BudgetConfigurationFilterClausePb.class); + return BudgetConfigurationFilterClause.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClausePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClausePb.java new file mode 100755 index 000000000..ff9fa29e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterClausePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BudgetConfigurationFilterClausePb { + @JsonProperty("operator") + private BudgetConfigurationFilterOperator operator; + + @JsonProperty("values") + private Collection values; + + public BudgetConfigurationFilterClausePb setOperator(BudgetConfigurationFilterOperator operator) { + this.operator = operator; + return this; + } + + public BudgetConfigurationFilterOperator getOperator() { + return operator; + } + + public BudgetConfigurationFilterClausePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetConfigurationFilterClausePb that = (BudgetConfigurationFilterClausePb) o; + return Objects.equals(operator, that.operator) && Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(operator, values); + } + + @Override + public String toString() { + return new ToStringer(BudgetConfigurationFilterClausePb.class) + .add("operator", operator) + .add("values", values) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterPb.java new file mode 100755 index 000000000..7bec2c117 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BudgetConfigurationFilterPb { + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("workspace_id") + private BudgetConfigurationFilterWorkspaceIdClause workspaceId; + + public BudgetConfigurationFilterPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public BudgetConfigurationFilterPb setWorkspaceId( + BudgetConfigurationFilterWorkspaceIdClause workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public BudgetConfigurationFilterWorkspaceIdClause getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetConfigurationFilterPb that = (BudgetConfigurationFilterPb) o; + return Objects.equals(tags, that.tags) && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(tags, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(BudgetConfigurationFilterPb.class) + .add("tags", tags) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClause.java index 862952af8..dcbbd69cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClause.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClause.java @@ -4,17 +4,28 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = BudgetConfigurationFilterTagClause.BudgetConfigurationFilterTagClauseSerializer.class) +@JsonDeserialize( + using = BudgetConfigurationFilterTagClause.BudgetConfigurationFilterTagClauseDeserializer.class) public class BudgetConfigurationFilterTagClause { /** */ - @JsonProperty("key") private String key; /** */ - @JsonProperty("value") private BudgetConfigurationFilterClause value; public BudgetConfigurationFilterTagClause setKey(String key) { @@ -55,4 +66,44 @@ public String toString() { .add("value", value) .toString(); } + + BudgetConfigurationFilterTagClausePb toPb() { + BudgetConfigurationFilterTagClausePb pb = new BudgetConfigurationFilterTagClausePb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static BudgetConfigurationFilterTagClause fromPb(BudgetConfigurationFilterTagClausePb pb) { + BudgetConfigurationFilterTagClause model = new BudgetConfigurationFilterTagClause(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class BudgetConfigurationFilterTagClauseSerializer + extends JsonSerializer { + @Override + public void serialize( + BudgetConfigurationFilterTagClause value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
BudgetConfigurationFilterTagClausePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetConfigurationFilterTagClauseDeserializer + extends JsonDeserializer { + @Override + public BudgetConfigurationFilterTagClause deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetConfigurationFilterTagClausePb pb = + mapper.readValue(p, BudgetConfigurationFilterTagClausePb.class); + return BudgetConfigurationFilterTagClause.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClausePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClausePb.java new file mode 100755 index 000000000..d5be6d53a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterTagClausePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class BudgetConfigurationFilterTagClausePb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private BudgetConfigurationFilterClause value; + + public BudgetConfigurationFilterTagClausePb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public BudgetConfigurationFilterTagClausePb setValue(BudgetConfigurationFilterClause value) { + this.value = value; + return this; + } + + public BudgetConfigurationFilterClause getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetConfigurationFilterTagClausePb that = (BudgetConfigurationFilterTagClausePb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(BudgetConfigurationFilterTagClausePb.class) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClause.java index 989017d67..2f6c893f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClause.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClause.java @@ -4,18 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + BudgetConfigurationFilterWorkspaceIdClause + .BudgetConfigurationFilterWorkspaceIdClauseSerializer.class) +@JsonDeserialize( + using = + BudgetConfigurationFilterWorkspaceIdClause + .BudgetConfigurationFilterWorkspaceIdClauseDeserializer.class) public class BudgetConfigurationFilterWorkspaceIdClause { /** */ - @JsonProperty("operator") private BudgetConfigurationFilterOperator operator; /** */ - @JsonProperty("values") private Collection values; public BudgetConfigurationFilterWorkspaceIdClause setOperator( @@ -58,4 +73,49 @@ public String toString() { .add("values", values) .toString(); } + + BudgetConfigurationFilterWorkspaceIdClausePb toPb() { + BudgetConfigurationFilterWorkspaceIdClausePb pb = + new BudgetConfigurationFilterWorkspaceIdClausePb(); + pb.setOperator(operator); + pb.setValues(values); + + return pb; + } + + static BudgetConfigurationFilterWorkspaceIdClause fromPb( + BudgetConfigurationFilterWorkspaceIdClausePb pb) { + BudgetConfigurationFilterWorkspaceIdClause model = + new BudgetConfigurationFilterWorkspaceIdClause(); + model.setOperator(pb.getOperator()); + model.setValues(pb.getValues()); + + return model; + } + + public static class BudgetConfigurationFilterWorkspaceIdClauseSerializer + extends JsonSerializer { + @Override + public void serialize( + 
BudgetConfigurationFilterWorkspaceIdClause value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + BudgetConfigurationFilterWorkspaceIdClausePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetConfigurationFilterWorkspaceIdClauseDeserializer + extends JsonDeserializer { + @Override + public BudgetConfigurationFilterWorkspaceIdClause deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetConfigurationFilterWorkspaceIdClausePb pb = + mapper.readValue(p, BudgetConfigurationFilterWorkspaceIdClausePb.class); + return BudgetConfigurationFilterWorkspaceIdClause.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClausePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClausePb.java new file mode 100755 index 000000000..ba632093a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationFilterWorkspaceIdClausePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BudgetConfigurationFilterWorkspaceIdClausePb { + @JsonProperty("operator") + private BudgetConfigurationFilterOperator operator; + + @JsonProperty("values") + private Collection values; + + public BudgetConfigurationFilterWorkspaceIdClausePb setOperator( + BudgetConfigurationFilterOperator operator) { + this.operator = operator; + return this; + } + + public BudgetConfigurationFilterOperator getOperator() { + return operator; + } + + public BudgetConfigurationFilterWorkspaceIdClausePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetConfigurationFilterWorkspaceIdClausePb that = + (BudgetConfigurationFilterWorkspaceIdClausePb) o; + return Objects.equals(operator, that.operator) && Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(operator, values); + } + + @Override + public String toString() { + return new ToStringer(BudgetConfigurationFilterWorkspaceIdClausePb.class) + .add("operator", operator) + .add("values", values) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationPb.java new file mode 100755 index 000000000..11730a24f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetConfigurationPb.java @@ -0,0 +1,136 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BudgetConfigurationPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("alert_configurations") + private Collection alertConfigurations; + + @JsonProperty("budget_configuration_id") + private String budgetConfigurationId; + + @JsonProperty("create_time") + private Long createTime; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("filter") + private BudgetConfigurationFilter filter; + + @JsonProperty("update_time") + private Long updateTime; + + public BudgetConfigurationPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public BudgetConfigurationPb setAlertConfigurations( + Collection alertConfigurations) { + this.alertConfigurations = alertConfigurations; + return this; + } + + public Collection getAlertConfigurations() { + return alertConfigurations; + } + + public BudgetConfigurationPb setBudgetConfigurationId(String budgetConfigurationId) { + this.budgetConfigurationId = budgetConfigurationId; + return this; + } + + public String getBudgetConfigurationId() { + return budgetConfigurationId; + } + + public BudgetConfigurationPb setCreateTime(Long createTime) { + this.createTime = createTime; + return this; + } + + public Long getCreateTime() { + return createTime; + } + + public BudgetConfigurationPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public BudgetConfigurationPb setFilter(BudgetConfigurationFilter filter) { + this.filter = filter; + return this; + } + + public BudgetConfigurationFilter getFilter() { + return filter; 
+ } + + public BudgetConfigurationPb setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + return this; + } + + public Long getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetConfigurationPb that = (BudgetConfigurationPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(alertConfigurations, that.alertConfigurations) + && Objects.equals(budgetConfigurationId, that.budgetConfigurationId) + && Objects.equals(createTime, that.createTime) + && Objects.equals(displayName, that.displayName) + && Objects.equals(filter, that.filter) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + alertConfigurations, + budgetConfigurationId, + createTime, + displayName, + filter, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(BudgetConfigurationPb.class) + .add("accountId", accountId) + .add("alertConfigurations", alertConfigurations) + .add("budgetConfigurationId", budgetConfigurationId) + .add("createTime", createTime) + .add("displayName", displayName) + .add("filter", filter) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java index 66355b115..53d3828aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java @@ -4,26 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Contains the BudgetPolicy details. */ @Generated +@JsonSerialize(using = BudgetPolicy.BudgetPolicySerializer.class) +@JsonDeserialize(using = BudgetPolicy.BudgetPolicyDeserializer.class) public class BudgetPolicy { /** * List of workspaces that this budget policy will be exclusively bound to. An empty binding * implies that this budget policy is open to any workspace in the account. */ - @JsonProperty("binding_workspace_ids") private Collection bindingWorkspaceIds; /** A list of tags defined by the customer. At most 20 entries are allowed per policy. */ - @JsonProperty("custom_tags") private Collection customTags; /** The Id of the policy. This field is generated by Databricks and globally unique. */ - @JsonProperty("policy_id") private String policyId; /** @@ -31,7 +39,6 @@ public class BudgetPolicy { * from the ISO 8859-1 (latin1) set. - Can't start with reserved keywords such as * `databricks:default-policy`. 
*/ - @JsonProperty("policy_name") private String policyName; public BudgetPolicy setBindingWorkspaceIds(Collection bindingWorkspaceIds) { @@ -96,4 +103,43 @@ public String toString() { .add("policyName", policyName) .toString(); } + + BudgetPolicyPb toPb() { + BudgetPolicyPb pb = new BudgetPolicyPb(); + pb.setBindingWorkspaceIds(bindingWorkspaceIds); + pb.setCustomTags(customTags); + pb.setPolicyId(policyId); + pb.setPolicyName(policyName); + + return pb; + } + + static BudgetPolicy fromPb(BudgetPolicyPb pb) { + BudgetPolicy model = new BudgetPolicy(); + model.setBindingWorkspaceIds(pb.getBindingWorkspaceIds()); + model.setCustomTags(pb.getCustomTags()); + model.setPolicyId(pb.getPolicyId()); + model.setPolicyName(pb.getPolicyName()); + + return model; + } + + public static class BudgetPolicySerializer extends JsonSerializer { + @Override + public void serialize(BudgetPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BudgetPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BudgetPolicyDeserializer extends JsonDeserializer { + @Override + public BudgetPolicy deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BudgetPolicyPb pb = mapper.readValue(p, BudgetPolicyPb.class); + return BudgetPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java index dcf84cf42..7aede02b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java @@ -22,7 +22,7 @@ public BudgetPolicy create(CreateBudgetPolicyRequest request) { String.format("/api/2.1/accounts/%s/budget-policies", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, BudgetPolicy.class); @@ -39,7 +39,7 @@ public void delete(DeleteBudgetPolicyRequest request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public BudgetPolicy get(GetBudgetPolicyRequest request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, BudgetPolicy.class); } catch (IOException e) { @@ -69,7 +69,7 @@ public ListBudgetPoliciesResponse list(ListBudgetPoliciesRequest request) { String.format("/api/2.1/accounts/%s/budget-policies", 
apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListBudgetPoliciesResponse.class); } catch (IOException e) { @@ -85,7 +85,7 @@ public BudgetPolicy update(UpdateBudgetPolicyRequest request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, BudgetPolicy.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyPb.java new file mode 100755 index 000000000..01a83f888 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyPb.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Contains the BudgetPolicy details. 
*/ +@Generated +class BudgetPolicyPb { + @JsonProperty("binding_workspace_ids") + private Collection bindingWorkspaceIds; + + @JsonProperty("custom_tags") + private Collection customTags; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("policy_name") + private String policyName; + + public BudgetPolicyPb setBindingWorkspaceIds(Collection bindingWorkspaceIds) { + this.bindingWorkspaceIds = bindingWorkspaceIds; + return this; + } + + public Collection getBindingWorkspaceIds() { + return bindingWorkspaceIds; + } + + public BudgetPolicyPb setCustomTags( + Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + public BudgetPolicyPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public BudgetPolicyPb setPolicyName(String policyName) { + this.policyName = policyName; + return this; + } + + public String getPolicyName() { + return policyName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BudgetPolicyPb that = (BudgetPolicyPb) o; + return Objects.equals(bindingWorkspaceIds, that.bindingWorkspaceIds) + && Objects.equals(customTags, that.customTags) + && Objects.equals(policyId, that.policyId) + && Objects.equals(policyName, that.policyName); + } + + @Override + public int hashCode() { + return Objects.hash(bindingWorkspaceIds, customTags, policyId, policyName); + } + + @Override + public String toString() { + return new ToStringer(BudgetPolicyPb.class) + .add("bindingWorkspaceIds", bindingWorkspaceIds) + .add("customTags", customTags) + .add("policyId", policyId) + .add("policyName", policyName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java index 7bf0119d8..45ce98a55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java @@ -21,7 +21,7 @@ public CreateBudgetConfigurationResponse create(CreateBudgetConfigurationRequest String path = String.format("/api/2.1/accounts/%s/budgets", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateBudgetConfigurationResponse.class); @@ -38,7 +38,7 @@ public void delete(DeleteBudgetConfigurationRequest request) { apiClient.configuredAccountID(), request.getBudgetId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteBudgetConfigurationResponse.class); } catch (IOException e) { @@ -54,7 +54,7 @@ public GetBudgetConfigurationResponse get(GetBudgetConfigurationRequest request) apiClient.configuredAccountID(), request.getBudgetId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetBudgetConfigurationResponse.class); } catch (IOException e) { @@ -67,7 +67,7 @@ public ListBudgetConfigurationsResponse list(ListBudgetConfigurationsRequest req String path = String.format("/api/2.1/accounts/%s/budgets", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", 
"application/json"); return apiClient.execute(req, ListBudgetConfigurationsResponse.class); } catch (IOException e) { @@ -83,7 +83,7 @@ public UpdateBudgetConfigurationResponse update(UpdateBudgetConfigurationRequest apiClient.configuredAccountID(), request.getBudgetId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateBudgetConfigurationResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Converters.java new file mode 100755 index 000000000..5e007e450 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.billing; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) 
+ .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java index 60f8d7e60..56e23f87c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateBillingUsageDashboardRequest.CreateBillingUsageDashboardRequestSerializer.class) +@JsonDeserialize( + using = CreateBillingUsageDashboardRequest.CreateBillingUsageDashboardRequestDeserializer.class) public class CreateBillingUsageDashboardRequest { /** * Workspace level usage dashboard shows usage data for the specified workspace ID. Global level * usage dashboard shows usage data for all workspaces in the account. */ - @JsonProperty("dashboard_type") private UsageDashboardType dashboardType; /** The workspace ID of the workspace in which the usage dashboard is created. */ - @JsonProperty("workspace_id") private Long workspaceId; public CreateBillingUsageDashboardRequest setDashboardType(UsageDashboardType dashboardType) { @@ -59,4 +70,44 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + CreateBillingUsageDashboardRequestPb toPb() { + CreateBillingUsageDashboardRequestPb pb = new CreateBillingUsageDashboardRequestPb(); + pb.setDashboardType(dashboardType); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static CreateBillingUsageDashboardRequest fromPb(CreateBillingUsageDashboardRequestPb pb) { + CreateBillingUsageDashboardRequest model = new CreateBillingUsageDashboardRequest(); + model.setDashboardType(pb.getDashboardType()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class CreateBillingUsageDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBillingUsageDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
CreateBillingUsageDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBillingUsageDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public CreateBillingUsageDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBillingUsageDashboardRequestPb pb = + mapper.readValue(p, CreateBillingUsageDashboardRequestPb.class); + return CreateBillingUsageDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequestPb.java new file mode 100755 index 000000000..2b1d2ecb0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateBillingUsageDashboardRequestPb { + @JsonProperty("dashboard_type") + private UsageDashboardType dashboardType; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public CreateBillingUsageDashboardRequestPb setDashboardType(UsageDashboardType dashboardType) { + this.dashboardType = dashboardType; + return this; + } + + public UsageDashboardType getDashboardType() { + return dashboardType; + } + + public CreateBillingUsageDashboardRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBillingUsageDashboardRequestPb that = (CreateBillingUsageDashboardRequestPb) o; + return Objects.equals(dashboardType, that.dashboardType) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardType, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(CreateBillingUsageDashboardRequestPb.class) + .add("dashboardType", dashboardType) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponse.java index f70b18fb1..9bec43aef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponse.java @@ -4,13 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateBillingUsageDashboardResponse.CreateBillingUsageDashboardResponseSerializer.class) +@JsonDeserialize( + using = + CreateBillingUsageDashboardResponse.CreateBillingUsageDashboardResponseDeserializer.class) public class CreateBillingUsageDashboardResponse { /** The unique id of the usage dashboard. 
*/ - @JsonProperty("dashboard_id") private String dashboardId; public CreateBillingUsageDashboardResponse setDashboardId(String dashboardId) { @@ -41,4 +54,42 @@ public String toString() { .add("dashboardId", dashboardId) .toString(); } + + CreateBillingUsageDashboardResponsePb toPb() { + CreateBillingUsageDashboardResponsePb pb = new CreateBillingUsageDashboardResponsePb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static CreateBillingUsageDashboardResponse fromPb(CreateBillingUsageDashboardResponsePb pb) { + CreateBillingUsageDashboardResponse model = new CreateBillingUsageDashboardResponse(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class CreateBillingUsageDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBillingUsageDashboardResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateBillingUsageDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBillingUsageDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public CreateBillingUsageDashboardResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBillingUsageDashboardResponsePb pb = + mapper.readValue(p, CreateBillingUsageDashboardResponsePb.class); + return CreateBillingUsageDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponsePb.java new file mode 100755 index 000000000..6394ce9ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateBillingUsageDashboardResponsePb { + @JsonProperty("dashboard_id") + private String dashboardId; + + public CreateBillingUsageDashboardResponsePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBillingUsageDashboardResponsePb that = (CreateBillingUsageDashboardResponsePb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(CreateBillingUsageDashboardResponsePb.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudget.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudget.java index ef5163ca8..1ec063c09 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudget.java @@ -4,25 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateBudgetConfigurationBudget.CreateBudgetConfigurationBudgetSerializer.class) +@JsonDeserialize( + using = CreateBudgetConfigurationBudget.CreateBudgetConfigurationBudgetDeserializer.class) public class CreateBudgetConfigurationBudget { /** Databricks account ID. */ - @JsonProperty("account_id") private String accountId; /** * Alerts to configure when this budget is in a triggered state. Budgets must have exactly one * alert configuration. */ - @JsonProperty("alert_configurations") private Collection alertConfigurations; /** Human-readable name of budget configuration. Max Length: 128 */ - @JsonProperty("display_name") private String displayName; /** @@ -30,7 +40,6 @@ public class CreateBudgetConfigurationBudget { * scope of what is considered for this budget. Leave empty to include all usage for this account. 
* All provided filters must be matched for usage to be included. */ - @JsonProperty("filter") private BudgetConfigurationFilter filter; public CreateBudgetConfigurationBudget setAccountId(String accountId) { @@ -95,4 +104,48 @@ public String toString() { .add("filter", filter) .toString(); } + + CreateBudgetConfigurationBudgetPb toPb() { + CreateBudgetConfigurationBudgetPb pb = new CreateBudgetConfigurationBudgetPb(); + pb.setAccountId(accountId); + pb.setAlertConfigurations(alertConfigurations); + pb.setDisplayName(displayName); + pb.setFilter(filter); + + return pb; + } + + static CreateBudgetConfigurationBudget fromPb(CreateBudgetConfigurationBudgetPb pb) { + CreateBudgetConfigurationBudget model = new CreateBudgetConfigurationBudget(); + model.setAccountId(pb.getAccountId()); + model.setAlertConfigurations(pb.getAlertConfigurations()); + model.setDisplayName(pb.getDisplayName()); + model.setFilter(pb.getFilter()); + + return model; + } + + public static class CreateBudgetConfigurationBudgetSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetConfigurationBudget value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateBudgetConfigurationBudgetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetConfigurationBudgetDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetConfigurationBudget deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetConfigurationBudgetPb pb = + mapper.readValue(p, CreateBudgetConfigurationBudgetPb.class); + return CreateBudgetConfigurationBudget.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurations.java index 58641fec7..48ad9dccc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurations.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurations.java @@ -4,17 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateBudgetConfigurationBudgetActionConfigurations + .CreateBudgetConfigurationBudgetActionConfigurationsSerializer.class) +@JsonDeserialize( + using = + CreateBudgetConfigurationBudgetActionConfigurations + .CreateBudgetConfigurationBudgetActionConfigurationsDeserializer.class) public class CreateBudgetConfigurationBudgetActionConfigurations { /** The type of the action. 
*/ - @JsonProperty("action_type") private ActionConfigurationType actionType; /** Target for the action. For example, an email address. */ - @JsonProperty("target") private String target; public CreateBudgetConfigurationBudgetActionConfigurations setActionType( @@ -57,4 +72,49 @@ public String toString() { .add("target", target) .toString(); } + + CreateBudgetConfigurationBudgetActionConfigurationsPb toPb() { + CreateBudgetConfigurationBudgetActionConfigurationsPb pb = + new CreateBudgetConfigurationBudgetActionConfigurationsPb(); + pb.setActionType(actionType); + pb.setTarget(target); + + return pb; + } + + static CreateBudgetConfigurationBudgetActionConfigurations fromPb( + CreateBudgetConfigurationBudgetActionConfigurationsPb pb) { + CreateBudgetConfigurationBudgetActionConfigurations model = + new CreateBudgetConfigurationBudgetActionConfigurations(); + model.setActionType(pb.getActionType()); + model.setTarget(pb.getTarget()); + + return model; + } + + public static class CreateBudgetConfigurationBudgetActionConfigurationsSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetConfigurationBudgetActionConfigurations value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateBudgetConfigurationBudgetActionConfigurationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetConfigurationBudgetActionConfigurationsDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetConfigurationBudgetActionConfigurations deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetConfigurationBudgetActionConfigurationsPb pb = + mapper.readValue(p, CreateBudgetConfigurationBudgetActionConfigurationsPb.class); + return CreateBudgetConfigurationBudgetActionConfigurations.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurationsPb.java new file mode 100755 index 000000000..bb4077cef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetActionConfigurationsPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateBudgetConfigurationBudgetActionConfigurationsPb { + @JsonProperty("action_type") + private ActionConfigurationType actionType; + + @JsonProperty("target") + private String target; + + public CreateBudgetConfigurationBudgetActionConfigurationsPb setActionType( + ActionConfigurationType actionType) { + this.actionType = actionType; + return this; + } + + public ActionConfigurationType getActionType() { + return actionType; + } + + public CreateBudgetConfigurationBudgetActionConfigurationsPb setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetConfigurationBudgetActionConfigurationsPb that = + (CreateBudgetConfigurationBudgetActionConfigurationsPb) o; + return 
Objects.equals(actionType, that.actionType) && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(actionType, target); + } + + @Override + public String toString() { + return new ToStringer(CreateBudgetConfigurationBudgetActionConfigurationsPb.class) + .add("actionType", actionType) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurations.java index 4adf795ea..2d0a035ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurations.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurations.java @@ -4,39 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateBudgetConfigurationBudgetAlertConfigurations + .CreateBudgetConfigurationBudgetAlertConfigurationsSerializer.class) +@JsonDeserialize( + using = + CreateBudgetConfigurationBudgetAlertConfigurations + 
.CreateBudgetConfigurationBudgetAlertConfigurationsDeserializer.class) public class CreateBudgetConfigurationBudgetAlertConfigurations { /** * Configured actions for this alert. These define what happens when an alert enters a triggered * state. */ - @JsonProperty("action_configurations") private Collection actionConfigurations; /** * The threshold for the budget alert to determine if it is in a triggered state. The number is * evaluated based on `quantity_type`. */ - @JsonProperty("quantity_threshold") private String quantityThreshold; /** * The way to calculate cost for this budget alert. This is what `quantity_threshold` is measured * in. */ - @JsonProperty("quantity_type") private AlertConfigurationQuantityType quantityType; /** The time window of usage data for the budget. */ - @JsonProperty("time_period") private AlertConfigurationTimePeriod timePeriod; /** The evaluation method to determine when this budget alert is in a triggered state. */ - @JsonProperty("trigger_type") private AlertConfigurationTriggerType triggerType; public CreateBudgetConfigurationBudgetAlertConfigurations setActionConfigurations( @@ -118,4 +130,55 @@ public String toString() { .add("triggerType", triggerType) .toString(); } + + CreateBudgetConfigurationBudgetAlertConfigurationsPb toPb() { + CreateBudgetConfigurationBudgetAlertConfigurationsPb pb = + new CreateBudgetConfigurationBudgetAlertConfigurationsPb(); + pb.setActionConfigurations(actionConfigurations); + pb.setQuantityThreshold(quantityThreshold); + pb.setQuantityType(quantityType); + pb.setTimePeriod(timePeriod); + pb.setTriggerType(triggerType); + + return pb; + } + + static CreateBudgetConfigurationBudgetAlertConfigurations fromPb( + CreateBudgetConfigurationBudgetAlertConfigurationsPb pb) { + CreateBudgetConfigurationBudgetAlertConfigurations model = + new CreateBudgetConfigurationBudgetAlertConfigurations(); + model.setActionConfigurations(pb.getActionConfigurations()); + 
model.setQuantityThreshold(pb.getQuantityThreshold()); + model.setQuantityType(pb.getQuantityType()); + model.setTimePeriod(pb.getTimePeriod()); + model.setTriggerType(pb.getTriggerType()); + + return model; + } + + public static class CreateBudgetConfigurationBudgetAlertConfigurationsSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetConfigurationBudgetAlertConfigurations value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateBudgetConfigurationBudgetAlertConfigurationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetConfigurationBudgetAlertConfigurationsDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetConfigurationBudgetAlertConfigurations deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetConfigurationBudgetAlertConfigurationsPb pb = + mapper.readValue(p, CreateBudgetConfigurationBudgetAlertConfigurationsPb.class); + return CreateBudgetConfigurationBudgetAlertConfigurations.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurationsPb.java new file mode 100755 index 000000000..2751d3ffe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetAlertConfigurationsPb.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateBudgetConfigurationBudgetAlertConfigurationsPb { + @JsonProperty("action_configurations") + private Collection actionConfigurations; + + @JsonProperty("quantity_threshold") + private String quantityThreshold; + + @JsonProperty("quantity_type") + private AlertConfigurationQuantityType quantityType; + + @JsonProperty("time_period") + private AlertConfigurationTimePeriod timePeriod; + + @JsonProperty("trigger_type") + private AlertConfigurationTriggerType triggerType; + + public CreateBudgetConfigurationBudgetAlertConfigurationsPb setActionConfigurations( + Collection actionConfigurations) { + this.actionConfigurations = actionConfigurations; + return this; + } + + public Collection getActionConfigurations() { + return actionConfigurations; + } + + public CreateBudgetConfigurationBudgetAlertConfigurationsPb setQuantityThreshold( + String quantityThreshold) { + this.quantityThreshold = quantityThreshold; + return this; + } + + public String getQuantityThreshold() { + return quantityThreshold; + } + + public CreateBudgetConfigurationBudgetAlertConfigurationsPb setQuantityType( + AlertConfigurationQuantityType quantityType) { + this.quantityType = quantityType; + return this; + } + + public AlertConfigurationQuantityType getQuantityType() { + return quantityType; + } + + public CreateBudgetConfigurationBudgetAlertConfigurationsPb setTimePeriod( + AlertConfigurationTimePeriod timePeriod) { + this.timePeriod = timePeriod; + return this; + } + + public AlertConfigurationTimePeriod getTimePeriod() { + return timePeriod; + } + + public CreateBudgetConfigurationBudgetAlertConfigurationsPb setTriggerType( + AlertConfigurationTriggerType triggerType) { + this.triggerType = triggerType; + return this; + 
} + + public AlertConfigurationTriggerType getTriggerType() { + return triggerType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetConfigurationBudgetAlertConfigurationsPb that = + (CreateBudgetConfigurationBudgetAlertConfigurationsPb) o; + return Objects.equals(actionConfigurations, that.actionConfigurations) + && Objects.equals(quantityThreshold, that.quantityThreshold) + && Objects.equals(quantityType, that.quantityType) + && Objects.equals(timePeriod, that.timePeriod) + && Objects.equals(triggerType, that.triggerType); + } + + @Override + public int hashCode() { + return Objects.hash( + actionConfigurations, quantityThreshold, quantityType, timePeriod, triggerType); + } + + @Override + public String toString() { + return new ToStringer(CreateBudgetConfigurationBudgetAlertConfigurationsPb.class) + .add("actionConfigurations", actionConfigurations) + .add("quantityThreshold", quantityThreshold) + .add("quantityType", quantityType) + .add("timePeriod", timePeriod) + .add("triggerType", triggerType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetPb.java new file mode 100755 index 000000000..729c056ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationBudgetPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateBudgetConfigurationBudgetPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("alert_configurations") + private Collection alertConfigurations; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("filter") + private BudgetConfigurationFilter filter; + + public CreateBudgetConfigurationBudgetPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CreateBudgetConfigurationBudgetPb setAlertConfigurations( + Collection alertConfigurations) { + this.alertConfigurations = alertConfigurations; + return this; + } + + public Collection getAlertConfigurations() { + return alertConfigurations; + } + + public CreateBudgetConfigurationBudgetPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateBudgetConfigurationBudgetPb setFilter(BudgetConfigurationFilter filter) { + this.filter = filter; + return this; + } + + public BudgetConfigurationFilter getFilter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetConfigurationBudgetPb that = (CreateBudgetConfigurationBudgetPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(alertConfigurations, that.alertConfigurations) + && Objects.equals(displayName, that.displayName) + && Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(accountId, alertConfigurations, displayName, filter); + } + + 
@Override + public String toString() { + return new ToStringer(CreateBudgetConfigurationBudgetPb.class) + .add("accountId", accountId) + .add("alertConfigurations", alertConfigurations) + .add("displayName", displayName) + .add("filter", filter) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequest.java index a0203c1ba..c93fa363f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequest.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateBudgetConfigurationRequest.CreateBudgetConfigurationRequestSerializer.class) +@JsonDeserialize( + using = CreateBudgetConfigurationRequest.CreateBudgetConfigurationRequestDeserializer.class) public class CreateBudgetConfigurationRequest { /** Properties of the new budget configuration. 
*/ - @JsonProperty("budget") private CreateBudgetConfigurationBudget budget; public CreateBudgetConfigurationRequest setBudget(CreateBudgetConfigurationBudget budget) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(CreateBudgetConfigurationRequest.class).add("budget", budget).toString(); } + + CreateBudgetConfigurationRequestPb toPb() { + CreateBudgetConfigurationRequestPb pb = new CreateBudgetConfigurationRequestPb(); + pb.setBudget(budget); + + return pb; + } + + static CreateBudgetConfigurationRequest fromPb(CreateBudgetConfigurationRequestPb pb) { + CreateBudgetConfigurationRequest model = new CreateBudgetConfigurationRequest(); + model.setBudget(pb.getBudget()); + + return model; + } + + public static class CreateBudgetConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetConfigurationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateBudgetConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetConfigurationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetConfigurationRequestPb pb = + mapper.readValue(p, CreateBudgetConfigurationRequestPb.class); + return CreateBudgetConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequestPb.java new file mode 100755 index 000000000..a7caf3eb8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateBudgetConfigurationRequestPb { + @JsonProperty("budget") + private CreateBudgetConfigurationBudget budget; + + public CreateBudgetConfigurationRequestPb setBudget(CreateBudgetConfigurationBudget budget) { + this.budget = budget; + return this; + } + + public CreateBudgetConfigurationBudget getBudget() { + return budget; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetConfigurationRequestPb that = (CreateBudgetConfigurationRequestPb) o; + return Objects.equals(budget, that.budget); + } + + @Override + public int hashCode() { + return Objects.hash(budget); + } + + @Override + public String toString() { + return new ToStringer(CreateBudgetConfigurationRequestPb.class) + .add("budget", budget) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java index 0ef4cbf60..4d800bcaf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateBudgetConfigurationResponse.CreateBudgetConfigurationResponseSerializer.class) +@JsonDeserialize( + using = CreateBudgetConfigurationResponse.CreateBudgetConfigurationResponseDeserializer.class) public class CreateBudgetConfigurationResponse { /** The created budget configuration. 
*/ - @JsonProperty("budget") private BudgetConfiguration budget; public CreateBudgetConfigurationResponse setBudget(BudgetConfiguration budget) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(CreateBudgetConfigurationResponse.class).add("budget", budget).toString(); } + + CreateBudgetConfigurationResponsePb toPb() { + CreateBudgetConfigurationResponsePb pb = new CreateBudgetConfigurationResponsePb(); + pb.setBudget(budget); + + return pb; + } + + static CreateBudgetConfigurationResponse fromPb(CreateBudgetConfigurationResponsePb pb) { + CreateBudgetConfigurationResponse model = new CreateBudgetConfigurationResponse(); + model.setBudget(pb.getBudget()); + + return model; + } + + public static class CreateBudgetConfigurationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetConfigurationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateBudgetConfigurationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetConfigurationResponseDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetConfigurationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetConfigurationResponsePb pb = + mapper.readValue(p, CreateBudgetConfigurationResponsePb.class); + return CreateBudgetConfigurationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponsePb.java new file mode 100755 index 000000000..f882b46c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateBudgetConfigurationResponsePb { + @JsonProperty("budget") + private BudgetConfiguration budget; + + public CreateBudgetConfigurationResponsePb setBudget(BudgetConfiguration budget) { + this.budget = budget; + return this; + } + + public BudgetConfiguration getBudget() { + return budget; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetConfigurationResponsePb that = (CreateBudgetConfigurationResponsePb) o; + return Objects.equals(budget, that.budget); + } + + @Override + public int hashCode() { + return Objects.hash(budget); + } + + @Override + public String toString() { + return new ToStringer(CreateBudgetConfigurationResponsePb.class) + .add("budget", budget) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java index 2ed87cd0e..2f22c3730 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A request to create a BudgetPolicy. */ @Generated +@JsonSerialize(using = CreateBudgetPolicyRequest.CreateBudgetPolicyRequestSerializer.class) +@JsonDeserialize(using = CreateBudgetPolicyRequest.CreateBudgetPolicyRequestDeserializer.class) public class CreateBudgetPolicyRequest { /** * The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must * be provided, custom_tags may need to be provided depending on the cloud provider. All other * fields are optional. */ - @JsonProperty("policy") private BudgetPolicy policy; /** * A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is * recommended. This request is only idempotent if a `request_id` is provided. 
*/ - @JsonProperty("request_id") private String requestId; public CreateBudgetPolicyRequest setPolicy(BudgetPolicy policy) { @@ -63,4 +72,43 @@ public String toString() { .add("requestId", requestId) .toString(); } + + CreateBudgetPolicyRequestPb toPb() { + CreateBudgetPolicyRequestPb pb = new CreateBudgetPolicyRequestPb(); + pb.setPolicy(policy); + pb.setRequestId(requestId); + + return pb; + } + + static CreateBudgetPolicyRequest fromPb(CreateBudgetPolicyRequestPb pb) { + CreateBudgetPolicyRequest model = new CreateBudgetPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setRequestId(pb.getRequestId()); + + return model; + } + + public static class CreateBudgetPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateBudgetPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateBudgetPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateBudgetPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateBudgetPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateBudgetPolicyRequestPb pb = mapper.readValue(p, CreateBudgetPolicyRequestPb.class); + return CreateBudgetPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequestPb.java new file mode 100755 index 000000000..73dc45423 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A request to create a BudgetPolicy. */ +@Generated +class CreateBudgetPolicyRequestPb { + @JsonProperty("policy") + private BudgetPolicy policy; + + @JsonProperty("request_id") + private String requestId; + + public CreateBudgetPolicyRequestPb setPolicy(BudgetPolicy policy) { + this.policy = policy; + return this; + } + + public BudgetPolicy getPolicy() { + return policy; + } + + public CreateBudgetPolicyRequestPb setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBudgetPolicyRequestPb that = (CreateBudgetPolicyRequestPb) o; + return Objects.equals(policy, that.policy) && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(policy, requestId); + } + + @Override + public String toString() { + return new ToStringer(CreateBudgetPolicyRequestPb.class) + .add("policy", policy) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java index 895258dbe..e669507f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java @@ -4,14 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateLogDeliveryConfigurationParams.CreateLogDeliveryConfigurationParamsSerializer.class) +@JsonDeserialize( + using = + CreateLogDeliveryConfigurationParams.CreateLogDeliveryConfigurationParamsDeserializer.class) public class CreateLogDeliveryConfigurationParams { /** The optional human-readable name of the log delivery configuration. Defaults to empty. */ - @JsonProperty("config_name") private String configName; /** @@ -22,7 +36,6 @@ public class CreateLogDeliveryConfigurationParams { *

[Configure billable usage delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("credentials_id") private String credentialsId; /** @@ -30,7 +43,6 @@ public class CreateLogDeliveryConfigurationParams { * logs are delivered to the root of the bucket. This must be a valid S3 object key. This must not * start or end with a slash character. */ - @JsonProperty("delivery_path_prefix") private String deliveryPathPrefix; /** @@ -38,7 +50,6 @@ public class CreateLogDeliveryConfigurationParams { * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`). */ - @JsonProperty("delivery_start_time") private String deliveryStartTime; /** @@ -58,7 +69,6 @@ public class CreateLogDeliveryConfigurationParams { * usage log delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("log_type") private LogType logType; /** @@ -73,7 +83,6 @@ public class CreateLogDeliveryConfigurationParams { * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View * billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html */ - @JsonProperty("output_format") private OutputFormat outputFormat; /** @@ -82,7 +91,6 @@ public class CreateLogDeliveryConfigurationParams { * configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration * is not supported, so disable a log delivery configuration that is no longer needed. */ - @JsonProperty("status") private LogDeliveryConfigStatus status; /** @@ -93,7 +101,6 @@ public class CreateLogDeliveryConfigurationParams { *

[Configure billable usage delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** @@ -108,7 +115,6 @@ public class CreateLogDeliveryConfigurationParams { * types of Databricks deployments there is only one workspace per account ID, so this field is * unnecessary. */ - @JsonProperty("workspace_ids_filter") private Collection workspaceIdsFilter; public CreateLogDeliveryConfigurationParams setConfigName(String configName) { @@ -238,4 +244,58 @@ public String toString() { .add("workspaceIdsFilter", workspaceIdsFilter) .toString(); } + + CreateLogDeliveryConfigurationParamsPb toPb() { + CreateLogDeliveryConfigurationParamsPb pb = new CreateLogDeliveryConfigurationParamsPb(); + pb.setConfigName(configName); + pb.setCredentialsId(credentialsId); + pb.setDeliveryPathPrefix(deliveryPathPrefix); + pb.setDeliveryStartTime(deliveryStartTime); + pb.setLogType(logType); + pb.setOutputFormat(outputFormat); + pb.setStatus(status); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setWorkspaceIdsFilter(workspaceIdsFilter); + + return pb; + } + + static CreateLogDeliveryConfigurationParams fromPb(CreateLogDeliveryConfigurationParamsPb pb) { + CreateLogDeliveryConfigurationParams model = new CreateLogDeliveryConfigurationParams(); + model.setConfigName(pb.getConfigName()); + model.setCredentialsId(pb.getCredentialsId()); + model.setDeliveryPathPrefix(pb.getDeliveryPathPrefix()); + model.setDeliveryStartTime(pb.getDeliveryStartTime()); + model.setLogType(pb.getLogType()); + model.setOutputFormat(pb.getOutputFormat()); + model.setStatus(pb.getStatus()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setWorkspaceIdsFilter(pb.getWorkspaceIdsFilter()); + + return model; + } + + public static class CreateLogDeliveryConfigurationParamsSerializer + extends JsonSerializer { + @Override + public void serialize( + 
CreateLogDeliveryConfigurationParams value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateLogDeliveryConfigurationParamsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateLogDeliveryConfigurationParamsDeserializer + extends JsonDeserializer { + @Override + public CreateLogDeliveryConfigurationParams deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateLogDeliveryConfigurationParamsPb pb = + mapper.readValue(p, CreateLogDeliveryConfigurationParamsPb.class); + return CreateLogDeliveryConfigurationParams.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParamsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParamsPb.java new file mode 100755 index 000000000..93ec1fc51 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParamsPb.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateLogDeliveryConfigurationParamsPb { + @JsonProperty("config_name") + private String configName; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("delivery_path_prefix") + private String deliveryPathPrefix; + + @JsonProperty("delivery_start_time") + private String deliveryStartTime; + + @JsonProperty("log_type") + private LogType logType; + + @JsonProperty("output_format") + private OutputFormat outputFormat; + + @JsonProperty("status") + private LogDeliveryConfigStatus status; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("workspace_ids_filter") + private Collection workspaceIdsFilter; + + public CreateLogDeliveryConfigurationParamsPb setConfigName(String configName) { + this.configName = configName; + return this; + } + + public String getConfigName() { + return configName; + } + + public CreateLogDeliveryConfigurationParamsPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public CreateLogDeliveryConfigurationParamsPb setDeliveryPathPrefix(String deliveryPathPrefix) { + this.deliveryPathPrefix = deliveryPathPrefix; + return this; + } + + public String getDeliveryPathPrefix() { + return deliveryPathPrefix; + } + + public CreateLogDeliveryConfigurationParamsPb setDeliveryStartTime(String deliveryStartTime) { + this.deliveryStartTime = deliveryStartTime; + return this; + } + + public String getDeliveryStartTime() { + return deliveryStartTime; + } + + public CreateLogDeliveryConfigurationParamsPb setLogType(LogType logType) { + this.logType = logType; + return this; + } + + 
public LogType getLogType() { + return logType; + } + + public CreateLogDeliveryConfigurationParamsPb setOutputFormat(OutputFormat outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + public OutputFormat getOutputFormat() { + return outputFormat; + } + + public CreateLogDeliveryConfigurationParamsPb setStatus(LogDeliveryConfigStatus status) { + this.status = status; + return this; + } + + public LogDeliveryConfigStatus getStatus() { + return status; + } + + public CreateLogDeliveryConfigurationParamsPb setStorageConfigurationId( + String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public CreateLogDeliveryConfigurationParamsPb setWorkspaceIdsFilter( + Collection workspaceIdsFilter) { + this.workspaceIdsFilter = workspaceIdsFilter; + return this; + } + + public Collection getWorkspaceIdsFilter() { + return workspaceIdsFilter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateLogDeliveryConfigurationParamsPb that = (CreateLogDeliveryConfigurationParamsPb) o; + return Objects.equals(configName, that.configName) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(deliveryPathPrefix, that.deliveryPathPrefix) + && Objects.equals(deliveryStartTime, that.deliveryStartTime) + && Objects.equals(logType, that.logType) + && Objects.equals(outputFormat, that.outputFormat) + && Objects.equals(status, that.status) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(workspaceIdsFilter, that.workspaceIdsFilter); + } + + @Override + public int hashCode() { + return Objects.hash( + configName, + credentialsId, + deliveryPathPrefix, + deliveryStartTime, + logType, + outputFormat, + status, + storageConfigurationId, + workspaceIdsFilter); + } + + @Override + 
public String toString() { + return new ToStringer(CreateLogDeliveryConfigurationParamsPb.class) + .add("configName", configName) + .add("credentialsId", credentialsId) + .add("deliveryPathPrefix", deliveryPathPrefix) + .add("deliveryStartTime", deliveryStartTime) + .add("logType", logType) + .add("outputFormat", outputFormat) + .add("status", status) + .add("storageConfigurationId", storageConfigurationId) + .add("workspaceIdsFilter", workspaceIdsFilter) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java index 61cac47b8..3df40903d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete budget */ @Generated +@JsonSerialize( + using = DeleteBudgetConfigurationRequest.DeleteBudgetConfigurationRequestSerializer.class) +@JsonDeserialize( + using = DeleteBudgetConfigurationRequest.DeleteBudgetConfigurationRequestDeserializer.class) public class 
DeleteBudgetConfigurationRequest { /** The Databricks budget configuration ID. */ - @JsonIgnore private String budgetId; + private String budgetId; public DeleteBudgetConfigurationRequest setBudgetId(String budgetId) { this.budgetId = budgetId; @@ -41,4 +54,42 @@ public String toString() { .add("budgetId", budgetId) .toString(); } + + DeleteBudgetConfigurationRequestPb toPb() { + DeleteBudgetConfigurationRequestPb pb = new DeleteBudgetConfigurationRequestPb(); + pb.setBudgetId(budgetId); + + return pb; + } + + static DeleteBudgetConfigurationRequest fromPb(DeleteBudgetConfigurationRequestPb pb) { + DeleteBudgetConfigurationRequest model = new DeleteBudgetConfigurationRequest(); + model.setBudgetId(pb.getBudgetId()); + + return model; + } + + public static class DeleteBudgetConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteBudgetConfigurationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteBudgetConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteBudgetConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteBudgetConfigurationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteBudgetConfigurationRequestPb pb = + mapper.readValue(p, DeleteBudgetConfigurationRequestPb.class); + return DeleteBudgetConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequestPb.java new file mode 100755 index 000000000..dbbb4044d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete budget */ +@Generated +class DeleteBudgetConfigurationRequestPb { + @JsonIgnore private String budgetId; + + public DeleteBudgetConfigurationRequestPb setBudgetId(String budgetId) { + this.budgetId = budgetId; + return this; + } + + public String getBudgetId() { + return budgetId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteBudgetConfigurationRequestPb that = (DeleteBudgetConfigurationRequestPb) o; + return Objects.equals(budgetId, that.budgetId); + } + + @Override + public int hashCode() { + return Objects.hash(budgetId); + } + + @Override + public String toString() { + return new ToStringer(DeleteBudgetConfigurationRequestPb.class) + .add("budgetId", budgetId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java 
index 32bb2a520..896d0e47a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DeleteBudgetConfigurationResponse.DeleteBudgetConfigurationResponseSerializer.class) +@JsonDeserialize( + using = DeleteBudgetConfigurationResponse.DeleteBudgetConfigurationResponseDeserializer.class) public class DeleteBudgetConfigurationResponse { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteBudgetConfigurationResponse.class).toString(); } + + DeleteBudgetConfigurationResponsePb toPb() { + DeleteBudgetConfigurationResponsePb pb = new DeleteBudgetConfigurationResponsePb(); + + return pb; + } + + static DeleteBudgetConfigurationResponse fromPb(DeleteBudgetConfigurationResponsePb pb) { + DeleteBudgetConfigurationResponse model = new DeleteBudgetConfigurationResponse(); + + return model; + } + + public static class DeleteBudgetConfigurationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteBudgetConfigurationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
DeleteBudgetConfigurationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteBudgetConfigurationResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteBudgetConfigurationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteBudgetConfigurationResponsePb pb = + mapper.readValue(p, DeleteBudgetConfigurationResponsePb.class); + return DeleteBudgetConfigurationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponsePb.java new file mode 100755 index 000000000..4a1969ac3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteBudgetConfigurationResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteBudgetConfigurationResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java index d5aabfb58..97290c1ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a budget policy */ @Generated +@JsonSerialize(using = DeleteBudgetPolicyRequest.DeleteBudgetPolicyRequestSerializer.class) +@JsonDeserialize(using = 
DeleteBudgetPolicyRequest.DeleteBudgetPolicyRequestDeserializer.class) public class DeleteBudgetPolicyRequest { /** The Id of the policy. */ - @JsonIgnore private String policyId; + private String policyId; public DeleteBudgetPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteBudgetPolicyRequest.class).add("policyId", policyId).toString(); } + + DeleteBudgetPolicyRequestPb toPb() { + DeleteBudgetPolicyRequestPb pb = new DeleteBudgetPolicyRequestPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static DeleteBudgetPolicyRequest fromPb(DeleteBudgetPolicyRequestPb pb) { + DeleteBudgetPolicyRequest model = new DeleteBudgetPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class DeleteBudgetPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteBudgetPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteBudgetPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteBudgetPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteBudgetPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteBudgetPolicyRequestPb pb = mapper.readValue(p, DeleteBudgetPolicyRequestPb.class); + return DeleteBudgetPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequestPb.java new file mode 100755 index 000000000..a3796051f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a budget policy */ +@Generated +class DeleteBudgetPolicyRequestPb { + @JsonIgnore private String policyId; + + public DeleteBudgetPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteBudgetPolicyRequestPb that = (DeleteBudgetPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteBudgetPolicyRequestPb.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java index 6b9b9aae8..f8670c3d3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponsePb.java new file mode 100755 index 000000000..4be20bf62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java index 1db3a94b9..5aaeb40be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Return billable usage logs */ @Generated +@JsonSerialize(using = DownloadRequest.DownloadRequestSerializer.class) +@JsonDeserialize(using = DownloadRequest.DownloadRequestDeserializer.class) public class DownloadRequest { /** Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required. */ - @JsonIgnore - @QueryParam("end_month") private String endMonth; /** @@ -21,13 +29,9 @@ public class DownloadRequest { * example the email addresses of cluster creators. Handle this information with care. Defaults to * false. */ - @JsonIgnore - @QueryParam("personal_data") private Boolean personalData; /** Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. 
*/ - @JsonIgnore - @QueryParam("start_month") private String startMonth; public DownloadRequest setEndMonth(String endMonth) { @@ -80,4 +84,42 @@ public String toString() { .add("startMonth", startMonth) .toString(); } + + DownloadRequestPb toPb() { + DownloadRequestPb pb = new DownloadRequestPb(); + pb.setEndMonth(endMonth); + pb.setPersonalData(personalData); + pb.setStartMonth(startMonth); + + return pb; + } + + static DownloadRequest fromPb(DownloadRequestPb pb) { + DownloadRequest model = new DownloadRequest(); + model.setEndMonth(pb.getEndMonth()); + model.setPersonalData(pb.getPersonalData()); + model.setStartMonth(pb.getStartMonth()); + + return model; + } + + public static class DownloadRequestSerializer extends JsonSerializer { + @Override + public void serialize(DownloadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DownloadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DownloadRequestDeserializer extends JsonDeserializer { + @Override + public DownloadRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DownloadRequestPb pb = mapper.readValue(p, DownloadRequestPb.class); + return DownloadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequestPb.java new file mode 100755 index 000000000..a044233f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Return billable usage logs */ +@Generated +class DownloadRequestPb { + @JsonIgnore + @QueryParam("end_month") + private String endMonth; + + @JsonIgnore + @QueryParam("personal_data") + private Boolean personalData; + + @JsonIgnore + @QueryParam("start_month") + private String startMonth; + + public DownloadRequestPb setEndMonth(String endMonth) { + this.endMonth = endMonth; + return this; + } + + public String getEndMonth() { + return endMonth; + } + + public DownloadRequestPb setPersonalData(Boolean personalData) { + this.personalData = personalData; + return this; + } + + public Boolean getPersonalData() { + return personalData; + } + + public DownloadRequestPb setStartMonth(String startMonth) { + this.startMonth = startMonth; + return this; + } + + public String getStartMonth() { + return startMonth; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DownloadRequestPb that = (DownloadRequestPb) o; + return Objects.equals(endMonth, that.endMonth) + && Objects.equals(personalData, that.personalData) + && Objects.equals(startMonth, that.startMonth); + } + + @Override + public int hashCode() { + return Objects.hash(endMonth, personalData, startMonth); + } + + @Override + public String toString() { + return new ToStringer(DownloadRequestPb.class) + .add("endMonth", endMonth) + .add("personalData", personalData) + .add("startMonth", startMonth) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java index 3b1e0d51e..e502bafff 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; @Generated +@JsonSerialize(using = DownloadResponse.DownloadResponseSerializer.class) +@JsonDeserialize(using = DownloadResponse.DownloadResponseDeserializer.class) public class DownloadResponse { /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; public DownloadResponse setContents(InputStream contents) { this.contents = contents; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DownloadResponse.class).add("contents", contents).toString(); } + + DownloadResponsePb toPb() { + DownloadResponsePb pb = new DownloadResponsePb(); + pb.setContents(contents); + + return pb; + } + + static DownloadResponse fromPb(DownloadResponsePb pb) { + DownloadResponse model = new DownloadResponse(); + model.setContents(pb.getContents()); + + return model; + } + + public static class DownloadResponseSerializer extends JsonSerializer { + @Override + public void serialize(DownloadResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
DownloadResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DownloadResponseDeserializer extends JsonDeserializer { + @Override + public DownloadResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DownloadResponsePb pb = mapper.readValue(p, DownloadResponsePb.class); + return DownloadResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponsePb.java new file mode 100755 index 000000000..806eacd6a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +@Generated +class DownloadResponsePb { + @JsonIgnore private InputStream contents; + + public DownloadResponsePb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DownloadResponsePb that = (DownloadResponsePb) o; + return Objects.equals(contents, that.contents); + } + + @Override + public int hashCode() { + return Objects.hash(contents); + } + + @Override + public String toString() { + return new ToStringer(DownloadResponsePb.class).add("contents", contents).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java index 25fb6093d..2804e601f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java @@ -3,9 +3,17 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,26 +21,22 @@ * will be applied in conjunction. */ @Generated +@JsonSerialize(using = Filter.FilterSerializer.class) +@JsonDeserialize(using = Filter.FilterDeserializer.class) public class Filter { /** * The policy creator user id to be filtered on. If unspecified, all policies will be returned. */ - @JsonProperty("creator_user_id") - @QueryParam("creator_user_id") private Long creatorUserId; /** * The policy creator user name to be filtered on. If unspecified, all policies will be returned. */ - @JsonProperty("creator_user_name") - @QueryParam("creator_user_name") private String creatorUserName; /** * The partial name of policies to be filtered on. If unspecified, all policies will be returned. 
*/ - @JsonProperty("policy_name") - @QueryParam("policy_name") private String policyName; public Filter setCreatorUserId(Long creatorUserId) { @@ -85,4 +89,41 @@ public String toString() { .add("policyName", policyName) .toString(); } + + FilterPb toPb() { + FilterPb pb = new FilterPb(); + pb.setCreatorUserId(creatorUserId); + pb.setCreatorUserName(creatorUserName); + pb.setPolicyName(policyName); + + return pb; + } + + static Filter fromPb(FilterPb pb) { + Filter model = new Filter(); + model.setCreatorUserId(pb.getCreatorUserId()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setPolicyName(pb.getPolicyName()); + + return model; + } + + public static class FilterSerializer extends JsonSerializer { + @Override + public void serialize(Filter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FilterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FilterDeserializer extends JsonDeserializer { + @Override + public Filter deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FilterPb pb = mapper.readValue(p, FilterPb.class); + return Filter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/FilterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/FilterPb.java new file mode 100755 index 000000000..d0756cb83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/FilterPb.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Structured representation of a filter to be applied to a list of policies. All specified filters + * will be applied in conjunction. + */ +@Generated +class FilterPb { + @JsonProperty("creator_user_id") + @QueryParam("creator_user_id") + private Long creatorUserId; + + @JsonProperty("creator_user_name") + @QueryParam("creator_user_name") + private String creatorUserName; + + @JsonProperty("policy_name") + @QueryParam("policy_name") + private String policyName; + + public FilterPb setCreatorUserId(Long creatorUserId) { + this.creatorUserId = creatorUserId; + return this; + } + + public Long getCreatorUserId() { + return creatorUserId; + } + + public FilterPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public FilterPb setPolicyName(String policyName) { + this.policyName = policyName; + return this; + } + + public String getPolicyName() { + return policyName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilterPb that = (FilterPb) o; + return Objects.equals(creatorUserId, that.creatorUserId) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(policyName, that.policyName); + } + + @Override + public int hashCode() { + return Objects.hash(creatorUserId, creatorUserName, policyName); + } + + @Override + public String toString() { + return new ToStringer(FilterPb.class) + .add("creatorUserId", creatorUserId) + .add("creatorUserName", creatorUserName) + .add("policyName", policyName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java index e51a9a075..a60f73299 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java @@ -3,25 +3,33 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get usage dashboard */ @Generated +@JsonSerialize( + using = GetBillingUsageDashboardRequest.GetBillingUsageDashboardRequestSerializer.class) +@JsonDeserialize( + using = GetBillingUsageDashboardRequest.GetBillingUsageDashboardRequestDeserializer.class) public class GetBillingUsageDashboardRequest { /** * Workspace level usage dashboard shows usage data for the specified workspace ID. Global level * usage dashboard shows usage data for all workspaces in the account. */ - @JsonIgnore - @QueryParam("dashboard_type") private UsageDashboardType dashboardType; /** The workspace ID of the workspace in which the usage dashboard is created. 
*/ - @JsonIgnore - @QueryParam("workspace_id") private Long workspaceId; public GetBillingUsageDashboardRequest setDashboardType(UsageDashboardType dashboardType) { @@ -63,4 +71,44 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + GetBillingUsageDashboardRequestPb toPb() { + GetBillingUsageDashboardRequestPb pb = new GetBillingUsageDashboardRequestPb(); + pb.setDashboardType(dashboardType); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static GetBillingUsageDashboardRequest fromPb(GetBillingUsageDashboardRequestPb pb) { + GetBillingUsageDashboardRequest model = new GetBillingUsageDashboardRequest(); + model.setDashboardType(pb.getDashboardType()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class GetBillingUsageDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetBillingUsageDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBillingUsageDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBillingUsageDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public GetBillingUsageDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBillingUsageDashboardRequestPb pb = + mapper.readValue(p, GetBillingUsageDashboardRequestPb.class); + return GetBillingUsageDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequestPb.java new file mode 100755 index 000000000..e4a7dad08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequestPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get usage dashboard */ +@Generated +class GetBillingUsageDashboardRequestPb { + @JsonIgnore + @QueryParam("dashboard_type") + private UsageDashboardType dashboardType; + + @JsonIgnore + @QueryParam("workspace_id") + private Long workspaceId; + + public GetBillingUsageDashboardRequestPb setDashboardType(UsageDashboardType dashboardType) { + this.dashboardType = dashboardType; + return this; + } + + public UsageDashboardType getDashboardType() { + return dashboardType; + } + + public GetBillingUsageDashboardRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBillingUsageDashboardRequestPb that = (GetBillingUsageDashboardRequestPb) o; + return Objects.equals(dashboardType, that.dashboardType) + && Objects.equals(workspaceId, 
that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardType, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetBillingUsageDashboardRequestPb.class) + .add("dashboardType", dashboardType) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponse.java index 5030e3c6f..3d24a2398 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponse.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GetBillingUsageDashboardResponse.GetBillingUsageDashboardResponseSerializer.class) +@JsonDeserialize( + using = GetBillingUsageDashboardResponse.GetBillingUsageDashboardResponseDeserializer.class) public class GetBillingUsageDashboardResponse { /** The unique id of the usage dashboard. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The URL of the usage dashboard. 
*/ - @JsonProperty("dashboard_url") private String dashboardUrl; public GetBillingUsageDashboardResponse setDashboardId(String dashboardId) { @@ -56,4 +67,44 @@ public String toString() { .add("dashboardUrl", dashboardUrl) .toString(); } + + GetBillingUsageDashboardResponsePb toPb() { + GetBillingUsageDashboardResponsePb pb = new GetBillingUsageDashboardResponsePb(); + pb.setDashboardId(dashboardId); + pb.setDashboardUrl(dashboardUrl); + + return pb; + } + + static GetBillingUsageDashboardResponse fromPb(GetBillingUsageDashboardResponsePb pb) { + GetBillingUsageDashboardResponse model = new GetBillingUsageDashboardResponse(); + model.setDashboardId(pb.getDashboardId()); + model.setDashboardUrl(pb.getDashboardUrl()); + + return model; + } + + public static class GetBillingUsageDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetBillingUsageDashboardResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBillingUsageDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBillingUsageDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public GetBillingUsageDashboardResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBillingUsageDashboardResponsePb pb = + mapper.readValue(p, GetBillingUsageDashboardResponsePb.class); + return GetBillingUsageDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponsePb.java new file mode 100755 index 000000000..60593ab44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetBillingUsageDashboardResponsePb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("dashboard_url") + private String dashboardUrl; + + public GetBillingUsageDashboardResponsePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public GetBillingUsageDashboardResponsePb setDashboardUrl(String dashboardUrl) { + this.dashboardUrl = dashboardUrl; + return this; + } + + public String getDashboardUrl() { + return dashboardUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBillingUsageDashboardResponsePb that = (GetBillingUsageDashboardResponsePb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(dashboardUrl, that.dashboardUrl); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, dashboardUrl); + } + + @Override + public 
String toString() { + return new ToStringer(GetBillingUsageDashboardResponsePb.class) + .add("dashboardId", dashboardId) + .add("dashboardUrl", dashboardUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java index 6e34027da..517cd81eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get budget */ @Generated +@JsonSerialize(using = GetBudgetConfigurationRequest.GetBudgetConfigurationRequestSerializer.class) +@JsonDeserialize( + using = GetBudgetConfigurationRequest.GetBudgetConfigurationRequestDeserializer.class) public class GetBudgetConfigurationRequest { /** The budget configuration ID */ - @JsonIgnore private String budgetId; + private String budgetId; public GetBudgetConfigurationRequest setBudgetId(String budgetId) { this.budgetId = budgetId; @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new 
ToStringer(GetBudgetConfigurationRequest.class).add("budgetId", budgetId).toString(); } + + GetBudgetConfigurationRequestPb toPb() { + GetBudgetConfigurationRequestPb pb = new GetBudgetConfigurationRequestPb(); + pb.setBudgetId(budgetId); + + return pb; + } + + static GetBudgetConfigurationRequest fromPb(GetBudgetConfigurationRequestPb pb) { + GetBudgetConfigurationRequest model = new GetBudgetConfigurationRequest(); + model.setBudgetId(pb.getBudgetId()); + + return model; + } + + public static class GetBudgetConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetBudgetConfigurationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBudgetConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBudgetConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public GetBudgetConfigurationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBudgetConfigurationRequestPb pb = + mapper.readValue(p, GetBudgetConfigurationRequestPb.class); + return GetBudgetConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequestPb.java new file mode 100755 index 000000000..915c82912 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get budget */ +@Generated +class GetBudgetConfigurationRequestPb { + @JsonIgnore private String budgetId; + + public GetBudgetConfigurationRequestPb setBudgetId(String budgetId) { + this.budgetId = budgetId; + return this; + } + + public String getBudgetId() { + return budgetId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBudgetConfigurationRequestPb that = (GetBudgetConfigurationRequestPb) o; + return Objects.equals(budgetId, that.budgetId); + } + + @Override + public int hashCode() { + return Objects.hash(budgetId); + } + + @Override + public String toString() { + return new ToStringer(GetBudgetConfigurationRequestPb.class) + .add("budgetId", budgetId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java index bc3c78ad4..d2c693733 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GetBudgetConfigurationResponse.GetBudgetConfigurationResponseSerializer.class) +@JsonDeserialize( + using = GetBudgetConfigurationResponse.GetBudgetConfigurationResponseDeserializer.class) public class GetBudgetConfigurationResponse { /** */ - @JsonProperty("budget") private BudgetConfiguration budget; public GetBudgetConfigurationResponse setBudget(BudgetConfiguration budget) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetBudgetConfigurationResponse.class).add("budget", budget).toString(); } + + GetBudgetConfigurationResponsePb toPb() { + GetBudgetConfigurationResponsePb pb = new GetBudgetConfigurationResponsePb(); + pb.setBudget(budget); + + return pb; + } + + static GetBudgetConfigurationResponse fromPb(GetBudgetConfigurationResponsePb pb) { + GetBudgetConfigurationResponse model = new GetBudgetConfigurationResponse(); + model.setBudget(pb.getBudget()); + + return model; + } + + public static class GetBudgetConfigurationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetBudgetConfigurationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBudgetConfigurationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBudgetConfigurationResponseDeserializer + extends JsonDeserializer { + @Override + public GetBudgetConfigurationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBudgetConfigurationResponsePb pb = + mapper.readValue(p, GetBudgetConfigurationResponsePb.class); + return GetBudgetConfigurationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponsePb.java new file mode 100755 index 000000000..829610f62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetBudgetConfigurationResponsePb { + @JsonProperty("budget") + private BudgetConfiguration budget; + + public GetBudgetConfigurationResponsePb setBudget(BudgetConfiguration budget) { + this.budget = budget; + return this; + } + + public BudgetConfiguration getBudget() { + return budget; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBudgetConfigurationResponsePb that = (GetBudgetConfigurationResponsePb) o; + return Objects.equals(budget, that.budget); + } + + @Override + public int hashCode() { + return Objects.hash(budget); + } + + @Override + public String toString() { + return new ToStringer(GetBudgetConfigurationResponsePb.class).add("budget", budget).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java index 12c33b5cd..f463ade80 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a budget policy */ @Generated +@JsonSerialize(using = GetBudgetPolicyRequest.GetBudgetPolicyRequestSerializer.class) +@JsonDeserialize(using = GetBudgetPolicyRequest.GetBudgetPolicyRequestDeserializer.class) public class GetBudgetPolicyRequest { /** The Id of the policy. 
*/ - @JsonIgnore private String policyId; + private String policyId; public GetBudgetPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetBudgetPolicyRequest.class).add("policyId", policyId).toString(); } + + GetBudgetPolicyRequestPb toPb() { + GetBudgetPolicyRequestPb pb = new GetBudgetPolicyRequestPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static GetBudgetPolicyRequest fromPb(GetBudgetPolicyRequestPb pb) { + GetBudgetPolicyRequest model = new GetBudgetPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class GetBudgetPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetBudgetPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBudgetPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBudgetPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetBudgetPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBudgetPolicyRequestPb pb = mapper.readValue(p, GetBudgetPolicyRequestPb.class); + return GetBudgetPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequestPb.java new file mode 100755 index 000000000..1ab9916ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a budget policy */ +@Generated +class GetBudgetPolicyRequestPb { + @JsonIgnore private String policyId; + + public GetBudgetPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBudgetPolicyRequestPb that = (GetBudgetPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(GetBudgetPolicyRequestPb.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java index d4b457c37..ec08970f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get log delivery configuration */ @Generated +@JsonSerialize(using = GetLogDeliveryRequest.GetLogDeliveryRequestSerializer.class) +@JsonDeserialize(using = GetLogDeliveryRequest.GetLogDeliveryRequestDeserializer.class) public class GetLogDeliveryRequest { /** Databricks log delivery configuration ID */ - @JsonIgnore private String logDeliveryConfigurationId; + private String logDeliveryConfigurationId; public GetLogDeliveryRequest setLogDeliveryConfigurationId(String logDeliveryConfigurationId) { this.logDeliveryConfigurationId = logDeliveryConfigurationId; @@ -41,4 +52,41 @@ public String toString() { .add("logDeliveryConfigurationId", logDeliveryConfigurationId) .toString(); } + + GetLogDeliveryRequestPb toPb() { + GetLogDeliveryRequestPb pb = new GetLogDeliveryRequestPb(); + pb.setLogDeliveryConfigurationId(logDeliveryConfigurationId); + + return pb; + } + + static GetLogDeliveryRequest fromPb(GetLogDeliveryRequestPb pb) { + GetLogDeliveryRequest model = new GetLogDeliveryRequest(); + model.setLogDeliveryConfigurationId(pb.getLogDeliveryConfigurationId()); + + return model; + } + + public static class GetLogDeliveryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLogDeliveryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetLogDeliveryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLogDeliveryRequestDeserializer + extends JsonDeserializer { + @Override + public GetLogDeliveryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLogDeliveryRequestPb pb = mapper.readValue(p, GetLogDeliveryRequestPb.class); + return GetLogDeliveryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequestPb.java new file mode 100755 index 000000000..fcbbd8621 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get log delivery configuration */ +@Generated +class GetLogDeliveryRequestPb { + @JsonIgnore private String logDeliveryConfigurationId; + + public GetLogDeliveryRequestPb setLogDeliveryConfigurationId(String logDeliveryConfigurationId) { + this.logDeliveryConfigurationId = logDeliveryConfigurationId; + return this; + } + + public String getLogDeliveryConfigurationId() { + return logDeliveryConfigurationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLogDeliveryRequestPb that = (GetLogDeliveryRequestPb) o; + return Objects.equals(logDeliveryConfigurationId, that.logDeliveryConfigurationId); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfigurationId); + } + + @Override + public String toString() { + return new ToStringer(GetLogDeliveryRequestPb.class) + .add("logDeliveryConfigurationId", logDeliveryConfigurationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java index b4062a5d3..f8d3db3b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java @@ -4,6 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -11,6 +21,8 @@ * control by enforcing the limit. 
*/ @Generated +@JsonSerialize(using = LimitConfig.LimitConfigSerializer.class) +@JsonDeserialize(using = LimitConfig.LimitConfigDeserializer.class) public class LimitConfig { @Override @@ -29,4 +41,35 @@ public int hashCode() { public String toString() { return new ToStringer(LimitConfig.class).toString(); } + + LimitConfigPb toPb() { + LimitConfigPb pb = new LimitConfigPb(); + + return pb; + } + + static LimitConfig fromPb(LimitConfigPb pb) { + LimitConfig model = new LimitConfig(); + + return model; + } + + public static class LimitConfigSerializer extends JsonSerializer { + @Override + public void serialize(LimitConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LimitConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LimitConfigDeserializer extends JsonDeserializer { + @Override + public LimitConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LimitConfigPb pb = mapper.readValue(p, LimitConfigPb.class); + return LimitConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfigPb.java new file mode 100755 index 000000000..453588413 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfigPb.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** + * The limit configuration of the policy. Limit configuration provide a budget policy level cost + * control by enforcing the limit. 
+ */ +@Generated +class LimitConfigPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LimitConfigPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java index 166d36749..a2036e5b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java @@ -3,20 +3,30 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get all budgets */ @Generated +@JsonSerialize( + using = ListBudgetConfigurationsRequest.ListBudgetConfigurationsRequestSerializer.class) +@JsonDeserialize( + using = ListBudgetConfigurationsRequest.ListBudgetConfigurationsRequestDeserializer.class) public class ListBudgetConfigurationsRequest { /** * A page 
token received from a previous get all budget configurations call. This token can be * used to retrieve the subsequent page. Requests first page if absent. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListBudgetConfigurationsRequest setPageToken(String pageToken) { @@ -47,4 +57,42 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListBudgetConfigurationsRequestPb toPb() { + ListBudgetConfigurationsRequestPb pb = new ListBudgetConfigurationsRequestPb(); + pb.setPageToken(pageToken); + + return pb; + } + + static ListBudgetConfigurationsRequest fromPb(ListBudgetConfigurationsRequestPb pb) { + ListBudgetConfigurationsRequest model = new ListBudgetConfigurationsRequest(); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListBudgetConfigurationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListBudgetConfigurationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListBudgetConfigurationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListBudgetConfigurationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListBudgetConfigurationsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListBudgetConfigurationsRequestPb pb = + mapper.readValue(p, ListBudgetConfigurationsRequestPb.class); + return ListBudgetConfigurationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequestPb.java new file mode 100755 index 000000000..892c58134 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get all budgets */ +@Generated +class ListBudgetConfigurationsRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListBudgetConfigurationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBudgetConfigurationsRequestPb that = (ListBudgetConfigurationsRequestPb) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListBudgetConfigurationsRequestPb.class) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java index c6c385543..6527cda95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListBudgetConfigurationsResponse.ListBudgetConfigurationsResponseSerializer.class) +@JsonDeserialize( + using = ListBudgetConfigurationsResponse.ListBudgetConfigurationsResponseDeserializer.class) public class ListBudgetConfigurationsResponse { /** */ - @JsonProperty("budgets") private Collection budgets; /** * Token which can be sent as `page_token` to retrieve the next page of results. If this field is * omitted, there are no subsequent budgets. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListBudgetConfigurationsResponse setBudgets(Collection budgets) { @@ -60,4 +71,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListBudgetConfigurationsResponsePb toPb() { + ListBudgetConfigurationsResponsePb pb = new ListBudgetConfigurationsResponsePb(); + pb.setBudgets(budgets); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListBudgetConfigurationsResponse fromPb(ListBudgetConfigurationsResponsePb pb) { + ListBudgetConfigurationsResponse model = new ListBudgetConfigurationsResponse(); + model.setBudgets(pb.getBudgets()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListBudgetConfigurationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListBudgetConfigurationsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListBudgetConfigurationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListBudgetConfigurationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListBudgetConfigurationsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListBudgetConfigurationsResponsePb pb = + mapper.readValue(p, ListBudgetConfigurationsResponsePb.class); + return ListBudgetConfigurationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponsePb.java new file mode 100755 index 000000000..b6c43a277 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListBudgetConfigurationsResponsePb { + @JsonProperty("budgets") + private Collection budgets; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListBudgetConfigurationsResponsePb setBudgets(Collection budgets) { + this.budgets = budgets; + return this; + } + + public Collection getBudgets() { + return budgets; + } + + public ListBudgetConfigurationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBudgetConfigurationsResponsePb that = (ListBudgetConfigurationsResponsePb) o; + return Objects.equals(budgets, that.budgets) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(budgets, nextPageToken); + } + + @Override 
+ public String toString() { + return new ToStringer(ListBudgetConfigurationsResponsePb.class) + .add("budgets", budgets) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java index bfe3035ac..f33d726f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List policies */ @Generated +@JsonSerialize(using = ListBudgetPoliciesRequest.ListBudgetPoliciesRequestSerializer.class) +@JsonDeserialize(using = ListBudgetPoliciesRequest.ListBudgetPoliciesRequestDeserializer.class) public class ListBudgetPoliciesRequest { /** A filter to apply to the list of policies. */ - @JsonIgnore - @QueryParam("filter_by") private Filter filterBy; /** * The maximum number of budget policies to return. If unspecified, at most 100 budget policies * will be returned. 
The maximum value is 1000; values above 1000 will be coerced to 1000. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** @@ -31,13 +37,9 @@ public class ListBudgetPoliciesRequest { *

When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match * the call that provided the page token. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The sort specification. */ - @JsonIgnore - @QueryParam("sort_spec") private SortSpec sortSpec; public ListBudgetPoliciesRequest setFilterBy(Filter filterBy) { @@ -101,4 +103,47 @@ public String toString() { .add("sortSpec", sortSpec) .toString(); } + + ListBudgetPoliciesRequestPb toPb() { + ListBudgetPoliciesRequestPb pb = new ListBudgetPoliciesRequestPb(); + pb.setFilterBy(filterBy); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setSortSpec(sortSpec); + + return pb; + } + + static ListBudgetPoliciesRequest fromPb(ListBudgetPoliciesRequestPb pb) { + ListBudgetPoliciesRequest model = new ListBudgetPoliciesRequest(); + model.setFilterBy(pb.getFilterBy()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setSortSpec(pb.getSortSpec()); + + return model; + } + + public static class ListBudgetPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListBudgetPoliciesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListBudgetPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListBudgetPoliciesRequestDeserializer + extends JsonDeserializer { + @Override + public ListBudgetPoliciesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListBudgetPoliciesRequestPb pb = mapper.readValue(p, ListBudgetPoliciesRequestPb.class); + return ListBudgetPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequestPb.java new file mode 100755 index 000000000..b038f5008 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List policies */ +@Generated +class ListBudgetPoliciesRequestPb { + @JsonIgnore + @QueryParam("filter_by") + private Filter filterBy; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("sort_spec") + private SortSpec sortSpec; + + public ListBudgetPoliciesRequestPb setFilterBy(Filter filterBy) { + this.filterBy = filterBy; + return this; + } + + public Filter getFilterBy() { + return filterBy; + } + + public ListBudgetPoliciesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListBudgetPoliciesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListBudgetPoliciesRequestPb setSortSpec(SortSpec sortSpec) { + this.sortSpec = sortSpec; + return this; + } + + public SortSpec getSortSpec() { + return 
sortSpec; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBudgetPoliciesRequestPb that = (ListBudgetPoliciesRequestPb) o; + return Objects.equals(filterBy, that.filterBy) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(sortSpec, that.sortSpec); + } + + @Override + public int hashCode() { + return Objects.hash(filterBy, pageSize, pageToken, sortSpec); + } + + @Override + public String toString() { + return new ToStringer(ListBudgetPoliciesRequestPb.class) + .add("filterBy", filterBy) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("sortSpec", sortSpec) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java index 6ab49dbca..ae424b22a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java @@ -4,29 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** 
A list of policies. */ @Generated +@JsonSerialize(using = ListBudgetPoliciesResponse.ListBudgetPoliciesResponseSerializer.class) +@JsonDeserialize(using = ListBudgetPoliciesResponse.ListBudgetPoliciesResponseDeserializer.class) public class ListBudgetPoliciesResponse { /** * A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, * there are no subsequent pages. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("policies") private Collection policies; /** * A token that can be sent as `page_token` to retrieve the previous page. In this field is * omitted, there are no previous pages. */ - @JsonProperty("previous_page_token") private String previousPageToken; public ListBudgetPoliciesResponse setNextPageToken(String nextPageToken) { @@ -79,4 +87,45 @@ public String toString() { .add("previousPageToken", previousPageToken) .toString(); } + + ListBudgetPoliciesResponsePb toPb() { + ListBudgetPoliciesResponsePb pb = new ListBudgetPoliciesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPolicies(policies); + pb.setPreviousPageToken(previousPageToken); + + return pb; + } + + static ListBudgetPoliciesResponse fromPb(ListBudgetPoliciesResponsePb pb) { + ListBudgetPoliciesResponse model = new ListBudgetPoliciesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPolicies(pb.getPolicies()); + model.setPreviousPageToken(pb.getPreviousPageToken()); + + return model; + } + + public static class ListBudgetPoliciesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListBudgetPoliciesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListBudgetPoliciesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListBudgetPoliciesResponseDeserializer + extends JsonDeserializer { + @Override + public ListBudgetPoliciesResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListBudgetPoliciesResponsePb pb = mapper.readValue(p, ListBudgetPoliciesResponsePb.class); + return ListBudgetPoliciesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponsePb.java new file mode 100755 index 000000000..cf551e32f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponsePb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A list of policies. 
*/ +@Generated +class ListBudgetPoliciesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("policies") + private Collection policies; + + @JsonProperty("previous_page_token") + private String previousPageToken; + + public ListBudgetPoliciesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListBudgetPoliciesResponsePb setPolicies(Collection policies) { + this.policies = policies; + return this; + } + + public Collection getPolicies() { + return policies; + } + + public ListBudgetPoliciesResponsePb setPreviousPageToken(String previousPageToken) { + this.previousPageToken = previousPageToken; + return this; + } + + public String getPreviousPageToken() { + return previousPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBudgetPoliciesResponsePb that = (ListBudgetPoliciesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(policies, that.policies) + && Objects.equals(previousPageToken, that.previousPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, policies, previousPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListBudgetPoliciesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("policies", policies) + .add("previousPageToken", previousPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java index c68133ca6..5b428744d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get all log delivery configurations */ @Generated +@JsonSerialize(using = ListLogDeliveryRequest.ListLogDeliveryRequestSerializer.class) +@JsonDeserialize(using = ListLogDeliveryRequest.ListLogDeliveryRequestDeserializer.class) public class ListLogDeliveryRequest { /** Filter by credential configuration ID. */ - @JsonIgnore - @QueryParam("credentials_id") private String credentialsId; /** Filter by status `ENABLED` or `DISABLED`. */ - @JsonIgnore - @QueryParam("status") private LogDeliveryConfigStatus status; /** Filter by storage configuration ID. 
*/ - @JsonIgnore - @QueryParam("storage_configuration_id") private String storageConfigurationId; public ListLogDeliveryRequest setCredentialsId(String credentialsId) { @@ -76,4 +80,45 @@ public String toString() { .add("storageConfigurationId", storageConfigurationId) .toString(); } + + ListLogDeliveryRequestPb toPb() { + ListLogDeliveryRequestPb pb = new ListLogDeliveryRequestPb(); + pb.setCredentialsId(credentialsId); + pb.setStatus(status); + pb.setStorageConfigurationId(storageConfigurationId); + + return pb; + } + + static ListLogDeliveryRequest fromPb(ListLogDeliveryRequestPb pb) { + ListLogDeliveryRequest model = new ListLogDeliveryRequest(); + model.setCredentialsId(pb.getCredentialsId()); + model.setStatus(pb.getStatus()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + + return model; + } + + public static class ListLogDeliveryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListLogDeliveryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListLogDeliveryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListLogDeliveryRequestDeserializer + extends JsonDeserializer { + @Override + public ListLogDeliveryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListLogDeliveryRequestPb pb = mapper.readValue(p, ListLogDeliveryRequestPb.class); + return ListLogDeliveryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequestPb.java new file mode 100755 index 000000000..b4009b7cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get all log delivery configurations */ +@Generated +class ListLogDeliveryRequestPb { + @JsonIgnore + @QueryParam("credentials_id") + private String credentialsId; + + @JsonIgnore + @QueryParam("status") + private LogDeliveryConfigStatus status; + + @JsonIgnore + @QueryParam("storage_configuration_id") + private String storageConfigurationId; + + public ListLogDeliveryRequestPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public ListLogDeliveryRequestPb setStatus(LogDeliveryConfigStatus status) { + this.status = status; + return this; + } + + public LogDeliveryConfigStatus getStatus() { + return status; + } + + public ListLogDeliveryRequestPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + @Override + public boolean equals(Object o) { + if (this 
== o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListLogDeliveryRequestPb that = (ListLogDeliveryRequestPb) o; + return Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(status, that.status) + && Objects.equals(storageConfigurationId, that.storageConfigurationId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialsId, status, storageConfigurationId); + } + + @Override + public String toString() { + return new ToStringer(ListLogDeliveryRequestPb.class) + .add("credentialsId", credentialsId) + .add("status", status) + .add("storageConfigurationId", storageConfigurationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java index 6a6f6521b..ef3e0964f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
LogDeliveryConfiguration.LogDeliveryConfigurationSerializer.class) +@JsonDeserialize(using = LogDeliveryConfiguration.LogDeliveryConfigurationDeserializer.class) public class LogDeliveryConfiguration { /** The Databricks account ID that hosts the log delivery configuration. */ - @JsonProperty("account_id") private String accountId; /** Databricks log delivery configuration ID. */ - @JsonProperty("config_id") private String configId; /** The optional human-readable name of the log delivery configuration. Defaults to empty. */ - @JsonProperty("config_name") private String configName; /** Time in epoch milliseconds when the log delivery configuration was created. */ - @JsonProperty("creation_time") private Long creationTime; /** @@ -34,7 +41,6 @@ public class LogDeliveryConfiguration { *

[Configure billable usage delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("credentials_id") private String credentialsId; /** @@ -42,7 +48,6 @@ public class LogDeliveryConfiguration { * logs are delivered to the root of the bucket. This must be a valid S3 object key. This must not * start or end with a slash character. */ - @JsonProperty("delivery_path_prefix") private String deliveryPathPrefix; /** @@ -50,11 +55,9 @@ public class LogDeliveryConfiguration { * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`). */ - @JsonProperty("delivery_start_time") private String deliveryStartTime; /** Databricks log delivery status. */ - @JsonProperty("log_delivery_status") private LogDeliveryStatus logDeliveryStatus; /** @@ -74,7 +77,6 @@ public class LogDeliveryConfiguration { * usage log delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("log_type") private LogType logType; /** @@ -89,7 +91,6 @@ public class LogDeliveryConfiguration { * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View * billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html */ - @JsonProperty("output_format") private OutputFormat outputFormat; /** @@ -98,7 +99,6 @@ public class LogDeliveryConfiguration { * configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration * is not supported, so disable a log delivery configuration that is no longer needed. */ - @JsonProperty("status") private LogDeliveryConfigStatus status; /** @@ -109,11 +109,9 @@ public class LogDeliveryConfiguration { *

[Configure billable usage delivery]: * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** Time in epoch milliseconds when the log delivery configuration was updated. */ - @JsonProperty("update_time") private Long updateTime; /** @@ -128,7 +126,6 @@ public class LogDeliveryConfiguration { * types of Databricks deployments there is only one workspace per account ID, so this field is * unnecessary. */ - @JsonProperty("workspace_ids_filter") private Collection workspaceIdsFilter; public LogDeliveryConfiguration setAccountId(String accountId) { @@ -316,4 +313,67 @@ public String toString() { .add("workspaceIdsFilter", workspaceIdsFilter) .toString(); } + + LogDeliveryConfigurationPb toPb() { + LogDeliveryConfigurationPb pb = new LogDeliveryConfigurationPb(); + pb.setAccountId(accountId); + pb.setConfigId(configId); + pb.setConfigName(configName); + pb.setCreationTime(creationTime); + pb.setCredentialsId(credentialsId); + pb.setDeliveryPathPrefix(deliveryPathPrefix); + pb.setDeliveryStartTime(deliveryStartTime); + pb.setLogDeliveryStatus(logDeliveryStatus); + pb.setLogType(logType); + pb.setOutputFormat(outputFormat); + pb.setStatus(status); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setUpdateTime(updateTime); + pb.setWorkspaceIdsFilter(workspaceIdsFilter); + + return pb; + } + + static LogDeliveryConfiguration fromPb(LogDeliveryConfigurationPb pb) { + LogDeliveryConfiguration model = new LogDeliveryConfiguration(); + model.setAccountId(pb.getAccountId()); + model.setConfigId(pb.getConfigId()); + model.setConfigName(pb.getConfigName()); + model.setCreationTime(pb.getCreationTime()); + model.setCredentialsId(pb.getCredentialsId()); + model.setDeliveryPathPrefix(pb.getDeliveryPathPrefix()); + model.setDeliveryStartTime(pb.getDeliveryStartTime()); + model.setLogDeliveryStatus(pb.getLogDeliveryStatus()); + 
model.setLogType(pb.getLogType()); + model.setOutputFormat(pb.getOutputFormat()); + model.setStatus(pb.getStatus()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWorkspaceIdsFilter(pb.getWorkspaceIdsFilter()); + + return model; + } + + public static class LogDeliveryConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + LogDeliveryConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogDeliveryConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogDeliveryConfigurationDeserializer + extends JsonDeserializer { + @Override + public LogDeliveryConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogDeliveryConfigurationPb pb = mapper.readValue(p, LogDeliveryConfigurationPb.class); + return LogDeliveryConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigurationPb.java new file mode 100755 index 000000000..2bec00f91 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigurationPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LogDeliveryConfigurationPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("config_id") + private String configId; + + @JsonProperty("config_name") + private String configName; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("delivery_path_prefix") + private String deliveryPathPrefix; + + @JsonProperty("delivery_start_time") + private String deliveryStartTime; + + @JsonProperty("log_delivery_status") + private LogDeliveryStatus logDeliveryStatus; + + @JsonProperty("log_type") + private LogType logType; + + @JsonProperty("output_format") + private OutputFormat outputFormat; + + @JsonProperty("status") + private LogDeliveryConfigStatus status; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("update_time") + private Long updateTime; + + @JsonProperty("workspace_ids_filter") + private Collection workspaceIdsFilter; + + public LogDeliveryConfigurationPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public LogDeliveryConfigurationPb setConfigId(String configId) { + this.configId = configId; + return this; + } + + public String getConfigId() { + return configId; + } + + public LogDeliveryConfigurationPb setConfigName(String configName) { + this.configName = configName; + return this; + } + + public String getConfigName() { + return configName; + } + + public LogDeliveryConfigurationPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return 
creationTime; + } + + public LogDeliveryConfigurationPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public LogDeliveryConfigurationPb setDeliveryPathPrefix(String deliveryPathPrefix) { + this.deliveryPathPrefix = deliveryPathPrefix; + return this; + } + + public String getDeliveryPathPrefix() { + return deliveryPathPrefix; + } + + public LogDeliveryConfigurationPb setDeliveryStartTime(String deliveryStartTime) { + this.deliveryStartTime = deliveryStartTime; + return this; + } + + public String getDeliveryStartTime() { + return deliveryStartTime; + } + + public LogDeliveryConfigurationPb setLogDeliveryStatus(LogDeliveryStatus logDeliveryStatus) { + this.logDeliveryStatus = logDeliveryStatus; + return this; + } + + public LogDeliveryStatus getLogDeliveryStatus() { + return logDeliveryStatus; + } + + public LogDeliveryConfigurationPb setLogType(LogType logType) { + this.logType = logType; + return this; + } + + public LogType getLogType() { + return logType; + } + + public LogDeliveryConfigurationPb setOutputFormat(OutputFormat outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + public OutputFormat getOutputFormat() { + return outputFormat; + } + + public LogDeliveryConfigurationPb setStatus(LogDeliveryConfigStatus status) { + this.status = status; + return this; + } + + public LogDeliveryConfigStatus getStatus() { + return status; + } + + public LogDeliveryConfigurationPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public LogDeliveryConfigurationPb setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + return this; + } + + public Long getUpdateTime() { + return updateTime; + } + + public LogDeliveryConfigurationPb 
setWorkspaceIdsFilter(Collection workspaceIdsFilter) { + this.workspaceIdsFilter = workspaceIdsFilter; + return this; + } + + public Collection getWorkspaceIdsFilter() { + return workspaceIdsFilter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogDeliveryConfigurationPb that = (LogDeliveryConfigurationPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(configId, that.configId) + && Objects.equals(configName, that.configName) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(deliveryPathPrefix, that.deliveryPathPrefix) + && Objects.equals(deliveryStartTime, that.deliveryStartTime) + && Objects.equals(logDeliveryStatus, that.logDeliveryStatus) + && Objects.equals(logType, that.logType) + && Objects.equals(outputFormat, that.outputFormat) + && Objects.equals(status, that.status) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(workspaceIdsFilter, that.workspaceIdsFilter); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + configId, + configName, + creationTime, + credentialsId, + deliveryPathPrefix, + deliveryStartTime, + logDeliveryStatus, + logType, + outputFormat, + status, + storageConfigurationId, + updateTime, + workspaceIdsFilter); + } + + @Override + public String toString() { + return new ToStringer(LogDeliveryConfigurationPb.class) + .add("accountId", accountId) + .add("configId", configId) + .add("configName", configName) + .add("creationTime", creationTime) + .add("credentialsId", credentialsId) + .add("deliveryPathPrefix", deliveryPathPrefix) + .add("deliveryStartTime", deliveryStartTime) + .add("logDeliveryStatus", logDeliveryStatus) + .add("logType", logType) + .add("outputFormat", outputFormat) + .add("status", status) + 
.add("storageConfigurationId", storageConfigurationId) + .add("updateTime", updateTime) + .add("workspaceIdsFilter", workspaceIdsFilter) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java index dd4e64dcf..f8bf4fd8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java @@ -22,7 +22,7 @@ public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfigurat String.format("/api/2.0/accounts/%s/log-delivery", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, WrappedLogDeliveryConfiguration.class); @@ -39,7 +39,7 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { apiClient.configuredAccountID(), request.getLogDeliveryConfigurationId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WrappedLogDeliveryConfiguration.class); } catch (IOException e) { @@ -53,7 +53,7 @@ public WrappedLogDeliveryConfigurations list(ListLogDeliveryRequest request) { String.format("/api/2.0/accounts/%s/log-delivery", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WrappedLogDeliveryConfigurations.class); } catch (IOException e) { @@ -69,7 +69,7 @@ public 
void patchStatus(UpdateLogDeliveryConfigurationStatusRequest request) { apiClient.configuredAccountID(), request.getLogDeliveryConfigurationId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchStatusResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java index 5c37c00aa..d3af1ade8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Databricks log delivery status. */ @Generated +@JsonSerialize(using = LogDeliveryStatus.LogDeliveryStatusSerializer.class) +@JsonDeserialize(using = LogDeliveryStatus.LogDeliveryStatusDeserializer.class) public class LogDeliveryStatus { /** The UTC time for the latest log delivery attempt. 
*/ - @JsonProperty("last_attempt_time") private String lastAttemptTime; /** The UTC time for the latest successful log delivery. */ - @JsonProperty("last_successful_attempt_time") private String lastSuccessfulAttemptTime; /** * Informative message about the latest log delivery attempt. If the log delivery fails with * USER_FAILURE, error details will be provided for fixing misconfigurations in cloud permissions. */ - @JsonProperty("message") private String message; /** @@ -35,7 +43,6 @@ public class LogDeliveryStatus { * the configuration has been disabled since the release of this feature or there are no * workspaces in the account. */ - @JsonProperty("status") private DeliveryStatus status; public LogDeliveryStatus setLastAttemptTime(String lastAttemptTime) { @@ -99,4 +106,44 @@ public String toString() { .add("status", status) .toString(); } + + LogDeliveryStatusPb toPb() { + LogDeliveryStatusPb pb = new LogDeliveryStatusPb(); + pb.setLastAttemptTime(lastAttemptTime); + pb.setLastSuccessfulAttemptTime(lastSuccessfulAttemptTime); + pb.setMessage(message); + pb.setStatus(status); + + return pb; + } + + static LogDeliveryStatus fromPb(LogDeliveryStatusPb pb) { + LogDeliveryStatus model = new LogDeliveryStatus(); + model.setLastAttemptTime(pb.getLastAttemptTime()); + model.setLastSuccessfulAttemptTime(pb.getLastSuccessfulAttemptTime()); + model.setMessage(pb.getMessage()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class LogDeliveryStatusSerializer extends JsonSerializer { + @Override + public void serialize(LogDeliveryStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogDeliveryStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogDeliveryStatusDeserializer extends JsonDeserializer { + @Override + public LogDeliveryStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogDeliveryStatusPb pb = mapper.readValue(p, LogDeliveryStatusPb.class); + return LogDeliveryStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatusPb.java new file mode 100755 index 000000000..8e9eb503c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatusPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Databricks log delivery status. */ +@Generated +class LogDeliveryStatusPb { + @JsonProperty("last_attempt_time") + private String lastAttemptTime; + + @JsonProperty("last_successful_attempt_time") + private String lastSuccessfulAttemptTime; + + @JsonProperty("message") + private String message; + + @JsonProperty("status") + private DeliveryStatus status; + + public LogDeliveryStatusPb setLastAttemptTime(String lastAttemptTime) { + this.lastAttemptTime = lastAttemptTime; + return this; + } + + public String getLastAttemptTime() { + return lastAttemptTime; + } + + public LogDeliveryStatusPb setLastSuccessfulAttemptTime(String lastSuccessfulAttemptTime) { + this.lastSuccessfulAttemptTime = lastSuccessfulAttemptTime; + return this; + } + + public String getLastSuccessfulAttemptTime() { + return lastSuccessfulAttemptTime; + } + + public LogDeliveryStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public LogDeliveryStatusPb setStatus(DeliveryStatus status) { + this.status = status; + 
return this; + } + + public DeliveryStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogDeliveryStatusPb that = (LogDeliveryStatusPb) o; + return Objects.equals(lastAttemptTime, that.lastAttemptTime) + && Objects.equals(lastSuccessfulAttemptTime, that.lastSuccessfulAttemptTime) + && Objects.equals(message, that.message) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(lastAttemptTime, lastSuccessfulAttemptTime, message, status); + } + + @Override + public String toString() { + return new ToStringer(LogDeliveryStatusPb.class) + .add("lastAttemptTime", lastAttemptTime) + .add("lastSuccessfulAttemptTime", lastSuccessfulAttemptTime) + .add("message", message) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java index aa3dee4e9..7d5d1d250 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; 
import java.util.Objects; @Generated +@JsonSerialize(using = PatchStatusResponse.PatchStatusResponseSerializer.class) +@JsonDeserialize(using = PatchStatusResponse.PatchStatusResponseDeserializer.class) public class PatchStatusResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(PatchStatusResponse.class).toString(); } + + PatchStatusResponsePb toPb() { + PatchStatusResponsePb pb = new PatchStatusResponsePb(); + + return pb; + } + + static PatchStatusResponse fromPb(PatchStatusResponsePb pb) { + PatchStatusResponse model = new PatchStatusResponse(); + + return model; + } + + public static class PatchStatusResponseSerializer extends JsonSerializer { + @Override + public void serialize(PatchStatusResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchStatusResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchStatusResponseDeserializer + extends JsonDeserializer { + @Override + public PatchStatusResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchStatusResponsePb pb = mapper.readValue(p, PatchStatusResponsePb.class); + return PatchStatusResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponsePb.java new file mode 100755 index 000000000..ac8e4e7cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PatchStatusResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PatchStatusResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java index a0e3d2442..e6ce34310 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java @@ -3,21 +3,27 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SortSpec.SortSpecSerializer.class) +@JsonDeserialize(using = SortSpec.SortSpecDeserializer.class) public class SortSpec { /** Whether to sort in descending order. 
*/ - @JsonProperty("descending") - @QueryParam("descending") private Boolean descending; /** The filed to sort by */ - @JsonProperty("field") - @QueryParam("field") private SortSpecField field; public SortSpec setDescending(Boolean descending) { @@ -58,4 +64,39 @@ public String toString() { .add("field", field) .toString(); } + + SortSpecPb toPb() { + SortSpecPb pb = new SortSpecPb(); + pb.setDescending(descending); + pb.setField(field); + + return pb; + } + + static SortSpec fromPb(SortSpecPb pb) { + SortSpec model = new SortSpec(); + model.setDescending(pb.getDescending()); + model.setField(pb.getField()); + + return model; + } + + public static class SortSpecSerializer extends JsonSerializer { + @Override + public void serialize(SortSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SortSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SortSpecDeserializer extends JsonDeserializer { + @Override + public SortSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SortSpecPb pb = mapper.readValue(p, SortSpecPb.class); + return SortSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecPb.java new file mode 100755 index 000000000..46658f8a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SortSpecPb { + @JsonProperty("descending") + @QueryParam("descending") + private Boolean descending; + + @JsonProperty("field") + @QueryParam("field") + private SortSpecField field; + + public SortSpecPb setDescending(Boolean descending) { + this.descending = descending; + return this; + } + + public Boolean getDescending() { + return descending; + } + + public SortSpecPb setField(SortSpecField field) { + this.field = field; + return this; + } + + public SortSpecField getField() { + return field; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SortSpecPb that = (SortSpecPb) o; + return Objects.equals(descending, that.descending) && Objects.equals(field, that.field); + } + + @Override + public int hashCode() { + return Objects.hash(descending, field); + } + + @Override + public String toString() { + return new ToStringer(SortSpecPb.class) + .add("descending", descending) + .add("field", field) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java index d8a2f8411..976b7cb54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java @@ -4,29 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateBudgetConfigurationBudget.UpdateBudgetConfigurationBudgetSerializer.class) +@JsonDeserialize( + using = UpdateBudgetConfigurationBudget.UpdateBudgetConfigurationBudgetDeserializer.class) public class UpdateBudgetConfigurationBudget { /** Databricks account ID. */ - @JsonProperty("account_id") private String accountId; /** * Alerts to configure when this budget is in a triggered state. Budgets must have exactly one * alert configuration. */ - @JsonProperty("alert_configurations") private Collection alertConfigurations; /** Databricks budget configuration ID. */ - @JsonProperty("budget_configuration_id") private String budgetConfigurationId; /** Human-readable name of budget configuration. Max Length: 128 */ - @JsonProperty("display_name") private String displayName; /** @@ -34,7 +43,6 @@ public class UpdateBudgetConfigurationBudget { * scope of what is considered for this budget. Leave empty to include all usage for this account. * All provided filters must be matched for usage to be included. 
*/ - @JsonProperty("filter") private BudgetConfigurationFilter filter; public UpdateBudgetConfigurationBudget setAccountId(String accountId) { @@ -110,4 +118,50 @@ public String toString() { .add("filter", filter) .toString(); } + + UpdateBudgetConfigurationBudgetPb toPb() { + UpdateBudgetConfigurationBudgetPb pb = new UpdateBudgetConfigurationBudgetPb(); + pb.setAccountId(accountId); + pb.setAlertConfigurations(alertConfigurations); + pb.setBudgetConfigurationId(budgetConfigurationId); + pb.setDisplayName(displayName); + pb.setFilter(filter); + + return pb; + } + + static UpdateBudgetConfigurationBudget fromPb(UpdateBudgetConfigurationBudgetPb pb) { + UpdateBudgetConfigurationBudget model = new UpdateBudgetConfigurationBudget(); + model.setAccountId(pb.getAccountId()); + model.setAlertConfigurations(pb.getAlertConfigurations()); + model.setBudgetConfigurationId(pb.getBudgetConfigurationId()); + model.setDisplayName(pb.getDisplayName()); + model.setFilter(pb.getFilter()); + + return model; + } + + public static class UpdateBudgetConfigurationBudgetSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateBudgetConfigurationBudget value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateBudgetConfigurationBudgetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateBudgetConfigurationBudgetDeserializer + extends JsonDeserializer { + @Override + public UpdateBudgetConfigurationBudget deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateBudgetConfigurationBudgetPb pb = + mapper.readValue(p, UpdateBudgetConfigurationBudgetPb.class); + return UpdateBudgetConfigurationBudget.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudgetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudgetPb.java new file mode 100755 index 000000000..34f4398c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudgetPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateBudgetConfigurationBudgetPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("alert_configurations") + private Collection alertConfigurations; + + @JsonProperty("budget_configuration_id") + private String budgetConfigurationId; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("filter") + private BudgetConfigurationFilter filter; + + public UpdateBudgetConfigurationBudgetPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public UpdateBudgetConfigurationBudgetPb setAlertConfigurations( + Collection alertConfigurations) { + this.alertConfigurations = alertConfigurations; + return this; + } + + public Collection getAlertConfigurations() { + return alertConfigurations; + } + + public UpdateBudgetConfigurationBudgetPb setBudgetConfigurationId(String budgetConfigurationId) { + this.budgetConfigurationId = budgetConfigurationId; 
+ return this; + } + + public String getBudgetConfigurationId() { + return budgetConfigurationId; + } + + public UpdateBudgetConfigurationBudgetPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateBudgetConfigurationBudgetPb setFilter(BudgetConfigurationFilter filter) { + this.filter = filter; + return this; + } + + public BudgetConfigurationFilter getFilter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateBudgetConfigurationBudgetPb that = (UpdateBudgetConfigurationBudgetPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(alertConfigurations, that.alertConfigurations) + && Objects.equals(budgetConfigurationId, that.budgetConfigurationId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(accountId, alertConfigurations, budgetConfigurationId, displayName, filter); + } + + @Override + public String toString() { + return new ToStringer(UpdateBudgetConfigurationBudgetPb.class) + .add("accountId", accountId) + .add("alertConfigurations", alertConfigurations) + .add("budgetConfigurationId", budgetConfigurationId) + .add("displayName", displayName) + .add("filter", filter) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequest.java index 94f800b99..6c6a03c8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequest.java @@ -4,18 +4,29 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateBudgetConfigurationRequest.UpdateBudgetConfigurationRequestSerializer.class) +@JsonDeserialize( + using = UpdateBudgetConfigurationRequest.UpdateBudgetConfigurationRequestDeserializer.class) public class UpdateBudgetConfigurationRequest { /** The updated budget. This will overwrite the budget specified by the budget ID. */ - @JsonProperty("budget") private UpdateBudgetConfigurationBudget budget; /** The Databricks budget configuration ID. 
*/ - @JsonIgnore private String budgetId; + private String budgetId; public UpdateBudgetConfigurationRequest setBudget(UpdateBudgetConfigurationBudget budget) { this.budget = budget; @@ -55,4 +66,44 @@ public String toString() { .add("budgetId", budgetId) .toString(); } + + UpdateBudgetConfigurationRequestPb toPb() { + UpdateBudgetConfigurationRequestPb pb = new UpdateBudgetConfigurationRequestPb(); + pb.setBudget(budget); + pb.setBudgetId(budgetId); + + return pb; + } + + static UpdateBudgetConfigurationRequest fromPb(UpdateBudgetConfigurationRequestPb pb) { + UpdateBudgetConfigurationRequest model = new UpdateBudgetConfigurationRequest(); + model.setBudget(pb.getBudget()); + model.setBudgetId(pb.getBudgetId()); + + return model; + } + + public static class UpdateBudgetConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateBudgetConfigurationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateBudgetConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateBudgetConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateBudgetConfigurationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateBudgetConfigurationRequestPb pb = + mapper.readValue(p, UpdateBudgetConfigurationRequestPb.class); + return UpdateBudgetConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequestPb.java new file mode 100755 index 000000000..ea8f26b85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateBudgetConfigurationRequestPb { + @JsonProperty("budget") + private UpdateBudgetConfigurationBudget budget; + + @JsonIgnore private String budgetId; + + public UpdateBudgetConfigurationRequestPb setBudget(UpdateBudgetConfigurationBudget budget) { + this.budget = budget; + return this; + } + + public UpdateBudgetConfigurationBudget getBudget() { + return budget; + } + + public UpdateBudgetConfigurationRequestPb setBudgetId(String budgetId) { + this.budgetId = budgetId; + return this; + } + + public String getBudgetId() { + return budgetId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateBudgetConfigurationRequestPb that = (UpdateBudgetConfigurationRequestPb) o; + return Objects.equals(budget, that.budget) && Objects.equals(budgetId, that.budgetId); + } + + @Override + public int hashCode() { + return Objects.hash(budget, budgetId); + } + + @Override 
+ public String toString() { + return new ToStringer(UpdateBudgetConfigurationRequestPb.class) + .add("budget", budget) + .add("budgetId", budgetId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java index 7391e4cdc..1a8dec9ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateBudgetConfigurationResponse.UpdateBudgetConfigurationResponseSerializer.class) +@JsonDeserialize( + using = UpdateBudgetConfigurationResponse.UpdateBudgetConfigurationResponseDeserializer.class) public class UpdateBudgetConfigurationResponse { /** The updated budget. 
*/ - @JsonProperty("budget") private BudgetConfiguration budget; public UpdateBudgetConfigurationResponse setBudget(BudgetConfiguration budget) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateBudgetConfigurationResponse.class).add("budget", budget).toString(); } + + UpdateBudgetConfigurationResponsePb toPb() { + UpdateBudgetConfigurationResponsePb pb = new UpdateBudgetConfigurationResponsePb(); + pb.setBudget(budget); + + return pb; + } + + static UpdateBudgetConfigurationResponse fromPb(UpdateBudgetConfigurationResponsePb pb) { + UpdateBudgetConfigurationResponse model = new UpdateBudgetConfigurationResponse(); + model.setBudget(pb.getBudget()); + + return model; + } + + public static class UpdateBudgetConfigurationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateBudgetConfigurationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateBudgetConfigurationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateBudgetConfigurationResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateBudgetConfigurationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateBudgetConfigurationResponsePb pb = + mapper.readValue(p, UpdateBudgetConfigurationResponsePb.class); + return UpdateBudgetConfigurationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponsePb.java new file mode 100755 index 000000000..77c503bf9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateBudgetConfigurationResponsePb { + @JsonProperty("budget") + private BudgetConfiguration budget; + + public UpdateBudgetConfigurationResponsePb setBudget(BudgetConfiguration budget) { + this.budget = budget; + return this; + } + + public BudgetConfiguration getBudget() { + return budget; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateBudgetConfigurationResponsePb that = (UpdateBudgetConfigurationResponsePb) o; + return Objects.equals(budget, that.budget); + } + + @Override + public int hashCode() { + return Objects.hash(budget); + } + + @Override + public String toString() { + return new ToStringer(UpdateBudgetConfigurationResponsePb.class) + .add("budget", budget) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java index d7216e802..9fa2c6e52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java @@ -3,26 +3,32 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update a budget policy */ @Generated +@JsonSerialize(using = UpdateBudgetPolicyRequest.UpdateBudgetPolicyRequestSerializer.class) +@JsonDeserialize(using = UpdateBudgetPolicyRequest.UpdateBudgetPolicyRequestDeserializer.class) public class UpdateBudgetPolicyRequest { /** DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy */ - @JsonIgnore - @QueryParam("limit_config") private LimitConfig limitConfig; /** Contains the BudgetPolicy details. */ - @JsonProperty("policy") private BudgetPolicy policy; /** The Id of the policy. This field is generated by Databricks and globally unique. 
*/ - @JsonIgnore private String policyId; + private String policyId; public UpdateBudgetPolicyRequest setLimitConfig(LimitConfig limitConfig) { this.limitConfig = limitConfig; @@ -74,4 +80,45 @@ public String toString() { .add("policyId", policyId) .toString(); } + + UpdateBudgetPolicyRequestPb toPb() { + UpdateBudgetPolicyRequestPb pb = new UpdateBudgetPolicyRequestPb(); + pb.setLimitConfig(limitConfig); + pb.setPolicy(policy); + pb.setPolicyId(policyId); + + return pb; + } + + static UpdateBudgetPolicyRequest fromPb(UpdateBudgetPolicyRequestPb pb) { + UpdateBudgetPolicyRequest model = new UpdateBudgetPolicyRequest(); + model.setLimitConfig(pb.getLimitConfig()); + model.setPolicy(pb.getPolicy()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class UpdateBudgetPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateBudgetPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateBudgetPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateBudgetPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateBudgetPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateBudgetPolicyRequestPb pb = mapper.readValue(p, UpdateBudgetPolicyRequestPb.class); + return UpdateBudgetPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequestPb.java new file mode 100755 index 000000000..948c4af41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a budget policy */ +@Generated +class UpdateBudgetPolicyRequestPb { + @JsonIgnore + @QueryParam("limit_config") + private LimitConfig limitConfig; + + @JsonProperty("policy") + private BudgetPolicy policy; + + @JsonIgnore private String policyId; + + public UpdateBudgetPolicyRequestPb setLimitConfig(LimitConfig limitConfig) { + this.limitConfig = limitConfig; + return this; + } + + public LimitConfig getLimitConfig() { + return limitConfig; + } + + public UpdateBudgetPolicyRequestPb setPolicy(BudgetPolicy policy) { + this.policy = policy; + return this; + } + + public BudgetPolicy getPolicy() { + return policy; + } + + public UpdateBudgetPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateBudgetPolicyRequestPb that = 
(UpdateBudgetPolicyRequestPb) o; + return Objects.equals(limitConfig, that.limitConfig) + && Objects.equals(policy, that.policy) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(limitConfig, policy, policyId); + } + + @Override + public String toString() { + return new ToStringer(UpdateBudgetPolicyRequestPb.class) + .add("limitConfig", limitConfig) + .add("policy", policy) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java index 383fcd194..f0fbf2bd7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateLogDeliveryConfigurationStatusRequest + .UpdateLogDeliveryConfigurationStatusRequestSerializer.class) +@JsonDeserialize( + using = + 
UpdateLogDeliveryConfigurationStatusRequest + .UpdateLogDeliveryConfigurationStatusRequestDeserializer.class) public class UpdateLogDeliveryConfigurationStatusRequest { /** Databricks log delivery configuration ID */ - @JsonIgnore private String logDeliveryConfigurationId; + private String logDeliveryConfigurationId; /** * Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). @@ -19,7 +35,6 @@ public class UpdateLogDeliveryConfigurationStatusRequest { * configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration * is not supported, so disable a log delivery configuration that is no longer needed. */ - @JsonProperty("status") private LogDeliveryConfigStatus status; public UpdateLogDeliveryConfigurationStatusRequest setLogDeliveryConfigurationId( @@ -63,4 +78,49 @@ public String toString() { .add("status", status) .toString(); } + + UpdateLogDeliveryConfigurationStatusRequestPb toPb() { + UpdateLogDeliveryConfigurationStatusRequestPb pb = + new UpdateLogDeliveryConfigurationStatusRequestPb(); + pb.setLogDeliveryConfigurationId(logDeliveryConfigurationId); + pb.setStatus(status); + + return pb; + } + + static UpdateLogDeliveryConfigurationStatusRequest fromPb( + UpdateLogDeliveryConfigurationStatusRequestPb pb) { + UpdateLogDeliveryConfigurationStatusRequest model = + new UpdateLogDeliveryConfigurationStatusRequest(); + model.setLogDeliveryConfigurationId(pb.getLogDeliveryConfigurationId()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class UpdateLogDeliveryConfigurationStatusRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateLogDeliveryConfigurationStatusRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateLogDeliveryConfigurationStatusRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
UpdateLogDeliveryConfigurationStatusRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateLogDeliveryConfigurationStatusRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateLogDeliveryConfigurationStatusRequestPb pb = + mapper.readValue(p, UpdateLogDeliveryConfigurationStatusRequestPb.class); + return UpdateLogDeliveryConfigurationStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequestPb.java new file mode 100755 index 000000000..f1d5a7acd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateLogDeliveryConfigurationStatusRequestPb { + @JsonIgnore private String logDeliveryConfigurationId; + + @JsonProperty("status") + private LogDeliveryConfigStatus status; + + public UpdateLogDeliveryConfigurationStatusRequestPb setLogDeliveryConfigurationId( + String logDeliveryConfigurationId) { + this.logDeliveryConfigurationId = logDeliveryConfigurationId; + return this; + } + + public String getLogDeliveryConfigurationId() { + return logDeliveryConfigurationId; + } + + public UpdateLogDeliveryConfigurationStatusRequestPb setStatus(LogDeliveryConfigStatus status) { + this.status = status; + return this; + } + + public LogDeliveryConfigStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLogDeliveryConfigurationStatusRequestPb that = + (UpdateLogDeliveryConfigurationStatusRequestPb) o; + return Objects.equals(logDeliveryConfigurationId, that.logDeliveryConfigurationId) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfigurationId, status); + } + + @Override + public String toString() { + return new ToStringer(UpdateLogDeliveryConfigurationStatusRequestPb.class) + .add("logDeliveryConfigurationId", logDeliveryConfigurationId) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java index 6f6574868..08cf03a93 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java @@ -21,7 +21,7 @@ public CreateBillingUsageDashboardResponse create(CreateBillingUsageDashboardReq String path = String.format("/api/2.0/accounts/%s/dashboard", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateBillingUsageDashboardResponse.class); @@ -35,7 +35,7 @@ public GetBillingUsageDashboardResponse get(GetBillingUsageDashboardRequest requ String path = String.format("/api/2.0/accounts/%s/dashboard", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetBillingUsageDashboardResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java index cb830b923..1982fa7f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java @@ -4,13 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + WrappedCreateLogDeliveryConfiguration.WrappedCreateLogDeliveryConfigurationSerializer.class) +@JsonDeserialize( + using = + WrappedCreateLogDeliveryConfiguration.WrappedCreateLogDeliveryConfigurationDeserializer + .class) public class WrappedCreateLogDeliveryConfiguration { /** */ - @JsonProperty("log_delivery_configuration") private CreateLogDeliveryConfigurationParams logDeliveryConfiguration; public WrappedCreateLogDeliveryConfiguration setLogDeliveryConfiguration( @@ -42,4 +57,42 @@ public String toString() { .add("logDeliveryConfiguration", logDeliveryConfiguration) .toString(); } + + WrappedCreateLogDeliveryConfigurationPb toPb() { + WrappedCreateLogDeliveryConfigurationPb pb = new WrappedCreateLogDeliveryConfigurationPb(); + pb.setLogDeliveryConfiguration(logDeliveryConfiguration); + + return pb; + } + + static WrappedCreateLogDeliveryConfiguration fromPb(WrappedCreateLogDeliveryConfigurationPb pb) { + WrappedCreateLogDeliveryConfiguration model = new WrappedCreateLogDeliveryConfiguration(); + model.setLogDeliveryConfiguration(pb.getLogDeliveryConfiguration()); + + return model; + } + + public static class WrappedCreateLogDeliveryConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + WrappedCreateLogDeliveryConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WrappedCreateLogDeliveryConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static 
class WrappedCreateLogDeliveryConfigurationDeserializer + extends JsonDeserializer { + @Override + public WrappedCreateLogDeliveryConfiguration deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WrappedCreateLogDeliveryConfigurationPb pb = + mapper.readValue(p, WrappedCreateLogDeliveryConfigurationPb.class); + return WrappedCreateLogDeliveryConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfigurationPb.java new file mode 100755 index 000000000..366c20b49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfigurationPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WrappedCreateLogDeliveryConfigurationPb { + @JsonProperty("log_delivery_configuration") + private CreateLogDeliveryConfigurationParams logDeliveryConfiguration; + + public WrappedCreateLogDeliveryConfigurationPb setLogDeliveryConfiguration( + CreateLogDeliveryConfigurationParams logDeliveryConfiguration) { + this.logDeliveryConfiguration = logDeliveryConfiguration; + return this; + } + + public CreateLogDeliveryConfigurationParams getLogDeliveryConfiguration() { + return logDeliveryConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WrappedCreateLogDeliveryConfigurationPb that = (WrappedCreateLogDeliveryConfigurationPb) o; + return Objects.equals(logDeliveryConfiguration, that.logDeliveryConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfiguration); + } + + @Override + public String toString() { + return new ToStringer(WrappedCreateLogDeliveryConfigurationPb.class) + .add("logDeliveryConfiguration", logDeliveryConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java index 15d0080e9..e85a7a904 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = WrappedLogDeliveryConfiguration.WrappedLogDeliveryConfigurationSerializer.class) +@JsonDeserialize( + using = WrappedLogDeliveryConfiguration.WrappedLogDeliveryConfigurationDeserializer.class) public class WrappedLogDeliveryConfiguration { /** */ - @JsonProperty("log_delivery_configuration") private LogDeliveryConfiguration logDeliveryConfiguration; public WrappedLogDeliveryConfiguration setLogDeliveryConfiguration( @@ -42,4 +54,42 @@ public String toString() { .add("logDeliveryConfiguration", logDeliveryConfiguration) .toString(); } + + WrappedLogDeliveryConfigurationPb toPb() { + WrappedLogDeliveryConfigurationPb pb = new WrappedLogDeliveryConfigurationPb(); + pb.setLogDeliveryConfiguration(logDeliveryConfiguration); + + return pb; + } + + static WrappedLogDeliveryConfiguration fromPb(WrappedLogDeliveryConfigurationPb pb) { + WrappedLogDeliveryConfiguration model = new WrappedLogDeliveryConfiguration(); + model.setLogDeliveryConfiguration(pb.getLogDeliveryConfiguration()); + + return model; + } + + public static class WrappedLogDeliveryConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + WrappedLogDeliveryConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WrappedLogDeliveryConfigurationPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class WrappedLogDeliveryConfigurationDeserializer + extends JsonDeserializer { + @Override + public WrappedLogDeliveryConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WrappedLogDeliveryConfigurationPb pb = + mapper.readValue(p, WrappedLogDeliveryConfigurationPb.class); + return WrappedLogDeliveryConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationPb.java new file mode 100755 index 000000000..e199e1698 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WrappedLogDeliveryConfigurationPb { + @JsonProperty("log_delivery_configuration") + private LogDeliveryConfiguration logDeliveryConfiguration; + + public WrappedLogDeliveryConfigurationPb setLogDeliveryConfiguration( + LogDeliveryConfiguration logDeliveryConfiguration) { + this.logDeliveryConfiguration = logDeliveryConfiguration; + return this; + } + + public LogDeliveryConfiguration getLogDeliveryConfiguration() { + return logDeliveryConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WrappedLogDeliveryConfigurationPb that = (WrappedLogDeliveryConfigurationPb) o; + return Objects.equals(logDeliveryConfiguration, that.logDeliveryConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfiguration); + } + + @Override + public String toString() { + return new ToStringer(WrappedLogDeliveryConfigurationPb.class) + .add("logDeliveryConfiguration", logDeliveryConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java index ddb9ba7d7..3f909736a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = WrappedLogDeliveryConfigurations.WrappedLogDeliveryConfigurationsSerializer.class) +@JsonDeserialize( + using = WrappedLogDeliveryConfigurations.WrappedLogDeliveryConfigurationsDeserializer.class) public class WrappedLogDeliveryConfigurations { /** */ - @JsonProperty("log_delivery_configurations") private Collection logDeliveryConfigurations; public WrappedLogDeliveryConfigurations setLogDeliveryConfigurations( @@ -43,4 +55,42 @@ public String toString() { .add("logDeliveryConfigurations", logDeliveryConfigurations) .toString(); } + + WrappedLogDeliveryConfigurationsPb toPb() { + WrappedLogDeliveryConfigurationsPb pb = new WrappedLogDeliveryConfigurationsPb(); + pb.setLogDeliveryConfigurations(logDeliveryConfigurations); + + return pb; + } + + static WrappedLogDeliveryConfigurations fromPb(WrappedLogDeliveryConfigurationsPb pb) { + WrappedLogDeliveryConfigurations model = new WrappedLogDeliveryConfigurations(); + model.setLogDeliveryConfigurations(pb.getLogDeliveryConfigurations()); + + return model; + } + + public static class WrappedLogDeliveryConfigurationsSerializer + extends JsonSerializer { + @Override + public void serialize( + WrappedLogDeliveryConfigurations value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WrappedLogDeliveryConfigurationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
WrappedLogDeliveryConfigurationsDeserializer + extends JsonDeserializer { + @Override + public WrappedLogDeliveryConfigurations deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WrappedLogDeliveryConfigurationsPb pb = + mapper.readValue(p, WrappedLogDeliveryConfigurationsPb.class); + return WrappedLogDeliveryConfigurations.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationsPb.java new file mode 100755 index 000000000..2db5453f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurationsPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WrappedLogDeliveryConfigurationsPb { + @JsonProperty("log_delivery_configurations") + private Collection logDeliveryConfigurations; + + public WrappedLogDeliveryConfigurationsPb setLogDeliveryConfigurations( + Collection logDeliveryConfigurations) { + this.logDeliveryConfigurations = logDeliveryConfigurations; + return this; + } + + public Collection getLogDeliveryConfigurations() { + return logDeliveryConfigurations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WrappedLogDeliveryConfigurationsPb that = (WrappedLogDeliveryConfigurationsPb) o; + return Objects.equals(logDeliveryConfigurations, that.logDeliveryConfigurations); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfigurations); + } + + @Override + public String toString() { + return new ToStringer(WrappedLogDeliveryConfigurationsPb.class) + .add("logDeliveryConfigurations", logDeliveryConfigurations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java index ad7867175..44cd8aea7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java @@ -24,7 +24,7 @@ public void create(AccountsCreateMetastoreAssignment request) { apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); try { Request req = new Request("POST", 
path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CreateResponse.class); @@ -41,7 +41,7 @@ public void delete(DeleteAccountMetastoreAssignmentRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public AccountsMetastoreAssignment get(GetAccountMetastoreAssignmentRequest requ apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AccountsMetastoreAssignment.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public ListAccountMetastoreAssignmentsResponse list( apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAccountMetastoreAssignmentsResponse.class); } catch (IOException e) { @@ -90,7 +90,7 @@ public void update(AccountsUpdateMetastoreAssignment request) { apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java index b8a166770..9efb1f5ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java @@ -21,7 +21,7 @@ public AccountsMetastoreInfo create(AccountsCreateMetastore request) { String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountsMetastoreInfo.class); @@ -38,7 +38,7 @@ public void delete(DeleteAccountMetastoreRequest request) { apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -54,7 +54,7 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AccountsMetastoreInfo.class); } catch (IOException e) { @@ -82,7 +82,7 @@ public AccountsMetastoreInfo update(AccountsUpdateMetastore request) { apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountsMetastoreInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java index fdb1c75e5..8ad73fd73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java @@ -24,7 +24,7 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountsStorageCredentialInfo.class); @@ -43,7 +43,7 @@ public void delete(DeleteAccountStorageCredentialRequest request) { request.getStorageCredentialName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public AccountsStorageCredentialInfo get(GetAccountStorageCredentialRequest requ request.getStorageCredentialName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AccountsStorageCredentialInfo.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public ListAccountStorageCredentialsResponse 
list(ListAccountStorageCredentialsR apiClient.configuredAccountID(), request.getMetastoreId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAccountStorageCredentialsResponse.class); } catch (IOException e) { @@ -95,7 +95,7 @@ public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential requ request.getStorageCredentialName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountsStorageCredentialInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java index b297cbf98..f6e841741 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = AccountsCreateMetastore.AccountsCreateMetastoreSerializer.class) +@JsonDeserialize(using = AccountsCreateMetastore.AccountsCreateMetastoreDeserializer.class) public class AccountsCreateMetastore { /** */ - @JsonProperty("metastore_info") private CreateMetastore metastoreInfo; public AccountsCreateMetastore setMetastoreInfo(CreateMetastore metastoreInfo) { @@ -41,4 +51,41 @@ public String toString() { .add("metastoreInfo", metastoreInfo) .toString(); } + + AccountsCreateMetastorePb toPb() { + AccountsCreateMetastorePb pb = new AccountsCreateMetastorePb(); + pb.setMetastoreInfo(metastoreInfo); + + return pb; + } + + static AccountsCreateMetastore fromPb(AccountsCreateMetastorePb pb) { + AccountsCreateMetastore model = new AccountsCreateMetastore(); + model.setMetastoreInfo(pb.getMetastoreInfo()); + + return model; + } + + public static class AccountsCreateMetastoreSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsCreateMetastore value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsCreateMetastorePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsCreateMetastoreDeserializer + extends JsonDeserializer { + @Override + public AccountsCreateMetastore deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsCreateMetastorePb pb = mapper.readValue(p, AccountsCreateMetastorePb.class); + return AccountsCreateMetastore.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java index fa3e7a1e7..e06d8223f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AccountsCreateMetastoreAssignment.AccountsCreateMetastoreAssignmentSerializer.class) +@JsonDeserialize( + using = AccountsCreateMetastoreAssignment.AccountsCreateMetastoreAssignmentDeserializer.class) public class AccountsCreateMetastoreAssignment { /** */ - @JsonProperty("metastore_assignment") private CreateMetastoreAssignment metastoreAssignment; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public AccountsCreateMetastoreAssignment setMetastoreAssignment( CreateMetastoreAssignment metastoreAssignment) { @@ -71,4 +82,46 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + AccountsCreateMetastoreAssignmentPb toPb() { + AccountsCreateMetastoreAssignmentPb pb = new AccountsCreateMetastoreAssignmentPb(); + pb.setMetastoreAssignment(metastoreAssignment); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static AccountsCreateMetastoreAssignment fromPb(AccountsCreateMetastoreAssignmentPb pb) { + AccountsCreateMetastoreAssignment model = new AccountsCreateMetastoreAssignment(); + model.setMetastoreAssignment(pb.getMetastoreAssignment()); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class AccountsCreateMetastoreAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsCreateMetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsCreateMetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsCreateMetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public AccountsCreateMetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsCreateMetastoreAssignmentPb pb = + mapper.readValue(p, AccountsCreateMetastoreAssignmentPb.class); + return AccountsCreateMetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentPb.java new file mode 100755 index 000000000..0145fbd0f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsCreateMetastoreAssignmentPb { + @JsonProperty("metastore_assignment") + private CreateMetastoreAssignment metastoreAssignment; + + @JsonIgnore private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public AccountsCreateMetastoreAssignmentPb setMetastoreAssignment( + CreateMetastoreAssignment metastoreAssignment) { + this.metastoreAssignment = metastoreAssignment; + return this; + } + + public CreateMetastoreAssignment getMetastoreAssignment() { + return metastoreAssignment; + } + + public AccountsCreateMetastoreAssignmentPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public AccountsCreateMetastoreAssignmentPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsCreateMetastoreAssignmentPb that = (AccountsCreateMetastoreAssignmentPb) o; + return Objects.equals(metastoreAssignment, that.metastoreAssignment) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreAssignment, metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(AccountsCreateMetastoreAssignmentPb.class) + .add("metastoreAssignment", metastoreAssignment) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastorePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastorePb.java new file mode 100755 index 000000000..5aef01c4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastorePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsCreateMetastorePb { + @JsonProperty("metastore_info") + private CreateMetastore metastoreInfo; + + public AccountsCreateMetastorePb setMetastoreInfo(CreateMetastore metastoreInfo) { + this.metastoreInfo = metastoreInfo; + return this; + } + + public CreateMetastore getMetastoreInfo() { + return metastoreInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsCreateMetastorePb that = (AccountsCreateMetastorePb) o; + return Objects.equals(metastoreInfo, that.metastoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsCreateMetastorePb.class) + .add("metastoreInfo", metastoreInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java index c1c33ea9f..bb6199688 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AccountsCreateStorageCredential.AccountsCreateStorageCredentialSerializer.class) +@JsonDeserialize( + using = AccountsCreateStorageCredential.AccountsCreateStorageCredentialDeserializer.class) public class AccountsCreateStorageCredential { /** */ - @JsonProperty("credential_info") private CreateStorageCredential credentialInfo; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; public AccountsCreateStorageCredential setCredentialInfo(CreateStorageCredential credentialInfo) { this.credentialInfo = credentialInfo; @@ -56,4 +67,44 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + AccountsCreateStorageCredentialPb toPb() { + AccountsCreateStorageCredentialPb pb = new AccountsCreateStorageCredentialPb(); + pb.setCredentialInfo(credentialInfo); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static AccountsCreateStorageCredential fromPb(AccountsCreateStorageCredentialPb pb) { + AccountsCreateStorageCredential model = new AccountsCreateStorageCredential(); + model.setCredentialInfo(pb.getCredentialInfo()); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class AccountsCreateStorageCredentialSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsCreateStorageCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsCreateStorageCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
AccountsCreateStorageCredentialDeserializer + extends JsonDeserializer { + @Override + public AccountsCreateStorageCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsCreateStorageCredentialPb pb = + mapper.readValue(p, AccountsCreateStorageCredentialPb.class); + return AccountsCreateStorageCredential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialPb.java new file mode 100755 index 000000000..8400e9cd1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsCreateStorageCredentialPb { + @JsonProperty("credential_info") + private CreateStorageCredential credentialInfo; + + @JsonIgnore private String metastoreId; + + public AccountsCreateStorageCredentialPb setCredentialInfo( + CreateStorageCredential credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public CreateStorageCredential getCredentialInfo() { + return credentialInfo; + } + + public AccountsCreateStorageCredentialPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsCreateStorageCredentialPb that = (AccountsCreateStorageCredentialPb) o; + return Objects.equals(credentialInfo, that.credentialInfo) + && Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo, metastoreId); + } + + @Override + public String toString() { + return new ToStringer(AccountsCreateStorageCredentialPb.class) + .add("credentialInfo", credentialInfo) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java index bf989d674..9402a31f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountsMetastoreAssignment.AccountsMetastoreAssignmentSerializer.class) +@JsonDeserialize(using = AccountsMetastoreAssignment.AccountsMetastoreAssignmentDeserializer.class) public class AccountsMetastoreAssignment { /** */ - @JsonProperty("metastore_assignment") private MetastoreAssignment metastoreAssignment; public AccountsMetastoreAssignment setMetastoreAssignment( @@ -42,4 +52,41 @@ public String toString() { .add("metastoreAssignment", metastoreAssignment) .toString(); } + + AccountsMetastoreAssignmentPb toPb() { + AccountsMetastoreAssignmentPb pb = new AccountsMetastoreAssignmentPb(); + pb.setMetastoreAssignment(metastoreAssignment); + + return pb; + } + + static AccountsMetastoreAssignment fromPb(AccountsMetastoreAssignmentPb pb) { + AccountsMetastoreAssignment model = new AccountsMetastoreAssignment(); + model.setMetastoreAssignment(pb.getMetastoreAssignment()); + + return model; + } + + public static class AccountsMetastoreAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsMetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + AccountsMetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsMetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public AccountsMetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsMetastoreAssignmentPb pb = mapper.readValue(p, AccountsMetastoreAssignmentPb.class); + return AccountsMetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignmentPb.java new file mode 100755 index 000000000..d0f2aa60c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignmentPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsMetastoreAssignmentPb { + @JsonProperty("metastore_assignment") + private MetastoreAssignment metastoreAssignment; + + public AccountsMetastoreAssignmentPb setMetastoreAssignment( + MetastoreAssignment metastoreAssignment) { + this.metastoreAssignment = metastoreAssignment; + return this; + } + + public MetastoreAssignment getMetastoreAssignment() { + return metastoreAssignment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsMetastoreAssignmentPb that = (AccountsMetastoreAssignmentPb) o; + return Objects.equals(metastoreAssignment, that.metastoreAssignment); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreAssignment); + } + + @Override + public String toString() { + return new ToStringer(AccountsMetastoreAssignmentPb.class) + .add("metastoreAssignment", metastoreAssignment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java index 249aeb544..88ce30182 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountsMetastoreInfo.AccountsMetastoreInfoSerializer.class) +@JsonDeserialize(using = AccountsMetastoreInfo.AccountsMetastoreInfoDeserializer.class) public class AccountsMetastoreInfo { /** */ - @JsonProperty("metastore_info") private MetastoreInfo metastoreInfo; public AccountsMetastoreInfo setMetastoreInfo(MetastoreInfo metastoreInfo) { @@ -41,4 +51,41 @@ public String toString() { .add("metastoreInfo", metastoreInfo) .toString(); } + + AccountsMetastoreInfoPb toPb() { + AccountsMetastoreInfoPb pb = new AccountsMetastoreInfoPb(); + pb.setMetastoreInfo(metastoreInfo); + + return pb; + } + + static AccountsMetastoreInfo fromPb(AccountsMetastoreInfoPb pb) { + AccountsMetastoreInfo model = new AccountsMetastoreInfo(); + model.setMetastoreInfo(pb.getMetastoreInfo()); + + return model; + } + + public static class AccountsMetastoreInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsMetastoreInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsMetastoreInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsMetastoreInfoDeserializer + extends JsonDeserializer { + @Override + public AccountsMetastoreInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsMetastoreInfoPb pb = mapper.readValue(p, AccountsMetastoreInfoPb.class); + return AccountsMetastoreInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfoPb.java new file mode 100755 index 000000000..1023c2b5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfoPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsMetastoreInfoPb { + @JsonProperty("metastore_info") + private MetastoreInfo metastoreInfo; + + public AccountsMetastoreInfoPb setMetastoreInfo(MetastoreInfo metastoreInfo) { + this.metastoreInfo = metastoreInfo; + return this; + } + + public MetastoreInfo getMetastoreInfo() { + return metastoreInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsMetastoreInfoPb that = (AccountsMetastoreInfoPb) o; + return Objects.equals(metastoreInfo, that.metastoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsMetastoreInfoPb.class) + .add("metastoreInfo", metastoreInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java index 696342a98..af0956b2b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountsStorageCredentialInfo.AccountsStorageCredentialInfoSerializer.class) +@JsonDeserialize( + using = AccountsStorageCredentialInfo.AccountsStorageCredentialInfoDeserializer.class) public class AccountsStorageCredentialInfo { /** */ - @JsonProperty("credential_info") private StorageCredentialInfo credentialInfo; public AccountsStorageCredentialInfo setCredentialInfo(StorageCredentialInfo credentialInfo) { @@ -41,4 +52,42 @@ public String toString() { .add("credentialInfo", credentialInfo) .toString(); } + + AccountsStorageCredentialInfoPb toPb() { + AccountsStorageCredentialInfoPb pb = new AccountsStorageCredentialInfoPb(); + pb.setCredentialInfo(credentialInfo); + + return pb; + } + + static AccountsStorageCredentialInfo fromPb(AccountsStorageCredentialInfoPb pb) { + AccountsStorageCredentialInfo model = new AccountsStorageCredentialInfo(); + model.setCredentialInfo(pb.getCredentialInfo()); + + return model; + } + + public static class AccountsStorageCredentialInfoSerializer + extends 
JsonSerializer { + @Override + public void serialize( + AccountsStorageCredentialInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsStorageCredentialInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsStorageCredentialInfoDeserializer + extends JsonDeserializer { + @Override + public AccountsStorageCredentialInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsStorageCredentialInfoPb pb = + mapper.readValue(p, AccountsStorageCredentialInfoPb.class); + return AccountsStorageCredentialInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfoPb.java similarity index 59% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfoPb.java index 9dcaed06c..aacce43f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfoPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.ml; +package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -8,18 +8,16 @@ import java.util.Objects; @Generated -public class GetCredentialsForTraceDataUploadResponse { - /** The artifact upload credentials for the specified trace data. 
*/ +class AccountsStorageCredentialInfoPb { @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; + private StorageCredentialInfo credentialInfo; - public GetCredentialsForTraceDataUploadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { + public AccountsStorageCredentialInfoPb setCredentialInfo(StorageCredentialInfo credentialInfo) { this.credentialInfo = credentialInfo; return this; } - public ArtifactCredentialInfo getCredentialInfo() { + public StorageCredentialInfo getCredentialInfo() { return credentialInfo; } @@ -27,7 +25,7 @@ public ArtifactCredentialInfo getCredentialInfo() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadResponse that = (GetCredentialsForTraceDataUploadResponse) o; + AccountsStorageCredentialInfoPb that = (AccountsStorageCredentialInfoPb) o; return Objects.equals(credentialInfo, that.credentialInfo); } @@ -38,7 +36,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadResponse.class) + return new ToStringer(AccountsStorageCredentialInfoPb.class) .add("credentialInfo", credentialInfo) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java index 044d8c6f2..41cc9cdb2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountsUpdateMetastore.AccountsUpdateMetastoreSerializer.class) +@JsonDeserialize(using = AccountsUpdateMetastore.AccountsUpdateMetastoreDeserializer.class) public class AccountsUpdateMetastore { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** */ - @JsonProperty("metastore_info") private UpdateMetastore metastoreInfo; public AccountsUpdateMetastore setMetastoreId(String metastoreId) { @@ -56,4 +65,43 @@ public String toString() { .add("metastoreInfo", metastoreInfo) .toString(); } + + AccountsUpdateMetastorePb toPb() { + AccountsUpdateMetastorePb pb = new AccountsUpdateMetastorePb(); + pb.setMetastoreId(metastoreId); + pb.setMetastoreInfo(metastoreInfo); + + return pb; + } + + static AccountsUpdateMetastore fromPb(AccountsUpdateMetastorePb pb) { + AccountsUpdateMetastore model = new AccountsUpdateMetastore(); + model.setMetastoreId(pb.getMetastoreId()); + model.setMetastoreInfo(pb.getMetastoreInfo()); + + return model; + } + + public static class AccountsUpdateMetastoreSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsUpdateMetastore value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsUpdateMetastorePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsUpdateMetastoreDeserializer + extends JsonDeserializer { + @Override + public 
AccountsUpdateMetastore deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsUpdateMetastorePb pb = mapper.readValue(p, AccountsUpdateMetastorePb.class); + return AccountsUpdateMetastore.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java index 3ce7c6f48..38fce325a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AccountsUpdateMetastoreAssignment.AccountsUpdateMetastoreAssignmentSerializer.class) +@JsonDeserialize( + using = AccountsUpdateMetastoreAssignment.AccountsUpdateMetastoreAssignmentDeserializer.class) public class AccountsUpdateMetastoreAssignment { /** */ - @JsonProperty("metastore_assignment") private 
UpdateMetastoreAssignment metastoreAssignment; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public AccountsUpdateMetastoreAssignment setMetastoreAssignment( UpdateMetastoreAssignment metastoreAssignment) { @@ -71,4 +82,46 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + AccountsUpdateMetastoreAssignmentPb toPb() { + AccountsUpdateMetastoreAssignmentPb pb = new AccountsUpdateMetastoreAssignmentPb(); + pb.setMetastoreAssignment(metastoreAssignment); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static AccountsUpdateMetastoreAssignment fromPb(AccountsUpdateMetastoreAssignmentPb pb) { + AccountsUpdateMetastoreAssignment model = new AccountsUpdateMetastoreAssignment(); + model.setMetastoreAssignment(pb.getMetastoreAssignment()); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class AccountsUpdateMetastoreAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsUpdateMetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsUpdateMetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsUpdateMetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public AccountsUpdateMetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsUpdateMetastoreAssignmentPb pb = + mapper.readValue(p, AccountsUpdateMetastoreAssignmentPb.class); + return AccountsUpdateMetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentPb.java new file mode 100755 index 000000000..d884ee9f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsUpdateMetastoreAssignmentPb { + @JsonProperty("metastore_assignment") + private UpdateMetastoreAssignment metastoreAssignment; + + @JsonIgnore private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public AccountsUpdateMetastoreAssignmentPb setMetastoreAssignment( + UpdateMetastoreAssignment metastoreAssignment) { + this.metastoreAssignment = metastoreAssignment; + return this; + } + + public UpdateMetastoreAssignment getMetastoreAssignment() { + return metastoreAssignment; + } + + public AccountsUpdateMetastoreAssignmentPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public AccountsUpdateMetastoreAssignmentPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsUpdateMetastoreAssignmentPb that = (AccountsUpdateMetastoreAssignmentPb) o; + return Objects.equals(metastoreAssignment, that.metastoreAssignment) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreAssignment, metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateMetastoreAssignmentPb.class) + .add("metastoreAssignment", metastoreAssignment) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastorePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastorePb.java new file mode 100755 index 000000000..abce302d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastorePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsUpdateMetastorePb { + @JsonIgnore private String metastoreId; + + @JsonProperty("metastore_info") + private UpdateMetastore metastoreInfo; + + public AccountsUpdateMetastorePb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public AccountsUpdateMetastorePb setMetastoreInfo(UpdateMetastore metastoreInfo) { + this.metastoreInfo = metastoreInfo; + return this; + } + + public UpdateMetastore getMetastoreInfo() { + return metastoreInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsUpdateMetastorePb that = (AccountsUpdateMetastorePb) o; + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(metastoreInfo, that.metastoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId, metastoreInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateMetastorePb.class) + .add("metastoreId", metastoreId) + .add("metastoreInfo", metastoreInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java index bca8ab349..3ddbe2410 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java @@ -4,21 +4,32 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AccountsUpdateStorageCredential.AccountsUpdateStorageCredentialSerializer.class) +@JsonDeserialize( + using = AccountsUpdateStorageCredential.AccountsUpdateStorageCredentialDeserializer.class) public class AccountsUpdateStorageCredential { /** */ - @JsonProperty("credential_info") private UpdateStorageCredential credentialInfo; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Name of the storage credential. 
*/ - @JsonIgnore private String storageCredentialName; + private String storageCredentialName; public AccountsUpdateStorageCredential setCredentialInfo(UpdateStorageCredential credentialInfo) { this.credentialInfo = credentialInfo; @@ -70,4 +81,46 @@ public String toString() { .add("storageCredentialName", storageCredentialName) .toString(); } + + AccountsUpdateStorageCredentialPb toPb() { + AccountsUpdateStorageCredentialPb pb = new AccountsUpdateStorageCredentialPb(); + pb.setCredentialInfo(credentialInfo); + pb.setMetastoreId(metastoreId); + pb.setStorageCredentialName(storageCredentialName); + + return pb; + } + + static AccountsUpdateStorageCredential fromPb(AccountsUpdateStorageCredentialPb pb) { + AccountsUpdateStorageCredential model = new AccountsUpdateStorageCredential(); + model.setCredentialInfo(pb.getCredentialInfo()); + model.setMetastoreId(pb.getMetastoreId()); + model.setStorageCredentialName(pb.getStorageCredentialName()); + + return model; + } + + public static class AccountsUpdateStorageCredentialSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountsUpdateStorageCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountsUpdateStorageCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountsUpdateStorageCredentialDeserializer + extends JsonDeserializer { + @Override + public AccountsUpdateStorageCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountsUpdateStorageCredentialPb pb = + mapper.readValue(p, AccountsUpdateStorageCredentialPb.class); + return AccountsUpdateStorageCredential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialPb.java new file mode 100755 index 000000000..55bc3ddff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountsUpdateStorageCredentialPb { + @JsonProperty("credential_info") + private UpdateStorageCredential credentialInfo; + + @JsonIgnore private String metastoreId; + + @JsonIgnore private String storageCredentialName; + + public AccountsUpdateStorageCredentialPb setCredentialInfo( + UpdateStorageCredential credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public UpdateStorageCredential getCredentialInfo() { + return credentialInfo; + } + + public AccountsUpdateStorageCredentialPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public AccountsUpdateStorageCredentialPb setStorageCredentialName(String storageCredentialName) { + this.storageCredentialName = storageCredentialName; + return this; + } + + public String getStorageCredentialName() { + return storageCredentialName; + } + + @Override + public boolean equals(Object o) 
{ + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsUpdateStorageCredentialPb that = (AccountsUpdateStorageCredentialPb) o; + return Objects.equals(credentialInfo, that.credentialInfo) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(storageCredentialName, that.storageCredentialName); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo, metastoreId, storageCredentialName); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateStorageCredentialPb.class) + .add("credentialInfo", credentialInfo) + .add("metastoreId", metastoreId) + .add("storageCredentialName", storageCredentialName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java index 51acd4d38..d98a1f931 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ArtifactAllowlistInfo.ArtifactAllowlistInfoSerializer.class) +@JsonDeserialize(using = ArtifactAllowlistInfo.ArtifactAllowlistInfoDeserializer.class) public class ArtifactAllowlistInfo { /** A list of allowed artifact match patterns. */ - @JsonProperty("artifact_matchers") private Collection artifactMatchers; /** Time at which this artifact allowlist was set, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of the user who set the artifact allowlist. */ - @JsonProperty("created_by") private String createdBy; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; public ArtifactAllowlistInfo setArtifactMatchers(Collection artifactMatchers) { @@ -87,4 +94,47 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + ArtifactAllowlistInfoPb toPb() { + ArtifactAllowlistInfoPb pb = new ArtifactAllowlistInfoPb(); + pb.setArtifactMatchers(artifactMatchers); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static ArtifactAllowlistInfo fromPb(ArtifactAllowlistInfoPb pb) { + ArtifactAllowlistInfo model = new ArtifactAllowlistInfo(); + model.setArtifactMatchers(pb.getArtifactMatchers()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class ArtifactAllowlistInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + ArtifactAllowlistInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ArtifactAllowlistInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ArtifactAllowlistInfoDeserializer + extends JsonDeserializer { + @Override + public ArtifactAllowlistInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ArtifactAllowlistInfoPb pb = mapper.readValue(p, ArtifactAllowlistInfoPb.class); + return ArtifactAllowlistInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfoPb.java new file mode 100755 index 000000000..aacbf4e1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfoPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ArtifactAllowlistInfoPb { + @JsonProperty("artifact_matchers") + private Collection artifactMatchers; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("metastore_id") + private String metastoreId; + + public ArtifactAllowlistInfoPb setArtifactMatchers(Collection artifactMatchers) { + this.artifactMatchers = artifactMatchers; + return this; + } + + public Collection getArtifactMatchers() { + return artifactMatchers; + } + + public ArtifactAllowlistInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ArtifactAllowlistInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ArtifactAllowlistInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() 
{ + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactAllowlistInfoPb that = (ArtifactAllowlistInfoPb) o; + return Objects.equals(artifactMatchers, that.artifactMatchers) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(artifactMatchers, createdAt, createdBy, metastoreId); + } + + @Override + public String toString() { + return new ToStringer(ArtifactAllowlistInfoPb.class) + .add("artifactMatchers", artifactMatchers) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java index 56ef167fa..3d66995d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java @@ -22,7 +22,7 @@ public ArtifactAllowlistInfo get(GetArtifactAllowlistRequest request) { String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ArtifactAllowlistInfo.class); } catch (IOException e) { @@ -36,7 +36,7 @@ public ArtifactAllowlistInfo update(SetArtifactAllowlist request) { String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, 
request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ArtifactAllowlistInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java index 22cb8d72c..fa08c8c02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ArtifactMatcher.ArtifactMatcherSerializer.class) +@JsonDeserialize(using = ArtifactMatcher.ArtifactMatcherDeserializer.class) public class ArtifactMatcher { /** The artifact path or maven coordinate */ - @JsonProperty("artifact") private String artifact; /** The pattern matching type of the artifact */ - @JsonProperty("match_type") private MatchType matchType; public ArtifactMatcher setArtifact(String artifact) { @@ -55,4 +64,40 @@ public String toString() { .add("matchType", matchType) .toString(); } + + ArtifactMatcherPb toPb() { + ArtifactMatcherPb pb = new ArtifactMatcherPb(); + 
pb.setArtifact(artifact); + pb.setMatchType(matchType); + + return pb; + } + + static ArtifactMatcher fromPb(ArtifactMatcherPb pb) { + ArtifactMatcher model = new ArtifactMatcher(); + model.setArtifact(pb.getArtifact()); + model.setMatchType(pb.getMatchType()); + + return model; + } + + public static class ArtifactMatcherSerializer extends JsonSerializer { + @Override + public void serialize(ArtifactMatcher value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ArtifactMatcherPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ArtifactMatcherDeserializer extends JsonDeserializer { + @Override + public ArtifactMatcher deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ArtifactMatcherPb pb = mapper.readValue(p, ArtifactMatcherPb.class); + return ArtifactMatcher.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcherPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcherPb.java new file mode 100755 index 000000000..b3c06de99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcherPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ArtifactMatcherPb { + @JsonProperty("artifact") + private String artifact; + + @JsonProperty("match_type") + private MatchType matchType; + + public ArtifactMatcherPb setArtifact(String artifact) { + this.artifact = artifact; + return this; + } + + public String getArtifact() { + return artifact; + } + + public ArtifactMatcherPb setMatchType(MatchType matchType) { + this.matchType = matchType; + return this; + } + + public MatchType getMatchType() { + return matchType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactMatcherPb that = (ArtifactMatcherPb) o; + return Objects.equals(artifact, that.artifact) && Objects.equals(matchType, that.matchType); + } + + @Override + public int hashCode() { + return Objects.hash(artifact, matchType); + } + + @Override + public String toString() { + return new ToStringer(ArtifactMatcherPb.class) + .add("artifact", artifact) + .add("matchType", matchType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java index d186a2af0..f592ac823 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AssignResponse.AssignResponseSerializer.class) +@JsonDeserialize(using = AssignResponse.AssignResponseDeserializer.class) public class AssignResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(AssignResponse.class).toString(); } + + AssignResponsePb toPb() { + AssignResponsePb pb = new AssignResponsePb(); + + return pb; + } + + static AssignResponse fromPb(AssignResponsePb pb) { + AssignResponse model = new AssignResponse(); + + return model; + } + + public static class AssignResponseSerializer extends JsonSerializer { + @Override + public void serialize(AssignResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AssignResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AssignResponseDeserializer extends JsonDeserializer { + @Override + public AssignResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AssignResponsePb pb = mapper.readValue(p, AssignResponsePb.class); + return AssignResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponsePb.java similarity index 81% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponsePb.java index 17de1764a..4330ce18d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponsePb.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class DeleteDatabaseCatalogResponse { +class AssignResponsePb { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteDatabaseCatalogResponse.class).toString(); + return new ToStringer(AssignResponsePb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java index 84389456a..81330de95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,24 +21,22 @@ * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. */ @Generated +@JsonSerialize(using = AwsCredentials.AwsCredentialsSerializer.class) +@JsonDeserialize(using = AwsCredentials.AwsCredentialsDeserializer.class) public class AwsCredentials { /** The access key ID that identifies the temporary credentials. */ - @JsonProperty("access_key_id") private String accessKeyId; /** * The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related the * external location. */ - @JsonProperty("access_point") private String accessPoint; /** The secret access key that can be used to sign AWS API requests. */ - @JsonProperty("secret_access_key") private String secretAccessKey; /** The token that users must pass to AWS API to use the temporary credentials. 
*/ - @JsonProperty("session_token") private String sessionToken; public AwsCredentials setAccessKeyId(String accessKeyId) { @@ -93,4 +100,44 @@ public String toString() { .add("sessionToken", sessionToken) .toString(); } + + AwsCredentialsPb toPb() { + AwsCredentialsPb pb = new AwsCredentialsPb(); + pb.setAccessKeyId(accessKeyId); + pb.setAccessPoint(accessPoint); + pb.setSecretAccessKey(secretAccessKey); + pb.setSessionToken(sessionToken); + + return pb; + } + + static AwsCredentials fromPb(AwsCredentialsPb pb) { + AwsCredentials model = new AwsCredentials(); + model.setAccessKeyId(pb.getAccessKeyId()); + model.setAccessPoint(pb.getAccessPoint()); + model.setSecretAccessKey(pb.getSecretAccessKey()); + model.setSessionToken(pb.getSessionToken()); + + return model; + } + + public static class AwsCredentialsSerializer extends JsonSerializer { + @Override + public void serialize(AwsCredentials value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsCredentialsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsCredentialsDeserializer extends JsonDeserializer { + @Override + public AwsCredentials deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsCredentialsPb pb = mapper.readValue(p, AwsCredentialsPb.class); + return AwsCredentials.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentialsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentialsPb.java new file mode 100755 index 000000000..ee6a13d82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentialsPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * AWS temporary credentials for API authentication. Read more at + * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. + */ +@Generated +class AwsCredentialsPb { + @JsonProperty("access_key_id") + private String accessKeyId; + + @JsonProperty("access_point") + private String accessPoint; + + @JsonProperty("secret_access_key") + private String secretAccessKey; + + @JsonProperty("session_token") + private String sessionToken; + + public AwsCredentialsPb setAccessKeyId(String accessKeyId) { + this.accessKeyId = accessKeyId; + return this; + } + + public String getAccessKeyId() { + return accessKeyId; + } + + public AwsCredentialsPb setAccessPoint(String accessPoint) { + this.accessPoint = accessPoint; + return this; + } + + public String getAccessPoint() { + return accessPoint; + } + + public AwsCredentialsPb setSecretAccessKey(String secretAccessKey) { + this.secretAccessKey = secretAccessKey; + return this; + } + + public String getSecretAccessKey() { + return secretAccessKey; + } + + public AwsCredentialsPb setSessionToken(String sessionToken) { + this.sessionToken = sessionToken; + return this; + } + + public String getSessionToken() { + return sessionToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsCredentialsPb that = (AwsCredentialsPb) o; + return Objects.equals(accessKeyId, that.accessKeyId) + && Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(secretAccessKey, that.secretAccessKey) + && Objects.equals(sessionToken, that.sessionToken); + } + + @Override + public int hashCode() { + return Objects.hash(accessKeyId, accessPoint, secretAccessKey, sessionToken); + } + + @Override + public 
String toString() { + return new ToStringer(AwsCredentialsPb.class) + .add("accessKeyId", accessKeyId) + .add("accessPoint", accessPoint) + .add("secretAccessKey", secretAccessKey) + .add("sessionToken", sessionToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java index 628bed840..e5bfc7bab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The AWS IAM role configuration */ @Generated +@JsonSerialize(using = AwsIamRole.AwsIamRoleSerializer.class) +@JsonDeserialize(using = AwsIamRole.AwsIamRoleDeserializer.class) public class AwsIamRole { /** The external ID used in role assumption to prevent the confused deputy problem. */ - @JsonProperty("external_id") private String externalId; /** The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials. */ - @JsonProperty("role_arn") private String roleArn; /** * The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. 
This is the identity * that is going to assume the AWS IAM role. */ - @JsonProperty("unity_catalog_iam_arn") private String unityCatalogIamArn; public AwsIamRole setExternalId(String externalId) { @@ -75,4 +83,41 @@ public String toString() { .add("unityCatalogIamArn", unityCatalogIamArn) .toString(); } + + AwsIamRolePb toPb() { + AwsIamRolePb pb = new AwsIamRolePb(); + pb.setExternalId(externalId); + pb.setRoleArn(roleArn); + pb.setUnityCatalogIamArn(unityCatalogIamArn); + + return pb; + } + + static AwsIamRole fromPb(AwsIamRolePb pb) { + AwsIamRole model = new AwsIamRole(); + model.setExternalId(pb.getExternalId()); + model.setRoleArn(pb.getRoleArn()); + model.setUnityCatalogIamArn(pb.getUnityCatalogIamArn()); + + return model; + } + + public static class AwsIamRoleSerializer extends JsonSerializer { + @Override + public void serialize(AwsIamRole value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsIamRolePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsIamRoleDeserializer extends JsonDeserializer { + @Override + public AwsIamRole deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsIamRolePb pb = mapper.readValue(p, AwsIamRolePb.class); + return AwsIamRole.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRolePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRolePb.java new file mode 100755 index 000000000..0ee00123a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRolePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The AWS IAM role configuration */ +@Generated +class AwsIamRolePb { + @JsonProperty("external_id") + private String externalId; + + @JsonProperty("role_arn") + private String roleArn; + + @JsonProperty("unity_catalog_iam_arn") + private String unityCatalogIamArn; + + public AwsIamRolePb setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public AwsIamRolePb setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + public AwsIamRolePb setUnityCatalogIamArn(String unityCatalogIamArn) { + this.unityCatalogIamArn = unityCatalogIamArn; + return this; + } + + public String getUnityCatalogIamArn() { + return unityCatalogIamArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRolePb that = (AwsIamRolePb) o; + return Objects.equals(externalId, that.externalId) + && Objects.equals(roleArn, that.roleArn) + && Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn); + } + + @Override + public int hashCode() { + return Objects.hash(externalId, roleArn, unityCatalogIamArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRolePb.class) + .add("externalId", externalId) + .add("roleArn", roleArn) + .add("unityCatalogIamArn", unityCatalogIamArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java index 2eac19555..b09d7be45 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AwsIamRoleRequest.AwsIamRoleRequestSerializer.class) +@JsonDeserialize(using = AwsIamRoleRequest.AwsIamRoleRequestDeserializer.class) public class AwsIamRoleRequest { /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. 
*/ - @JsonProperty("role_arn") private String roleArn; public AwsIamRoleRequest setRoleArn(String roleArn) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(AwsIamRoleRequest.class).add("roleArn", roleArn).toString(); } + + AwsIamRoleRequestPb toPb() { + AwsIamRoleRequestPb pb = new AwsIamRoleRequestPb(); + pb.setRoleArn(roleArn); + + return pb; + } + + static AwsIamRoleRequest fromPb(AwsIamRoleRequestPb pb) { + AwsIamRoleRequest model = new AwsIamRoleRequest(); + model.setRoleArn(pb.getRoleArn()); + + return model; + } + + public static class AwsIamRoleRequestSerializer extends JsonSerializer { + @Override + public void serialize(AwsIamRoleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsIamRoleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsIamRoleRequestDeserializer extends JsonDeserializer { + @Override + public AwsIamRoleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsIamRoleRequestPb pb = mapper.readValue(p, AwsIamRoleRequestPb.class); + return AwsIamRoleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequestPb.java new file mode 100755 index 000000000..d03143ef1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AwsIamRoleRequestPb { + @JsonProperty("role_arn") + private String roleArn; + + public AwsIamRoleRequestPb setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRoleRequestPb that = (AwsIamRoleRequestPb) o; + return Objects.equals(roleArn, that.roleArn); + } + + @Override + public int hashCode() { + return Objects.hash(roleArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRoleRequestPb.class).add("roleArn", roleArn).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java index 50420d04a..f43329d6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AwsIamRoleResponse.AwsIamRoleResponseSerializer.class) +@JsonDeserialize(using = AwsIamRoleResponse.AwsIamRoleResponseDeserializer.class) public class AwsIamRoleResponse { /** The external ID used in role assumption to prevent confused deputy problem.. */ - @JsonProperty("external_id") private String externalId; /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. */ - @JsonProperty("role_arn") private String roleArn; /** * The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity * that is going to assume the AWS IAM role. */ - @JsonProperty("unity_catalog_iam_arn") private String unityCatalogIamArn; public AwsIamRoleResponse setExternalId(String externalId) { @@ -74,4 +82,42 @@ public String toString() { .add("unityCatalogIamArn", unityCatalogIamArn) .toString(); } + + AwsIamRoleResponsePb toPb() { + AwsIamRoleResponsePb pb = new AwsIamRoleResponsePb(); + pb.setExternalId(externalId); + pb.setRoleArn(roleArn); + pb.setUnityCatalogIamArn(unityCatalogIamArn); + + return pb; + } + + static AwsIamRoleResponse fromPb(AwsIamRoleResponsePb pb) { + AwsIamRoleResponse model = new AwsIamRoleResponse(); + model.setExternalId(pb.getExternalId()); + model.setRoleArn(pb.getRoleArn()); + model.setUnityCatalogIamArn(pb.getUnityCatalogIamArn()); + + return model; + } + + public static class AwsIamRoleResponseSerializer extends JsonSerializer { + @Override + public void serialize(AwsIamRoleResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsIamRoleResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsIamRoleResponseDeserializer extends JsonDeserializer { + @Override + public AwsIamRoleResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec 
is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsIamRoleResponsePb pb = mapper.readValue(p, AwsIamRoleResponsePb.class); + return AwsIamRoleResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponsePb.java new file mode 100755 index 000000000..a34beb2d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AwsIamRoleResponsePb { + @JsonProperty("external_id") + private String externalId; + + @JsonProperty("role_arn") + private String roleArn; + + @JsonProperty("unity_catalog_iam_arn") + private String unityCatalogIamArn; + + public AwsIamRoleResponsePb setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public AwsIamRoleResponsePb setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + public AwsIamRoleResponsePb setUnityCatalogIamArn(String unityCatalogIamArn) { + this.unityCatalogIamArn = unityCatalogIamArn; + return this; + } + + public String getUnityCatalogIamArn() { + return unityCatalogIamArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRoleResponsePb that = (AwsIamRoleResponsePb) o; + return Objects.equals(externalId, that.externalId) + && 
Objects.equals(roleArn, that.roleArn) + && Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn); + } + + @Override + public int hashCode() { + return Objects.hash(externalId, roleArn, unityCatalogIamArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRoleResponsePb.class) + .add("externalId", externalId) + .add("roleArn", roleArn) + .add("unityCatalogIamArn", unityCatalogIamArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java index ebf035131..8f441274b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AwsSqsQueue.AwsSqsQueueSerializer.class) +@JsonDeserialize(using = AwsSqsQueue.AwsSqsQueueDeserializer.class) public class AwsSqsQueue { /** Unique identifier included in the name of file events managed cloud resources. 
*/ - @JsonProperty("managed_resource_id") private String managedResourceId; /** * The AQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} * REQUIRED for provided_sqs. */ - @JsonProperty("queue_url") private String queueUrl; public AwsSqsQueue setManagedResourceId(String managedResourceId) { @@ -59,4 +68,39 @@ public String toString() { .add("queueUrl", queueUrl) .toString(); } + + AwsSqsQueuePb toPb() { + AwsSqsQueuePb pb = new AwsSqsQueuePb(); + pb.setManagedResourceId(managedResourceId); + pb.setQueueUrl(queueUrl); + + return pb; + } + + static AwsSqsQueue fromPb(AwsSqsQueuePb pb) { + AwsSqsQueue model = new AwsSqsQueue(); + model.setManagedResourceId(pb.getManagedResourceId()); + model.setQueueUrl(pb.getQueueUrl()); + + return model; + } + + public static class AwsSqsQueueSerializer extends JsonSerializer { + @Override + public void serialize(AwsSqsQueue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsSqsQueuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsSqsQueueDeserializer extends JsonDeserializer { + @Override + public AwsSqsQueue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsSqsQueuePb pb = mapper.readValue(p, AwsSqsQueuePb.class); + return AwsSqsQueue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueuePb.java new file mode 100755 index 000000000..654fb9c84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueuePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AwsSqsQueuePb { + @JsonProperty("managed_resource_id") + private String managedResourceId; + + @JsonProperty("queue_url") + private String queueUrl; + + public AwsSqsQueuePb setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public AwsSqsQueuePb setQueueUrl(String queueUrl) { + this.queueUrl = queueUrl; + return this; + } + + public String getQueueUrl() { + return queueUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsSqsQueuePb that = (AwsSqsQueuePb) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(queueUrl, that.queueUrl); + } + + @Override + public int hashCode() { + return Objects.hash(managedResourceId, queueUrl); + } + + @Override + public String toString() { + return new ToStringer(AwsSqsQueuePb.class) + .add("managedResourceId", managedResourceId) + .add("queueUrl", queueUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java index b545ea991..347b0ea6d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,12 +22,13 @@ * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token */ @Generated +@JsonSerialize(using = AzureActiveDirectoryToken.AzureActiveDirectoryTokenSerializer.class) +@JsonDeserialize(using = AzureActiveDirectoryToken.AzureActiveDirectoryTokenDeserializer.class) public class AzureActiveDirectoryToken { /** * Opaque token that contains claims that you can use in Azure Active Directory to access cloud * services. 
*/ - @JsonProperty("aad_token") private String aadToken; public AzureActiveDirectoryToken setAadToken(String aadToken) { @@ -47,4 +57,41 @@ public int hashCode() { public String toString() { return new ToStringer(AzureActiveDirectoryToken.class).add("aadToken", aadToken).toString(); } + + AzureActiveDirectoryTokenPb toPb() { + AzureActiveDirectoryTokenPb pb = new AzureActiveDirectoryTokenPb(); + pb.setAadToken(aadToken); + + return pb; + } + + static AzureActiveDirectoryToken fromPb(AzureActiveDirectoryTokenPb pb) { + AzureActiveDirectoryToken model = new AzureActiveDirectoryToken(); + model.setAadToken(pb.getAadToken()); + + return model; + } + + public static class AzureActiveDirectoryTokenSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureActiveDirectoryToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureActiveDirectoryTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureActiveDirectoryTokenDeserializer + extends JsonDeserializer { + @Override + public AzureActiveDirectoryToken deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureActiveDirectoryTokenPb pb = mapper.readValue(p, AzureActiveDirectoryTokenPb.class); + return AzureActiveDirectoryToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryTokenPb.java new file mode 100755 index 000000000..7166be54d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryTokenPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + * Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ +@Generated +class AzureActiveDirectoryTokenPb { + @JsonProperty("aad_token") + private String aadToken; + + public AzureActiveDirectoryTokenPb setAadToken(String aadToken) { + this.aadToken = aadToken; + return this; + } + + public String getAadToken() { + return aadToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureActiveDirectoryTokenPb that = (AzureActiveDirectoryTokenPb) o; + return Objects.equals(aadToken, that.aadToken); + } + + @Override + public int hashCode() { + return Objects.hash(aadToken); + } + + @Override + public String toString() { + return new ToStringer(AzureActiveDirectoryTokenPb.class).add("aadToken", aadToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java index be3997823..7312e770c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The Azure managed identity configuration. */ @Generated +@JsonSerialize(using = AzureManagedIdentity.AzureManagedIdentitySerializer.class) +@JsonDeserialize(using = AzureManagedIdentity.AzureManagedIdentityDeserializer.class) public class AzureManagedIdentity { /** * The Azure resource ID of the Azure Databricks Access Connector. Use the format * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`. */ - @JsonProperty("access_connector_id") private String accessConnectorId; /** @@ -22,7 +32,6 @@ public class AzureManagedIdentity { * persist the credential_id once it is fetched from the credentials manager - as we only use the * protobuf serializer to store credentials, this ID gets persisted to the database. . */ - @JsonProperty("credential_id") private String credentialId; /** @@ -32,7 +41,6 @@ public class AzureManagedIdentity { * access_connector_id is used to identify the identity. If this field is not provided, then we * assume the AzureManagedIdentity is using the system-assigned identity. 
*/ - @JsonProperty("managed_identity_id") private String managedIdentityId; public AzureManagedIdentity setAccessConnectorId(String accessConnectorId) { @@ -85,4 +93,44 @@ public String toString() { .add("managedIdentityId", managedIdentityId) .toString(); } + + AzureManagedIdentityPb toPb() { + AzureManagedIdentityPb pb = new AzureManagedIdentityPb(); + pb.setAccessConnectorId(accessConnectorId); + pb.setCredentialId(credentialId); + pb.setManagedIdentityId(managedIdentityId); + + return pb; + } + + static AzureManagedIdentity fromPb(AzureManagedIdentityPb pb) { + AzureManagedIdentity model = new AzureManagedIdentity(); + model.setAccessConnectorId(pb.getAccessConnectorId()); + model.setCredentialId(pb.getCredentialId()); + model.setManagedIdentityId(pb.getManagedIdentityId()); + + return model; + } + + public static class AzureManagedIdentitySerializer extends JsonSerializer { + @Override + public void serialize( + AzureManagedIdentity value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureManagedIdentityPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureManagedIdentityDeserializer + extends JsonDeserializer { + @Override + public AzureManagedIdentity deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureManagedIdentityPb pb = mapper.readValue(p, AzureManagedIdentityPb.class); + return AzureManagedIdentity.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityPb.java new file mode 100755 index 000000000..78b488a1a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Azure managed identity configuration. */ +@Generated +class AzureManagedIdentityPb { + @JsonProperty("access_connector_id") + private String accessConnectorId; + + @JsonProperty("credential_id") + private String credentialId; + + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentityPb setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentityPb setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public AzureManagedIdentityPb setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureManagedIdentityPb 
that = (AzureManagedIdentityPb) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(credentialId, that.credentialId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + public int hashCode() { + return Objects.hash(accessConnectorId, credentialId, managedIdentityId); + } + + @Override + public String toString() { + return new ToStringer(AzureManagedIdentityPb.class) + .add("accessConnectorId", accessConnectorId) + .add("credentialId", credentialId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java index 36122a638..d6ca7c3f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AzureManagedIdentityRequest.AzureManagedIdentityRequestSerializer.class) +@JsonDeserialize(using = 
AzureManagedIdentityRequest.AzureManagedIdentityRequestDeserializer.class) public class AzureManagedIdentityRequest { /** * The Azure resource ID of the Azure Databricks Access Connector. Use the format * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}. */ - @JsonProperty("access_connector_id") private String accessConnectorId; /** @@ -23,7 +33,6 @@ public class AzureManagedIdentityRequest { * access_connector_id is used to identify the identity. If this field is not provided, then we * assume the AzureManagedIdentity is for a system-assigned identity. */ - @JsonProperty("managed_identity_id") private String managedIdentityId; public AzureManagedIdentityRequest setAccessConnectorId(String accessConnectorId) { @@ -65,4 +74,43 @@ public String toString() { .add("managedIdentityId", managedIdentityId) .toString(); } + + AzureManagedIdentityRequestPb toPb() { + AzureManagedIdentityRequestPb pb = new AzureManagedIdentityRequestPb(); + pb.setAccessConnectorId(accessConnectorId); + pb.setManagedIdentityId(managedIdentityId); + + return pb; + } + + static AzureManagedIdentityRequest fromPb(AzureManagedIdentityRequestPb pb) { + AzureManagedIdentityRequest model = new AzureManagedIdentityRequest(); + model.setAccessConnectorId(pb.getAccessConnectorId()); + model.setManagedIdentityId(pb.getManagedIdentityId()); + + return model; + } + + public static class AzureManagedIdentityRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureManagedIdentityRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureManagedIdentityRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureManagedIdentityRequestDeserializer + extends JsonDeserializer { + @Override + public AzureManagedIdentityRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureManagedIdentityRequestPb pb = mapper.readValue(p, AzureManagedIdentityRequestPb.class); + return AzureManagedIdentityRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequestPb.java new file mode 100755 index 000000000..9f3043673 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AzureManagedIdentityRequestPb { + @JsonProperty("access_connector_id") + private String accessConnectorId; + + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentityRequestPb setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentityRequestPb setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureManagedIdentityRequestPb that = (AzureManagedIdentityRequestPb) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + 
public int hashCode() { + return Objects.hash(accessConnectorId, managedIdentityId); + } + + @Override + public String toString() { + return new ToStringer(AzureManagedIdentityRequestPb.class) + .add("accessConnectorId", accessConnectorId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java index 91fbf7d9c..2e1a62cd9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java @@ -4,20 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AzureManagedIdentityResponse.AzureManagedIdentityResponseSerializer.class) +@JsonDeserialize( + using = AzureManagedIdentityResponse.AzureManagedIdentityResponseDeserializer.class) public class AzureManagedIdentityResponse { /** * The Azure resource ID of the Azure Databricks Access Connector. Use the format * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}. 
*/ - @JsonProperty("access_connector_id") private String accessConnectorId; /** The Databricks internal ID that represents this managed identity. */ - @JsonProperty("credential_id") private String credentialId; /** @@ -27,7 +37,6 @@ public class AzureManagedIdentityResponse { * access_connector_id is used to identify the identity. If this field is not provided, then we * assume the AzureManagedIdentity is for a system-assigned identity. */ - @JsonProperty("managed_identity_id") private String managedIdentityId; public AzureManagedIdentityResponse setAccessConnectorId(String accessConnectorId) { @@ -80,4 +89,45 @@ public String toString() { .add("managedIdentityId", managedIdentityId) .toString(); } + + AzureManagedIdentityResponsePb toPb() { + AzureManagedIdentityResponsePb pb = new AzureManagedIdentityResponsePb(); + pb.setAccessConnectorId(accessConnectorId); + pb.setCredentialId(credentialId); + pb.setManagedIdentityId(managedIdentityId); + + return pb; + } + + static AzureManagedIdentityResponse fromPb(AzureManagedIdentityResponsePb pb) { + AzureManagedIdentityResponse model = new AzureManagedIdentityResponse(); + model.setAccessConnectorId(pb.getAccessConnectorId()); + model.setCredentialId(pb.getCredentialId()); + model.setManagedIdentityId(pb.getManagedIdentityId()); + + return model; + } + + public static class AzureManagedIdentityResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureManagedIdentityResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureManagedIdentityResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureManagedIdentityResponseDeserializer + extends JsonDeserializer { + @Override + public AzureManagedIdentityResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureManagedIdentityResponsePb pb = mapper.readValue(p, AzureManagedIdentityResponsePb.class); + return AzureManagedIdentityResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponsePb.java new file mode 100755 index 000000000..81246c341 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AzureManagedIdentityResponsePb { + @JsonProperty("access_connector_id") + private String accessConnectorId; + + @JsonProperty("credential_id") + private String credentialId; + + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentityResponsePb setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentityResponsePb setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public AzureManagedIdentityResponsePb setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + AzureManagedIdentityResponsePb that = (AzureManagedIdentityResponsePb) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(credentialId, that.credentialId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + public int hashCode() { + return Objects.hash(accessConnectorId, credentialId, managedIdentityId); + } + + @Override + public String toString() { + return new ToStringer(AzureManagedIdentityResponsePb.class) + .add("accessConnectorId", accessConnectorId) + .add("credentialId", credentialId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java index 060de1960..d16208b88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java @@ -4,34 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AzureQueueStorage.AzureQueueStorageSerializer.class) +@JsonDeserialize(using = AzureQueueStorage.AzureQueueStorageDeserializer.class) public class 
AzureQueueStorage { /** Unique identifier included in the name of file events managed cloud resources. */ - @JsonProperty("managed_resource_id") private String managedResourceId; /** * The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} * REQUIRED for provided_aqs. */ - @JsonProperty("queue_url") private String queueUrl; /** * The resource group for the queue, event grid subscription, and external location storage * account. ONLY REQUIRED for locations with a service principal storage credential */ - @JsonProperty("resource_group") private String resourceGroup; /** * OPTIONAL: The subscription id for the queue, event grid subscription, and external location * storage account. REQUIRED for locations with a service principal storage credential */ - @JsonProperty("subscription_id") private String subscriptionId; public AzureQueueStorage setManagedResourceId(String managedResourceId) { @@ -95,4 +102,44 @@ public String toString() { .add("subscriptionId", subscriptionId) .toString(); } + + AzureQueueStoragePb toPb() { + AzureQueueStoragePb pb = new AzureQueueStoragePb(); + pb.setManagedResourceId(managedResourceId); + pb.setQueueUrl(queueUrl); + pb.setResourceGroup(resourceGroup); + pb.setSubscriptionId(subscriptionId); + + return pb; + } + + static AzureQueueStorage fromPb(AzureQueueStoragePb pb) { + AzureQueueStorage model = new AzureQueueStorage(); + model.setManagedResourceId(pb.getManagedResourceId()); + model.setQueueUrl(pb.getQueueUrl()); + model.setResourceGroup(pb.getResourceGroup()); + model.setSubscriptionId(pb.getSubscriptionId()); + + return model; + } + + public static class AzureQueueStorageSerializer extends JsonSerializer { + @Override + public void serialize(AzureQueueStorage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureQueueStoragePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureQueueStorageDeserializer extends 
JsonDeserializer { + @Override + public AzureQueueStorage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureQueueStoragePb pb = mapper.readValue(p, AzureQueueStoragePb.class); + return AzureQueueStorage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStoragePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStoragePb.java new file mode 100755 index 000000000..bb02db71f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStoragePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AzureQueueStoragePb { + @JsonProperty("managed_resource_id") + private String managedResourceId; + + @JsonProperty("queue_url") + private String queueUrl; + + @JsonProperty("resource_group") + private String resourceGroup; + + @JsonProperty("subscription_id") + private String subscriptionId; + + public AzureQueueStoragePb setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public AzureQueueStoragePb setQueueUrl(String queueUrl) { + this.queueUrl = queueUrl; + return this; + } + + public String getQueueUrl() { + return queueUrl; + } + + public AzureQueueStoragePb setResourceGroup(String resourceGroup) { + this.resourceGroup = resourceGroup; + return this; + } + + public String getResourceGroup() { + return resourceGroup; + } + + public 
AzureQueueStoragePb setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureQueueStoragePb that = (AzureQueueStoragePb) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(queueUrl, that.queueUrl) + && Objects.equals(resourceGroup, that.resourceGroup) + && Objects.equals(subscriptionId, that.subscriptionId); + } + + @Override + public int hashCode() { + return Objects.hash(managedResourceId, queueUrl, resourceGroup, subscriptionId); + } + + @Override + public String toString() { + return new ToStringer(AzureQueueStoragePb.class) + .add("managedResourceId", managedResourceId) + .add("queueUrl", queueUrl) + .add("resourceGroup", resourceGroup) + .add("subscriptionId", subscriptionId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java index e9d410aad..44e95df8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ @Generated +@JsonSerialize(using = AzureServicePrincipal.AzureServicePrincipalSerializer.class) +@JsonDeserialize(using = AzureServicePrincipal.AzureServicePrincipalDeserializer.class) public class AzureServicePrincipal { /** The application ID of the application registration within the referenced AAD tenant. */ - @JsonProperty("application_id") private String applicationId; /** The client secret generated for the above app ID in AAD. */ - @JsonProperty("client_secret") private String clientSecret; /** * The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application. */ - @JsonProperty("directory_id") private String directoryId; public AzureServicePrincipal setApplicationId(String applicationId) { @@ -74,4 +82,45 @@ public String toString() { .add("directoryId", directoryId) .toString(); } + + AzureServicePrincipalPb toPb() { + AzureServicePrincipalPb pb = new AzureServicePrincipalPb(); + pb.setApplicationId(applicationId); + pb.setClientSecret(clientSecret); + pb.setDirectoryId(directoryId); + + return pb; + } + + static AzureServicePrincipal fromPb(AzureServicePrincipalPb pb) { + AzureServicePrincipal model = new AzureServicePrincipal(); + model.setApplicationId(pb.getApplicationId()); + model.setClientSecret(pb.getClientSecret()); + model.setDirectoryId(pb.getDirectoryId()); + + return model; + } + + public static class AzureServicePrincipalSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureServicePrincipal value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureServicePrincipalPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + 
} + + public static class AzureServicePrincipalDeserializer + extends JsonDeserializer { + @Override + public AzureServicePrincipal deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureServicePrincipalPb pb = mapper.readValue(p, AzureServicePrincipalPb.class); + return AzureServicePrincipal.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipalPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipalPb.java new file mode 100755 index 000000000..4b17000ba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipalPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. 
*/ +@Generated +class AzureServicePrincipalPb { + @JsonProperty("application_id") + private String applicationId; + + @JsonProperty("client_secret") + private String clientSecret; + + @JsonProperty("directory_id") + private String directoryId; + + public AzureServicePrincipalPb setApplicationId(String applicationId) { + this.applicationId = applicationId; + return this; + } + + public String getApplicationId() { + return applicationId; + } + + public AzureServicePrincipalPb setClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + return this; + } + + public String getClientSecret() { + return clientSecret; + } + + public AzureServicePrincipalPb setDirectoryId(String directoryId) { + this.directoryId = directoryId; + return this; + } + + public String getDirectoryId() { + return directoryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureServicePrincipalPb that = (AzureServicePrincipalPb) o; + return Objects.equals(applicationId, that.applicationId) + && Objects.equals(clientSecret, that.clientSecret) + && Objects.equals(directoryId, that.directoryId); + } + + @Override + public int hashCode() { + return Objects.hash(applicationId, clientSecret, directoryId); + } + + @Override + public String toString() { + return new ToStringer(AzureServicePrincipalPb.class) + .add("applicationId", applicationId) + .add("clientSecret", clientSecret) + .add("directoryId", directoryId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java index e3db75df8..a6c0ed0ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java @@ -4,7 +4,16 
@@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,9 +21,10 @@ * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ @Generated +@JsonSerialize(using = AzureUserDelegationSas.AzureUserDelegationSasSerializer.class) +@JsonDeserialize(using = AzureUserDelegationSas.AzureUserDelegationSasDeserializer.class) public class AzureUserDelegationSas { /** The signed URI (SAS Token) used to access blob services for a given path */ - @JsonProperty("sas_token") private String sasToken; public AzureUserDelegationSas setSasToken(String sasToken) { @@ -43,4 +53,41 @@ public int hashCode() { public String toString() { return new ToStringer(AzureUserDelegationSas.class).add("sasToken", sasToken).toString(); } + + AzureUserDelegationSasPb toPb() { + AzureUserDelegationSasPb pb = new AzureUserDelegationSasPb(); + pb.setSasToken(sasToken); + + return pb; + } + + static AzureUserDelegationSas fromPb(AzureUserDelegationSasPb pb) { + AzureUserDelegationSas model = new AzureUserDelegationSas(); + model.setSasToken(pb.getSasToken()); + + return model; + } + + public static class AzureUserDelegationSasSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureUserDelegationSas value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
AzureUserDelegationSasPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureUserDelegationSasDeserializer + extends JsonDeserializer { + @Override + public AzureUserDelegationSas deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureUserDelegationSasPb pb = mapper.readValue(p, AzureUserDelegationSasPb.class); + return AzureUserDelegationSas.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSasPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSasPb.java new file mode 100755 index 000000000..4d5ae3726 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSasPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Azure temporary credentials for API authentication. 
Read more at + * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +@Generated +class AzureUserDelegationSasPb { + @JsonProperty("sas_token") + private String sasToken; + + public AzureUserDelegationSasPb setSasToken(String sasToken) { + this.sasToken = sasToken; + return this; + } + + public String getSasToken() { + return sasToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureUserDelegationSasPb that = (AzureUserDelegationSasPb) o; + return Objects.equals(sasToken, that.sasToken); + } + + @Override + public int hashCode() { + return Objects.hash(sasToken); + } + + @Override + public String toString() { + return new ToStringer(AzureUserDelegationSasPb.class).add("sasToken", sasToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java index 5f2408e0c..06cee92ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; /** Cancel refresh */ @Generated +@JsonSerialize(using = CancelRefreshRequest.CancelRefreshRequestSerializer.class) +@JsonDeserialize(using = CancelRefreshRequest.CancelRefreshRequestDeserializer.class) public class CancelRefreshRequest { /** ID of the refresh. */ - @JsonIgnore private String refreshId; + private String refreshId; /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public CancelRefreshRequest setRefreshId(String refreshId) { this.refreshId = refreshId; @@ -54,4 +65,42 @@ public String toString() { .add("tableName", tableName) .toString(); } + + CancelRefreshRequestPb toPb() { + CancelRefreshRequestPb pb = new CancelRefreshRequestPb(); + pb.setRefreshId(refreshId); + pb.setTableName(tableName); + + return pb; + } + + static CancelRefreshRequest fromPb(CancelRefreshRequestPb pb) { + CancelRefreshRequest model = new CancelRefreshRequest(); + model.setRefreshId(pb.getRefreshId()); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class CancelRefreshRequestSerializer extends JsonSerializer { + @Override + public void serialize( + CancelRefreshRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelRefreshRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelRefreshRequestDeserializer + extends JsonDeserializer { + @Override + public CancelRefreshRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelRefreshRequestPb pb = mapper.readValue(p, CancelRefreshRequestPb.class); + return CancelRefreshRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequestPb.java new file mode 100755 index 000000000..8f758f934 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Cancel refresh */ +@Generated +class CancelRefreshRequestPb { + @JsonIgnore private String refreshId; + + @JsonIgnore private String tableName; + + public CancelRefreshRequestPb setRefreshId(String refreshId) { + this.refreshId = refreshId; + return this; + } + + public String getRefreshId() { + return refreshId; + } + + public CancelRefreshRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelRefreshRequestPb that = (CancelRefreshRequestPb) o; + return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(refreshId, tableName); + } + + @Override + public String toString() { + return new ToStringer(CancelRefreshRequestPb.class) + .add("refreshId", refreshId) + .add("tableName", tableName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java index 90b729a13..8676669c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelRefreshResponse.CancelRefreshResponseSerializer.class) +@JsonDeserialize(using = CancelRefreshResponse.CancelRefreshResponseDeserializer.class) public class CancelRefreshResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(CancelRefreshResponse.class).toString(); } + + CancelRefreshResponsePb toPb() { + CancelRefreshResponsePb pb = new CancelRefreshResponsePb(); + + return pb; + } + + static CancelRefreshResponse fromPb(CancelRefreshResponsePb pb) { + CancelRefreshResponse model = new CancelRefreshResponse(); + + return model; + } + + public static class CancelRefreshResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CancelRefreshResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelRefreshResponsePb 
pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelRefreshResponseDeserializer + extends JsonDeserializer { + @Override + public CancelRefreshResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelRefreshResponsePb pb = mapper.readValue(p, CancelRefreshResponsePb.class); + return CancelRefreshResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponsePb.java similarity index 81% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponsePb.java index 4d96f2e05..984c1625d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponsePb.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class DeleteDatabaseInstanceResponse { +class CancelRefreshResponsePb { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteDatabaseInstanceResponse.class).toString(); + return new ToStringer(CancelRefreshResponsePb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index ccdc57264..2efc73337 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -4,76 +4,72 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CatalogInfo.CatalogInfoSerializer.class) +@JsonDeserialize(using = CatalogInfo.CatalogInfoDeserializer.class) public class CatalogInfo { /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** The type of the catalog. */ - @JsonProperty("catalog_type") private CatalogType catalogType; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** The name of the connection to an external data source. */ - @JsonProperty("connection_name") private String connectionName; /** Time at which this catalog was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of catalog creator. 
*/ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; /** Whether predictive optimization should be enabled for this object and objects under it. */ - @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; /** The full name of the catalog. Corresponds with the name field. */ - @JsonProperty("full_name") private String fullName; /** * Whether the current securable is accessible from all workspaces or a specific set of * workspaces. */ - @JsonProperty("isolation_mode") private CatalogIsolationMode isolationMode; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of catalog. */ - @JsonProperty("name") private String name; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** Username of current owner of catalog. */ - @JsonProperty("owner") private String owner; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** @@ -82,35 +78,27 @@ public class CatalogInfo { *

A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing * server. */ - @JsonProperty("provider_name") private String providerName; /** Status of an asynchronously provisioned resource. */ - @JsonProperty("provisioning_info") private ProvisioningInfo provisioningInfo; /** The type of Unity Catalog securable. */ - @JsonProperty("securable_type") private SecurableType securableType; /** The name of the share under the share provider. */ - @JsonProperty("share_name") private String shareName; /** Storage Location URL (full path) for managed tables within catalog. */ - @JsonProperty("storage_location") private String storageLocation; /** Storage root URL for managed tables within catalog. */ - @JsonProperty("storage_root") private String storageRoot; /** Time at which this catalog was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified catalog. */ - @JsonProperty("updated_by") private String updatedBy; public CatalogInfo setBrowseOnly(Boolean browseOnly) { @@ -409,4 +397,81 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + CatalogInfoPb toPb() { + CatalogInfoPb pb = new CatalogInfoPb(); + pb.setBrowseOnly(browseOnly); + pb.setCatalogType(catalogType); + pb.setComment(comment); + pb.setConnectionName(connectionName); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEffectivePredictiveOptimizationFlag(effectivePredictiveOptimizationFlag); + pb.setEnablePredictiveOptimization(enablePredictiveOptimization); + pb.setFullName(fullName); + pb.setIsolationMode(isolationMode); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOptions(options); + pb.setOwner(owner); + pb.setProperties(properties); + pb.setProviderName(providerName); + pb.setProvisioningInfo(provisioningInfo); + pb.setSecurableType(securableType); + pb.setShareName(shareName); + pb.setStorageLocation(storageLocation); + 
pb.setStorageRoot(storageRoot); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static CatalogInfo fromPb(CatalogInfoPb pb) { + CatalogInfo model = new CatalogInfo(); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogType(pb.getCatalogType()); + model.setComment(pb.getComment()); + model.setConnectionName(pb.getConnectionName()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEffectivePredictiveOptimizationFlag(pb.getEffectivePredictiveOptimizationFlag()); + model.setEnablePredictiveOptimization(pb.getEnablePredictiveOptimization()); + model.setFullName(pb.getFullName()); + model.setIsolationMode(pb.getIsolationMode()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setOwner(pb.getOwner()); + model.setProperties(pb.getProperties()); + model.setProviderName(pb.getProviderName()); + model.setProvisioningInfo(pb.getProvisioningInfo()); + model.setSecurableType(pb.getSecurableType()); + model.setShareName(pb.getShareName()); + model.setStorageLocation(pb.getStorageLocation()); + model.setStorageRoot(pb.getStorageRoot()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class CatalogInfoSerializer extends JsonSerializer { + @Override + public void serialize(CatalogInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CatalogInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CatalogInfoDeserializer extends JsonDeserializer { + @Override + public CatalogInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CatalogInfoPb pb = mapper.readValue(p, CatalogInfoPb.class); + return CatalogInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoPb.java new file mode 100755 index 000000000..66e0e3b8c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoPb.java @@ -0,0 +1,378 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CatalogInfoPb { + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_type") + private CatalogType catalogType; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("connection_name") + private String connectionName; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; + + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("isolation_mode") + private CatalogIsolationMode isolationMode; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Map options; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("provider_name") + private String 
providerName; + + @JsonProperty("provisioning_info") + private ProvisioningInfo provisioningInfo; + + @JsonProperty("securable_type") + private SecurableType securableType; + + @JsonProperty("share_name") + private String shareName; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("storage_root") + private String storageRoot; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public CatalogInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public CatalogInfoPb setCatalogType(CatalogType catalogType) { + this.catalogType = catalogType; + return this; + } + + public CatalogType getCatalogType() { + return catalogType; + } + + public CatalogInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CatalogInfoPb setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + + public CatalogInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CatalogInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public CatalogInfoPb setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; + return this; + } + + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; + } + + public CatalogInfoPb setEnablePredictiveOptimization( + EnablePredictiveOptimization 
enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + + public CatalogInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CatalogInfoPb setIsolationMode(CatalogIsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public CatalogIsolationMode getIsolationMode() { + return isolationMode; + } + + public CatalogInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public CatalogInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CatalogInfoPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map getOptions() { + return options; + } + + public CatalogInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public CatalogInfoPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public CatalogInfoPb setProviderName(String providerName) { + this.providerName = providerName; + return this; + } + + public String getProviderName() { + return providerName; + } + + public CatalogInfoPb setProvisioningInfo(ProvisioningInfo provisioningInfo) { + this.provisioningInfo = provisioningInfo; + return this; + } + + public ProvisioningInfo getProvisioningInfo() { + return provisioningInfo; + } + + public CatalogInfoPb setSecurableType(SecurableType securableType) { + this.securableType = securableType; + return this; + } + + public SecurableType getSecurableType() { + return securableType; + 
} + + public CatalogInfoPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public CatalogInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public CatalogInfoPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public CatalogInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public CatalogInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CatalogInfoPb that = (CatalogInfoPb) o; + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogType, that.catalogType) + && Objects.equals(comment, that.comment) + && Objects.equals(connectionName, that.connectionName) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) + && Objects.equals(fullName, that.fullName) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties) + && Objects.equals(providerName, that.providerName) + && Objects.equals(provisioningInfo, that.provisioningInfo) + && 
Objects.equals(securableType, that.securableType) + && Objects.equals(shareName, that.shareName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(storageRoot, that.storageRoot) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + browseOnly, + catalogType, + comment, + connectionName, + createdAt, + createdBy, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, + fullName, + isolationMode, + metastoreId, + name, + options, + owner, + properties, + providerName, + provisioningInfo, + securableType, + shareName, + storageLocation, + storageRoot, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(CatalogInfoPb.class) + .add("browseOnly", browseOnly) + .add("catalogType", catalogType) + .add("comment", comment) + .add("connectionName", connectionName) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) + .add("fullName", fullName) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("options", options) + .add("owner", owner) + .add("properties", properties) + .add("providerName", providerName) + .add("provisioningInfo", provisioningInfo) + .add("securableType", securableType) + .add("shareName", shareName) + .add("storageLocation", storageLocation) + .add("storageRoot", storageRoot) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java index 29649052b..8762f4300 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java @@ -21,7 +21,7 @@ public CatalogInfo create(CreateCatalog request) { String path = "/api/2.1/unity-catalog/catalogs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CatalogInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteCatalogRequest request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public CatalogInfo get(GetCatalogRequest request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CatalogInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListCatalogsResponse list(ListCatalogsRequest request) { String path = "/api/2.1/unity-catalog/catalogs"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListCatalogsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public CatalogInfo update(UpdateCatalog request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); try { Request req = new Request("PATCH", path, 
apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CatalogInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java index bcf1fe2c8..d4a880bcd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CloudflareApiToken.CloudflareApiTokenSerializer.class) +@JsonDeserialize(using = CloudflareApiToken.CloudflareApiTokenDeserializer.class) public class CloudflareApiToken { /** The Cloudflare access key id of the token. */ - @JsonProperty("access_key_id") private String accessKeyId; /** The account id associated with the API token. 
*/ - @JsonProperty("account_id") private String accountId; /** The secret access token generated for the access key id */ - @JsonProperty("secret_access_key") private String secretAccessKey; public CloudflareApiToken setAccessKeyId(String accessKeyId) { @@ -71,4 +79,42 @@ public String toString() { .add("secretAccessKey", secretAccessKey) .toString(); } + + CloudflareApiTokenPb toPb() { + CloudflareApiTokenPb pb = new CloudflareApiTokenPb(); + pb.setAccessKeyId(accessKeyId); + pb.setAccountId(accountId); + pb.setSecretAccessKey(secretAccessKey); + + return pb; + } + + static CloudflareApiToken fromPb(CloudflareApiTokenPb pb) { + CloudflareApiToken model = new CloudflareApiToken(); + model.setAccessKeyId(pb.getAccessKeyId()); + model.setAccountId(pb.getAccountId()); + model.setSecretAccessKey(pb.getSecretAccessKey()); + + return model; + } + + public static class CloudflareApiTokenSerializer extends JsonSerializer { + @Override + public void serialize(CloudflareApiToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CloudflareApiTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloudflareApiTokenDeserializer extends JsonDeserializer { + @Override + public CloudflareApiToken deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CloudflareApiTokenPb pb = mapper.readValue(p, CloudflareApiTokenPb.class); + return CloudflareApiToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiTokenPb.java new file mode 100755 index 000000000..24e7e7d41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiTokenPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CloudflareApiTokenPb { + @JsonProperty("access_key_id") + private String accessKeyId; + + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("secret_access_key") + private String secretAccessKey; + + public CloudflareApiTokenPb setAccessKeyId(String accessKeyId) { + this.accessKeyId = accessKeyId; + return this; + } + + public String getAccessKeyId() { + return accessKeyId; + } + + public CloudflareApiTokenPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CloudflareApiTokenPb setSecretAccessKey(String secretAccessKey) { + this.secretAccessKey = secretAccessKey; + return this; + } + + public String getSecretAccessKey() { + return secretAccessKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CloudflareApiTokenPb that = (CloudflareApiTokenPb) o; + return Objects.equals(accessKeyId, that.accessKeyId) + && Objects.equals(accountId, that.accountId) + && 
Objects.equals(secretAccessKey, that.secretAccessKey); + } + + @Override + public int hashCode() { + return Objects.hash(accessKeyId, accountId, secretAccessKey); + } + + @Override + public String toString() { + return new ToStringer(CloudflareApiTokenPb.class) + .add("accessKeyId", accessKeyId) + .add("accountId", accountId) + .add("secretAccessKey", secretAccessKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java index bb8ced06f..14bc8fb4a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java @@ -4,57 +4,56 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ColumnInfo.ColumnInfoSerializer.class) +@JsonDeserialize(using = ColumnInfo.ColumnInfoDeserializer.class) public class ColumnInfo { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("mask") private ColumnMask mask; /** Name of Column. */ - @JsonProperty("name") private String name; /** Whether field may be Null (default: true). 
*/ - @JsonProperty("nullable") private Boolean nullable; /** Partition index for column. */ - @JsonProperty("partition_index") private Long partitionIndex; /** Ordinal position of column (starting at position 0). */ - @JsonProperty("position") private Long position; /** Format of IntervalType. */ - @JsonProperty("type_interval_type") private String typeIntervalType; /** Full data type specification, JSON-serialized. */ - @JsonProperty("type_json") private String typeJson; /** */ - @JsonProperty("type_name") private ColumnTypeName typeName; /** Digits of precision; required for DecimalTypes. */ - @JsonProperty("type_precision") private Long typePrecision; /** Digits to right of decimal; Required for DecimalTypes. */ - @JsonProperty("type_scale") private Long typeScale; /** Full data type specification as SQL/catalogString text. */ - @JsonProperty("type_text") private String typeText; public ColumnInfo setComment(String comment) { @@ -218,4 +217,59 @@ public String toString() { .add("typeText", typeText) .toString(); } + + ColumnInfoPb toPb() { + ColumnInfoPb pb = new ColumnInfoPb(); + pb.setComment(comment); + pb.setMask(mask); + pb.setName(name); + pb.setNullable(nullable); + pb.setPartitionIndex(partitionIndex); + pb.setPosition(position); + pb.setTypeIntervalType(typeIntervalType); + pb.setTypeJson(typeJson); + pb.setTypeName(typeName); + pb.setTypePrecision(typePrecision); + pb.setTypeScale(typeScale); + pb.setTypeText(typeText); + + return pb; + } + + static ColumnInfo fromPb(ColumnInfoPb pb) { + ColumnInfo model = new ColumnInfo(); + model.setComment(pb.getComment()); + model.setMask(pb.getMask()); + model.setName(pb.getName()); + model.setNullable(pb.getNullable()); + model.setPartitionIndex(pb.getPartitionIndex()); + model.setPosition(pb.getPosition()); + model.setTypeIntervalType(pb.getTypeIntervalType()); + model.setTypeJson(pb.getTypeJson()); + model.setTypeName(pb.getTypeName()); + model.setTypePrecision(pb.getTypePrecision()); + 
model.setTypeScale(pb.getTypeScale()); + model.setTypeText(pb.getTypeText()); + + return model; + } + + public static class ColumnInfoSerializer extends JsonSerializer { + @Override + public void serialize(ColumnInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ColumnInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ColumnInfoDeserializer extends JsonDeserializer { + @Override + public ColumnInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ColumnInfoPb pb = mapper.readValue(p, ColumnInfoPb.class); + return ColumnInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfoPb.java new file mode 100755 index 000000000..e8fe9c98c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfoPb.java @@ -0,0 +1,209 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ColumnInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("mask") + private ColumnMask mask; + + @JsonProperty("name") + private String name; + + @JsonProperty("nullable") + private Boolean nullable; + + @JsonProperty("partition_index") + private Long partitionIndex; + + @JsonProperty("position") + private Long position; + + @JsonProperty("type_interval_type") + private String typeIntervalType; + + @JsonProperty("type_json") + private String typeJson; + + @JsonProperty("type_name") + private ColumnTypeName typeName; + + @JsonProperty("type_precision") + private Long typePrecision; + + @JsonProperty("type_scale") + private Long typeScale; + + @JsonProperty("type_text") + private String typeText; + + public ColumnInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ColumnInfoPb setMask(ColumnMask mask) { + this.mask = mask; + return this; + } + + public ColumnMask getMask() { + return mask; + } + + public ColumnInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ColumnInfoPb setNullable(Boolean nullable) { + this.nullable = nullable; + return this; + } + + public Boolean getNullable() { + return nullable; + } + + public ColumnInfoPb setPartitionIndex(Long partitionIndex) { + this.partitionIndex = partitionIndex; + return this; + } + + public Long getPartitionIndex() { + return partitionIndex; + } + + public ColumnInfoPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public ColumnInfoPb setTypeIntervalType(String typeIntervalType) { + 
this.typeIntervalType = typeIntervalType; + return this; + } + + public String getTypeIntervalType() { + return typeIntervalType; + } + + public ColumnInfoPb setTypeJson(String typeJson) { + this.typeJson = typeJson; + return this; + } + + public String getTypeJson() { + return typeJson; + } + + public ColumnInfoPb setTypeName(ColumnTypeName typeName) { + this.typeName = typeName; + return this; + } + + public ColumnTypeName getTypeName() { + return typeName; + } + + public ColumnInfoPb setTypePrecision(Long typePrecision) { + this.typePrecision = typePrecision; + return this; + } + + public Long getTypePrecision() { + return typePrecision; + } + + public ColumnInfoPb setTypeScale(Long typeScale) { + this.typeScale = typeScale; + return this; + } + + public Long getTypeScale() { + return typeScale; + } + + public ColumnInfoPb setTypeText(String typeText) { + this.typeText = typeText; + return this; + } + + public String getTypeText() { + return typeText; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ColumnInfoPb that = (ColumnInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(mask, that.mask) + && Objects.equals(name, that.name) + && Objects.equals(nullable, that.nullable) + && Objects.equals(partitionIndex, that.partitionIndex) + && Objects.equals(position, that.position) + && Objects.equals(typeIntervalType, that.typeIntervalType) + && Objects.equals(typeJson, that.typeJson) + && Objects.equals(typeName, that.typeName) + && Objects.equals(typePrecision, that.typePrecision) + && Objects.equals(typeScale, that.typeScale) + && Objects.equals(typeText, that.typeText); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + mask, + name, + nullable, + partitionIndex, + position, + typeIntervalType, + typeJson, + typeName, + typePrecision, + typeScale, + typeText); + } + + @Override + public String toString() { + return 
new ToStringer(ColumnInfoPb.class) + .add("comment", comment) + .add("mask", mask) + .add("name", name) + .add("nullable", nullable) + .add("partitionIndex", partitionIndex) + .add("position", position) + .add("typeIntervalType", typeIntervalType) + .add("typeJson", typeJson) + .add("typeName", typeName) + .add("typePrecision", typePrecision) + .add("typeScale", typeScale) + .add("typeText", typeText) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java index e14511b85..0388a9546 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ColumnMask.ColumnMaskSerializer.class) +@JsonDeserialize(using = ColumnMask.ColumnMaskDeserializer.class) public class ColumnMask { /** The full name of the column mask SQL UDF. 
*/ - @JsonProperty("function_name") private String functionName; /** @@ -19,7 +29,6 @@ public class ColumnMask { * first arg of the mask function should be of the type of the column being masked and the types * of the rest of the args should match the types of columns in 'using_column_names'. */ - @JsonProperty("using_column_names") private Collection usingColumnNames; public ColumnMask setFunctionName(String functionName) { @@ -61,4 +70,39 @@ public String toString() { .add("usingColumnNames", usingColumnNames) .toString(); } + + ColumnMaskPb toPb() { + ColumnMaskPb pb = new ColumnMaskPb(); + pb.setFunctionName(functionName); + pb.setUsingColumnNames(usingColumnNames); + + return pb; + } + + static ColumnMask fromPb(ColumnMaskPb pb) { + ColumnMask model = new ColumnMask(); + model.setFunctionName(pb.getFunctionName()); + model.setUsingColumnNames(pb.getUsingColumnNames()); + + return model; + } + + public static class ColumnMaskSerializer extends JsonSerializer { + @Override + public void serialize(ColumnMask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ColumnMaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ColumnMaskDeserializer extends JsonDeserializer { + @Override + public ColumnMask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ColumnMaskPb pb = mapper.readValue(p, ColumnMaskPb.class); + return ColumnMask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskPb.java new file mode 100755 index 000000000..4c20c63a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ColumnMaskPb { + @JsonProperty("function_name") + private String functionName; + + @JsonProperty("using_column_names") + private Collection usingColumnNames; + + public ColumnMaskPb setFunctionName(String functionName) { + this.functionName = functionName; + return this; + } + + public String getFunctionName() { + return functionName; + } + + public ColumnMaskPb setUsingColumnNames(Collection usingColumnNames) { + this.usingColumnNames = usingColumnNames; + return this; + } + + public Collection getUsingColumnNames() { + return usingColumnNames; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ColumnMaskPb that = (ColumnMaskPb) o; + return Objects.equals(functionName, that.functionName) + && Objects.equals(usingColumnNames, that.usingColumnNames); + } + + @Override + public int hashCode() { + return Objects.hash(functionName, usingColumnNames); + } + + @Override + public String toString() { + return new ToStringer(ColumnMaskPb.class) + .add("functionName", functionName) + .add("usingColumnNames", usingColumnNames) 
+ .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 496800340..8a2afb82f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -4,82 +4,75 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ConnectionInfo.ConnectionInfoSerializer.class) +@JsonDeserialize(using = ConnectionInfo.ConnectionInfoDeserializer.class) public class ConnectionInfo { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Unique identifier of the Connection. */ - @JsonProperty("connection_id") private String connectionId; /** The type of connection. */ - @JsonProperty("connection_type") private ConnectionType connectionType; /** Time at which this connection was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of connection creator. */ - @JsonProperty("created_by") private String createdBy; /** The type of credential. 
*/ - @JsonProperty("credential_type") private CredentialType credentialType; /** Full name of connection. */ - @JsonProperty("full_name") private String fullName; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of the connection. */ - @JsonProperty("name") private String name; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** Username of current owner of the connection. */ - @JsonProperty("owner") private String owner; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** Status of an asynchronously provisioned resource. */ - @JsonProperty("provisioning_info") private ProvisioningInfo provisioningInfo; /** If the connection is read only. */ - @JsonProperty("read_only") private Boolean readOnly; /** The type of Unity Catalog securable. */ - @JsonProperty("securable_type") private SecurableType securableType; /** Time at which this connection was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified connection. */ - @JsonProperty("updated_by") private String updatedBy; /** URL of the remote data source, extracted from options. 
*/ - @JsonProperty("url") private String url; public ConnectionInfo setComment(String comment) { @@ -315,4 +308,72 @@ public String toString() { .add("url", url) .toString(); } + + ConnectionInfoPb toPb() { + ConnectionInfoPb pb = new ConnectionInfoPb(); + pb.setComment(comment); + pb.setConnectionId(connectionId); + pb.setConnectionType(connectionType); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setCredentialType(credentialType); + pb.setFullName(fullName); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOptions(options); + pb.setOwner(owner); + pb.setProperties(properties); + pb.setProvisioningInfo(provisioningInfo); + pb.setReadOnly(readOnly); + pb.setSecurableType(securableType); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setUrl(url); + + return pb; + } + + static ConnectionInfo fromPb(ConnectionInfoPb pb) { + ConnectionInfo model = new ConnectionInfo(); + model.setComment(pb.getComment()); + model.setConnectionId(pb.getConnectionId()); + model.setConnectionType(pb.getConnectionType()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setCredentialType(pb.getCredentialType()); + model.setFullName(pb.getFullName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setOwner(pb.getOwner()); + model.setProperties(pb.getProperties()); + model.setProvisioningInfo(pb.getProvisioningInfo()); + model.setReadOnly(pb.getReadOnly()); + model.setSecurableType(pb.getSecurableType()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class ConnectionInfoSerializer extends JsonSerializer { + @Override + public void serialize(ConnectionInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ConnectionInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class ConnectionInfoDeserializer extends JsonDeserializer { + @Override + public ConnectionInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ConnectionInfoPb pb = mapper.readValue(p, ConnectionInfoPb.class); + return ConnectionInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoPb.java new file mode 100755 index 000000000..013f45cba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoPb.java @@ -0,0 +1,300 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ConnectionInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("connection_id") + private String connectionId; + + @JsonProperty("connection_type") + private ConnectionType connectionType; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("credential_type") + private CredentialType credentialType; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Map options; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("provisioning_info") + private ProvisioningInfo provisioningInfo; + + 
@JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("securable_type") + private SecurableType securableType; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("url") + private String url; + + public ConnectionInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ConnectionInfoPb setConnectionId(String connectionId) { + this.connectionId = connectionId; + return this; + } + + public String getConnectionId() { + return connectionId; + } + + public ConnectionInfoPb setConnectionType(ConnectionType connectionType) { + this.connectionType = connectionType; + return this; + } + + public ConnectionType getConnectionType() { + return connectionType; + } + + public ConnectionInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ConnectionInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ConnectionInfoPb setCredentialType(CredentialType credentialType) { + this.credentialType = credentialType; + return this; + } + + public CredentialType getCredentialType() { + return credentialType; + } + + public ConnectionInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public ConnectionInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ConnectionInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ConnectionInfoPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map 
getOptions() { + return options; + } + + public ConnectionInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public ConnectionInfoPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public ConnectionInfoPb setProvisioningInfo(ProvisioningInfo provisioningInfo) { + this.provisioningInfo = provisioningInfo; + return this; + } + + public ProvisioningInfo getProvisioningInfo() { + return provisioningInfo; + } + + public ConnectionInfoPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public ConnectionInfoPb setSecurableType(SecurableType securableType) { + this.securableType = securableType; + return this; + } + + public SecurableType getSecurableType() { + return securableType; + } + + public ConnectionInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ConnectionInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public ConnectionInfoPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectionInfoPb that = (ConnectionInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(connectionId, that.connectionId) + && Objects.equals(connectionType, that.connectionType) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(credentialType, that.credentialType) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, 
that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties) + && Objects.equals(provisioningInfo, that.provisioningInfo) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(securableType, that.securableType) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + connectionId, + connectionType, + createdAt, + createdBy, + credentialType, + fullName, + metastoreId, + name, + options, + owner, + properties, + provisioningInfo, + readOnly, + securableType, + updatedAt, + updatedBy, + url); + } + + @Override + public String toString() { + return new ToStringer(ConnectionInfoPb.class) + .add("comment", comment) + .add("connectionId", connectionId) + .add("connectionType", connectionType) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("credentialType", credentialType) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("options", options) + .add("owner", owner) + .add("properties", properties) + .add("provisioningInfo", provisioningInfo) + .add("readOnly", readOnly) + .add("securableType", securableType) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index c43cb89bd..b076b2ff4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 30 */ +/** Next Id: 31 */ 
@Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java index cbd09dede..bc4a975fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java @@ -21,7 +21,7 @@ public ConnectionInfo create(CreateConnection request) { String path = "/api/2.1/unity-catalog/connections"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ConnectionInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteConnectionRequest request) { String path = String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public ConnectionInfo get(GetConnectionRequest request) { String path = String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ConnectionInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListConnectionsResponse list(ListConnectionsRequest request) { String path = "/api/2.1/unity-catalog/connections"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListConnectionsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public ConnectionInfo update(UpdateConnection request) { String path = String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ConnectionInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java index 6a3636157..fc74866f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,23 +21,22 @@ * or the ONLINE_UPDATING_PIPELINE_RESOURCES state. 
*/ @Generated +@JsonSerialize(using = ContinuousUpdateStatus.ContinuousUpdateStatusSerializer.class) +@JsonDeserialize(using = ContinuousUpdateStatus.ContinuousUpdateStatusDeserializer.class) public class ContinuousUpdateStatus { /** Progress of the initial data synchronization. */ - @JsonProperty("initial_pipeline_sync_progress") private PipelineProgress initialPipelineSyncProgress; /** * The last source table Delta version that was synced to the online table. Note that this Delta * version may not be completely synced to the online table yet. */ - @JsonProperty("last_processed_commit_version") private Long lastProcessedCommitVersion; /** * The timestamp of the last time any data was synchronized from the source table to the online * table. */ - @JsonProperty("timestamp") private String timestamp; public ContinuousUpdateStatus setInitialPipelineSyncProgress( @@ -82,4 +90,45 @@ public String toString() { .add("timestamp", timestamp) .toString(); } + + ContinuousUpdateStatusPb toPb() { + ContinuousUpdateStatusPb pb = new ContinuousUpdateStatusPb(); + pb.setInitialPipelineSyncProgress(initialPipelineSyncProgress); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + + return pb; + } + + static ContinuousUpdateStatus fromPb(ContinuousUpdateStatusPb pb) { + ContinuousUpdateStatus model = new ContinuousUpdateStatus(); + model.setInitialPipelineSyncProgress(pb.getInitialPipelineSyncProgress()); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class ContinuousUpdateStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + ContinuousUpdateStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ContinuousUpdateStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ContinuousUpdateStatusDeserializer + extends JsonDeserializer 
{ + @Override + public ContinuousUpdateStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ContinuousUpdateStatusPb pb = mapper.readValue(p, ContinuousUpdateStatusPb.class); + return ContinuousUpdateStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatusPb.java new file mode 100755 index 000000000..18379facd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatusPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE + * or the ONLINE_UPDATING_PIPELINE_RESOURCES state. 
+ */ +@Generated +class ContinuousUpdateStatusPb { + @JsonProperty("initial_pipeline_sync_progress") + private PipelineProgress initialPipelineSyncProgress; + + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + public ContinuousUpdateStatusPb setInitialPipelineSyncProgress( + PipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public PipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + public ContinuousUpdateStatusPb setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public ContinuousUpdateStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContinuousUpdateStatusPb that = (ContinuousUpdateStatusPb) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress) + && Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress, lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(ContinuousUpdateStatusPb.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Converters.java new file mode 100755 index 000000000..bbd3aafa2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.catalog; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " 
+ timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java index 2d8d187df..734526694 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCatalog.CreateCatalogSerializer.class) +@JsonDeserialize(using = CreateCatalog.CreateCatalogDeserializer.class) public class CreateCatalog { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** The name of the connection to an external data source. 
*/ - @JsonProperty("connection_name") private String connectionName; /** Name of catalog. */ - @JsonProperty("name") private String name; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** @@ -36,15 +42,12 @@ public class CreateCatalog { *

A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing * server. */ - @JsonProperty("provider_name") private String providerName; /** The name of the share under the share provider. */ - @JsonProperty("share_name") private String shareName; /** Storage root URL for managed tables within catalog. */ - @JsonProperty("storage_root") private String storageRoot; public CreateCatalog setComment(String comment) { @@ -153,4 +156,51 @@ public String toString() { .add("storageRoot", storageRoot) .toString(); } + + CreateCatalogPb toPb() { + CreateCatalogPb pb = new CreateCatalogPb(); + pb.setComment(comment); + pb.setConnectionName(connectionName); + pb.setName(name); + pb.setOptions(options); + pb.setProperties(properties); + pb.setProviderName(providerName); + pb.setShareName(shareName); + pb.setStorageRoot(storageRoot); + + return pb; + } + + static CreateCatalog fromPb(CreateCatalogPb pb) { + CreateCatalog model = new CreateCatalog(); + model.setComment(pb.getComment()); + model.setConnectionName(pb.getConnectionName()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setProperties(pb.getProperties()); + model.setProviderName(pb.getProviderName()); + model.setShareName(pb.getShareName()); + model.setStorageRoot(pb.getStorageRoot()); + + return model; + } + + public static class CreateCatalogSerializer extends JsonSerializer { + @Override + public void serialize(CreateCatalog value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCatalogPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCatalogDeserializer extends JsonDeserializer { + @Override + public CreateCatalog deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCatalogPb pb = mapper.readValue(p, CreateCatalogPb.class); + return CreateCatalog.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalogPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalogPb.java new file mode 100755 index 000000000..8fc808852 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalogPb.java @@ -0,0 +1,143 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateCatalogPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("connection_name") + private String connectionName; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Map options; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("provider_name") + private String providerName; + + @JsonProperty("share_name") + private String shareName; + + @JsonProperty("storage_root") + private String storageRoot; + + public CreateCatalogPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateCatalogPb setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + + public CreateCatalogPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCatalogPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map getOptions() { + 
return options; + } + + public CreateCatalogPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public CreateCatalogPb setProviderName(String providerName) { + this.providerName = providerName; + return this; + } + + public String getProviderName() { + return providerName; + } + + public CreateCatalogPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public CreateCatalogPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCatalogPb that = (CreateCatalogPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(connectionName, that.connectionName) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(properties, that.properties) + && Objects.equals(providerName, that.providerName) + && Objects.equals(shareName, that.shareName) + && Objects.equals(storageRoot, that.storageRoot); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, connectionName, name, options, properties, providerName, shareName, storageRoot); + } + + @Override + public String toString() { + return new ToStringer(CreateCatalogPb.class) + .add("comment", comment) + .add("connectionName", connectionName) + .add("name", name) + .add("options", options) + .add("properties", properties) + .add("providerName", providerName) + .add("shareName", shareName) + .add("storageRoot", storageRoot) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java index 3eea7832c..413b0109a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateConnection.CreateConnectionSerializer.class) +@JsonDeserialize(using = CreateConnection.CreateConnectionDeserializer.class) public class CreateConnection { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** The type of connection. */ - @JsonProperty("connection_type") private ConnectionType connectionType; /** Name of the connection. */ - @JsonProperty("name") private String name; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** If the connection is read only. 
*/ - @JsonProperty("read_only") private Boolean readOnly; public CreateConnection setComment(String comment) { @@ -117,4 +122,48 @@ public String toString() { .add("readOnly", readOnly) .toString(); } + + CreateConnectionPb toPb() { + CreateConnectionPb pb = new CreateConnectionPb(); + pb.setComment(comment); + pb.setConnectionType(connectionType); + pb.setName(name); + pb.setOptions(options); + pb.setProperties(properties); + pb.setReadOnly(readOnly); + + return pb; + } + + static CreateConnection fromPb(CreateConnectionPb pb) { + CreateConnection model = new CreateConnection(); + model.setComment(pb.getComment()); + model.setConnectionType(pb.getConnectionType()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setProperties(pb.getProperties()); + model.setReadOnly(pb.getReadOnly()); + + return model; + } + + public static class CreateConnectionSerializer extends JsonSerializer { + @Override + public void serialize(CreateConnection value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateConnectionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateConnectionDeserializer extends JsonDeserializer { + @Override + public CreateConnection deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateConnectionPb pb = mapper.readValue(p, CreateConnectionPb.class); + return CreateConnection.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnectionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnectionPb.java new file mode 100755 index 000000000..20e4081ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnectionPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateConnectionPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("connection_type") + private ConnectionType connectionType; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Map options; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("read_only") + private Boolean readOnly; + + public CreateConnectionPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateConnectionPb setConnectionType(ConnectionType connectionType) { + this.connectionType = connectionType; + return this; + } + + public ConnectionType getConnectionType() { + return connectionType; + } + + public CreateConnectionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateConnectionPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map getOptions() { + return options; + } + + public CreateConnectionPb setProperties(Map 
properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public CreateConnectionPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateConnectionPb that = (CreateConnectionPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(connectionType, that.connectionType) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(properties, that.properties) + && Objects.equals(readOnly, that.readOnly); + } + + @Override + public int hashCode() { + return Objects.hash(comment, connectionType, name, options, properties, readOnly); + } + + @Override + public String toString() { + return new ToStringer(CreateConnectionPb.class) + .add("comment", comment) + .add("connectionType", connectionType) + .add("name", name) + .add("options", options) + .add("properties", properties) + .add("readOnly", readOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java index 023a3e68e..9765ec011 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java @@ -4,53 +4,55 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCredentialRequest.CreateCredentialRequestSerializer.class) +@JsonDeserialize(using = CreateCredentialRequest.CreateCredentialRequestDeserializer.class) public class CreateCredentialRequest { /** The AWS IAM role configuration */ - @JsonProperty("aws_iam_role") private AwsIamRole awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccount databricksGcpServiceAccount; /** * The credential name. The name must be unique among storage and service credentials within the * metastore. */ - @JsonProperty("name") private String name; /** Indicates the purpose of the credential. */ - @JsonProperty("purpose") private CredentialPurpose purpose; /** * Whether the credential is usable only for read operations. Only applicable when purpose is * **STORAGE**. */ - @JsonProperty("read_only") private Boolean readOnly; /** * Optional. Supplying true to this argument skips validation of the created set of credentials. 
*/ - @JsonProperty("skip_validation") private Boolean skipValidation; public CreateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { @@ -181,4 +183,57 @@ public String toString() { .add("skipValidation", skipValidation) .toString(); } + + CreateCredentialRequestPb toPb() { + CreateCredentialRequestPb pb = new CreateCredentialRequestPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setComment(comment); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setName(name); + pb.setPurpose(purpose); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + + return pb; + } + + static CreateCredentialRequest fromPb(CreateCredentialRequestPb pb) { + CreateCredentialRequest model = new CreateCredentialRequest(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setComment(pb.getComment()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setName(pb.getName()); + model.setPurpose(pb.getPurpose()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + + return model; + } + + public static class CreateCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialRequestPb pb = mapper.readValue(p, CreateCredentialRequestPb.class); + return CreateCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequestPb.java new file mode 100755 index 000000000..f09fb917a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequestPb.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialRequestPb { + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + + @JsonProperty("name") + private String name; + + @JsonProperty("purpose") + private CredentialPurpose purpose; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public CreateCredentialRequestPb setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CreateCredentialRequestPb setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + 
} + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CreateCredentialRequestPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public CreateCredentialRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateCredentialRequestPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public CreateCredentialRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCredentialRequestPb setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CreateCredentialRequestPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public CreateCredentialRequestPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialRequestPb that = (CreateCredentialRequestPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + 
&& Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(name, that.name) + && Objects.equals(purpose, that.purpose) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + comment, + databricksGcpServiceAccount, + name, + purpose, + readOnly, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialRequestPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("name", name) + .add("purpose", purpose) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java index 585c5876d..75713e8cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExternalLocation.CreateExternalLocationSerializer.class) +@JsonDeserialize(using = CreateExternalLocation.CreateExternalLocationDeserializer.class) public class CreateExternalLocation { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Name of the storage credential used with this location. */ - @JsonProperty("credential_name") private String credentialName; /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ - @JsonProperty("enable_file_events") private Boolean enableFileEvents; /** Encryption options that apply to clients connecting to cloud storage. */ - @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; /** @@ -30,27 +37,21 @@ public class CreateExternalLocation { * enabled, the access to the location falls back to cluster credentials if UC credentials are not * sufficient. */ - @JsonProperty("fallback") private Boolean fallback; /** [Create:OPT Update:OPT] File event queue settings. */ - @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; /** Name of the external location. */ - @JsonProperty("name") private String name; /** Indicates whether the external location is read-only. */ - @JsonProperty("read_only") private Boolean readOnly; /** Skips validation of the storage credential associated with the external location. */ - @JsonProperty("skip_validation") private Boolean skipValidation; /** Path URL of the external location. 
*/ - @JsonProperty("url") private String url; public CreateExternalLocation setComment(String comment) { @@ -190,4 +191,59 @@ public String toString() { .add("url", url) .toString(); } + + CreateExternalLocationPb toPb() { + CreateExternalLocationPb pb = new CreateExternalLocationPb(); + pb.setComment(comment); + pb.setCredentialName(credentialName); + pb.setEnableFileEvents(enableFileEvents); + pb.setEncryptionDetails(encryptionDetails); + pb.setFallback(fallback); + pb.setFileEventQueue(fileEventQueue); + pb.setName(name); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + pb.setUrl(url); + + return pb; + } + + static CreateExternalLocation fromPb(CreateExternalLocationPb pb) { + CreateExternalLocation model = new CreateExternalLocation(); + model.setComment(pb.getComment()); + model.setCredentialName(pb.getCredentialName()); + model.setEnableFileEvents(pb.getEnableFileEvents()); + model.setEncryptionDetails(pb.getEncryptionDetails()); + model.setFallback(pb.getFallback()); + model.setFileEventQueue(pb.getFileEventQueue()); + model.setName(pb.getName()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class CreateExternalLocationSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExternalLocation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExternalLocationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExternalLocationDeserializer + extends JsonDeserializer { + @Override + public CreateExternalLocation deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExternalLocationPb pb = mapper.readValue(p, CreateExternalLocationPb.class); + return CreateExternalLocation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocationPb.java new file mode 100755 index 000000000..5e4af0040 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocationPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExternalLocationPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("credential_name") + private String credentialName; + + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + + @JsonProperty("encryption_details") + private EncryptionDetails encryptionDetails; + + @JsonProperty("fallback") + private Boolean fallback; + + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + + @JsonProperty("name") + private String name; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + @JsonProperty("url") + private String url; + + public CreateExternalLocationPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateExternalLocationPb setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public CreateExternalLocationPb 
setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + + public CreateExternalLocationPb setEncryptionDetails(EncryptionDetails encryptionDetails) { + this.encryptionDetails = encryptionDetails; + return this; + } + + public EncryptionDetails getEncryptionDetails() { + return encryptionDetails; + } + + public CreateExternalLocationPb setFallback(Boolean fallback) { + this.fallback = fallback; + return this; + } + + public Boolean getFallback() { + return fallback; + } + + public CreateExternalLocationPb setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + + public CreateExternalLocationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateExternalLocationPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public CreateExternalLocationPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + public CreateExternalLocationPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExternalLocationPb that = (CreateExternalLocationPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(enableFileEvents, that.enableFileEvents) + && Objects.equals(encryptionDetails, that.encryptionDetails) + && Objects.equals(fallback, that.fallback) + && 
Objects.equals(fileEventQueue, that.fileEventQueue) + && Objects.equals(name, that.name) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + credentialName, + enableFileEvents, + encryptionDetails, + fallback, + fileEventQueue, + name, + readOnly, + skipValidation, + url); + } + + @Override + public String toString() { + return new ToStringer(CreateExternalLocationPb.class) + .add("comment", comment) + .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) + .add("encryptionDetails", encryptionDetails) + .add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) + .add("name", name) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java index 83052df1f..796e967b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java @@ -4,61 +4,59 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateFunction.CreateFunctionSerializer.class) +@JsonDeserialize(using = CreateFunction.CreateFunctionDeserializer.class) public class CreateFunction { /** Name of parent catalog. */ - @JsonProperty("catalog_name") private String catalogName; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Scalar function return data type. */ - @JsonProperty("data_type") private ColumnTypeName dataType; /** External function language. */ - @JsonProperty("external_language") private String externalLanguage; /** External function name. */ - @JsonProperty("external_name") private String externalName; /** Pretty printed function data type. */ - @JsonProperty("full_data_type") private String fullDataType; /** */ - @JsonProperty("input_params") private FunctionParameterInfos inputParams; /** Whether the function is deterministic. */ - @JsonProperty("is_deterministic") private Boolean isDeterministic; /** Function null call. */ - @JsonProperty("is_null_call") private Boolean isNullCall; /** Name of function, relative to parent schema. */ - @JsonProperty("name") private String name; /** Function parameter style. **S** is the value for SQL. */ - @JsonProperty("parameter_style") private CreateFunctionParameterStyle parameterStyle; /** JSON-serialized key-value pair map, encoded (escaped) as a string. */ - @JsonProperty("properties") private String properties; /** Table function return parameters. */ - @JsonProperty("return_params") private FunctionParameterInfos returnParams; /** @@ -67,35 +65,27 @@ public class CreateFunction { * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be * **NO_SQL**. */ - @JsonProperty("routine_body") private CreateFunctionRoutineBody routineBody; /** Function body. 
*/ - @JsonProperty("routine_definition") private String routineDefinition; /** Function dependencies. */ - @JsonProperty("routine_dependencies") private DependencyList routineDependencies; /** Name of parent schema relative to its parent catalog. */ - @JsonProperty("schema_name") private String schemaName; /** Function security type. */ - @JsonProperty("security_type") private CreateFunctionSecurityType securityType; /** Specific name of the function; Reserved for future use. */ - @JsonProperty("specific_name") private String specificName; /** Function SQL data access. */ - @JsonProperty("sql_data_access") private CreateFunctionSqlDataAccess sqlDataAccess; /** List of schemes whose objects can be referenced without qualification. */ - @JsonProperty("sql_path") private String sqlPath; public CreateFunction setCatalogName(String catalogName) { @@ -367,4 +357,78 @@ public String toString() { .add("sqlPath", sqlPath) .toString(); } + + CreateFunctionPb toPb() { + CreateFunctionPb pb = new CreateFunctionPb(); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setDataType(dataType); + pb.setExternalLanguage(externalLanguage); + pb.setExternalName(externalName); + pb.setFullDataType(fullDataType); + pb.setInputParams(inputParams); + pb.setIsDeterministic(isDeterministic); + pb.setIsNullCall(isNullCall); + pb.setName(name); + pb.setParameterStyle(parameterStyle); + pb.setProperties(properties); + pb.setReturnParams(returnParams); + pb.setRoutineBody(routineBody); + pb.setRoutineDefinition(routineDefinition); + pb.setRoutineDependencies(routineDependencies); + pb.setSchemaName(schemaName); + pb.setSecurityType(securityType); + pb.setSpecificName(specificName); + pb.setSqlDataAccess(sqlDataAccess); + pb.setSqlPath(sqlPath); + + return pb; + } + + static CreateFunction fromPb(CreateFunctionPb pb) { + CreateFunction model = new CreateFunction(); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + 
model.setDataType(pb.getDataType()); + model.setExternalLanguage(pb.getExternalLanguage()); + model.setExternalName(pb.getExternalName()); + model.setFullDataType(pb.getFullDataType()); + model.setInputParams(pb.getInputParams()); + model.setIsDeterministic(pb.getIsDeterministic()); + model.setIsNullCall(pb.getIsNullCall()); + model.setName(pb.getName()); + model.setParameterStyle(pb.getParameterStyle()); + model.setProperties(pb.getProperties()); + model.setReturnParams(pb.getReturnParams()); + model.setRoutineBody(pb.getRoutineBody()); + model.setRoutineDefinition(pb.getRoutineDefinition()); + model.setRoutineDependencies(pb.getRoutineDependencies()); + model.setSchemaName(pb.getSchemaName()); + model.setSecurityType(pb.getSecurityType()); + model.setSpecificName(pb.getSpecificName()); + model.setSqlDataAccess(pb.getSqlDataAccess()); + model.setSqlPath(pb.getSqlPath()); + + return model; + } + + public static class CreateFunctionSerializer extends JsonSerializer { + @Override + public void serialize(CreateFunction value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateFunctionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateFunctionDeserializer extends JsonDeserializer { + @Override + public CreateFunction deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateFunctionPb pb = mapper.readValue(p, CreateFunctionPb.class); + return CreateFunction.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionPb.java new file mode 100755 index 000000000..c3abc9180 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionPb.java @@ -0,0 +1,344 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateFunctionPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("data_type") + private ColumnTypeName dataType; + + @JsonProperty("external_language") + private String externalLanguage; + + @JsonProperty("external_name") + private String externalName; + + @JsonProperty("full_data_type") + private String fullDataType; + + @JsonProperty("input_params") + private FunctionParameterInfos inputParams; + + @JsonProperty("is_deterministic") + private Boolean isDeterministic; + + @JsonProperty("is_null_call") + private Boolean isNullCall; + + @JsonProperty("name") + private String name; + + @JsonProperty("parameter_style") + private CreateFunctionParameterStyle parameterStyle; + + @JsonProperty("properties") + private String properties; + + @JsonProperty("return_params") + private FunctionParameterInfos returnParams; + + @JsonProperty("routine_body") + private CreateFunctionRoutineBody routineBody; + + @JsonProperty("routine_definition") + private String routineDefinition; + + @JsonProperty("routine_dependencies") + private 
DependencyList routineDependencies; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("security_type") + private CreateFunctionSecurityType securityType; + + @JsonProperty("specific_name") + private String specificName; + + @JsonProperty("sql_data_access") + private CreateFunctionSqlDataAccess sqlDataAccess; + + @JsonProperty("sql_path") + private String sqlPath; + + public CreateFunctionPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CreateFunctionPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateFunctionPb setDataType(ColumnTypeName dataType) { + this.dataType = dataType; + return this; + } + + public ColumnTypeName getDataType() { + return dataType; + } + + public CreateFunctionPb setExternalLanguage(String externalLanguage) { + this.externalLanguage = externalLanguage; + return this; + } + + public String getExternalLanguage() { + return externalLanguage; + } + + public CreateFunctionPb setExternalName(String externalName) { + this.externalName = externalName; + return this; + } + + public String getExternalName() { + return externalName; + } + + public CreateFunctionPb setFullDataType(String fullDataType) { + this.fullDataType = fullDataType; + return this; + } + + public String getFullDataType() { + return fullDataType; + } + + public CreateFunctionPb setInputParams(FunctionParameterInfos inputParams) { + this.inputParams = inputParams; + return this; + } + + public FunctionParameterInfos getInputParams() { + return inputParams; + } + + public CreateFunctionPb setIsDeterministic(Boolean isDeterministic) { + this.isDeterministic = isDeterministic; + return this; + } + + public Boolean getIsDeterministic() { + return isDeterministic; + } + + public CreateFunctionPb setIsNullCall(Boolean isNullCall) { + this.isNullCall 
= isNullCall; + return this; + } + + public Boolean getIsNullCall() { + return isNullCall; + } + + public CreateFunctionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateFunctionPb setParameterStyle(CreateFunctionParameterStyle parameterStyle) { + this.parameterStyle = parameterStyle; + return this; + } + + public CreateFunctionParameterStyle getParameterStyle() { + return parameterStyle; + } + + public CreateFunctionPb setProperties(String properties) { + this.properties = properties; + return this; + } + + public String getProperties() { + return properties; + } + + public CreateFunctionPb setReturnParams(FunctionParameterInfos returnParams) { + this.returnParams = returnParams; + return this; + } + + public FunctionParameterInfos getReturnParams() { + return returnParams; + } + + public CreateFunctionPb setRoutineBody(CreateFunctionRoutineBody routineBody) { + this.routineBody = routineBody; + return this; + } + + public CreateFunctionRoutineBody getRoutineBody() { + return routineBody; + } + + public CreateFunctionPb setRoutineDefinition(String routineDefinition) { + this.routineDefinition = routineDefinition; + return this; + } + + public String getRoutineDefinition() { + return routineDefinition; + } + + public CreateFunctionPb setRoutineDependencies(DependencyList routineDependencies) { + this.routineDependencies = routineDependencies; + return this; + } + + public DependencyList getRoutineDependencies() { + return routineDependencies; + } + + public CreateFunctionPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public CreateFunctionPb setSecurityType(CreateFunctionSecurityType securityType) { + this.securityType = securityType; + return this; + } + + public CreateFunctionSecurityType getSecurityType() { + return securityType; + } + + public CreateFunctionPb 
setSpecificName(String specificName) { + this.specificName = specificName; + return this; + } + + public String getSpecificName() { + return specificName; + } + + public CreateFunctionPb setSqlDataAccess(CreateFunctionSqlDataAccess sqlDataAccess) { + this.sqlDataAccess = sqlDataAccess; + return this; + } + + public CreateFunctionSqlDataAccess getSqlDataAccess() { + return sqlDataAccess; + } + + public CreateFunctionPb setSqlPath(String sqlPath) { + this.sqlPath = sqlPath; + return this; + } + + public String getSqlPath() { + return sqlPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateFunctionPb that = (CreateFunctionPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(dataType, that.dataType) + && Objects.equals(externalLanguage, that.externalLanguage) + && Objects.equals(externalName, that.externalName) + && Objects.equals(fullDataType, that.fullDataType) + && Objects.equals(inputParams, that.inputParams) + && Objects.equals(isDeterministic, that.isDeterministic) + && Objects.equals(isNullCall, that.isNullCall) + && Objects.equals(name, that.name) + && Objects.equals(parameterStyle, that.parameterStyle) + && Objects.equals(properties, that.properties) + && Objects.equals(returnParams, that.returnParams) + && Objects.equals(routineBody, that.routineBody) + && Objects.equals(routineDefinition, that.routineDefinition) + && Objects.equals(routineDependencies, that.routineDependencies) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(securityType, that.securityType) + && Objects.equals(specificName, that.specificName) + && Objects.equals(sqlDataAccess, that.sqlDataAccess) + && Objects.equals(sqlPath, that.sqlPath); + } + + @Override + public int hashCode() { + return Objects.hash( + catalogName, + comment, + dataType, + externalLanguage, + externalName, + fullDataType, + 
inputParams, + isDeterministic, + isNullCall, + name, + parameterStyle, + properties, + returnParams, + routineBody, + routineDefinition, + routineDependencies, + schemaName, + securityType, + specificName, + sqlDataAccess, + sqlPath); + } + + @Override + public String toString() { + return new ToStringer(CreateFunctionPb.class) + .add("catalogName", catalogName) + .add("comment", comment) + .add("dataType", dataType) + .add("externalLanguage", externalLanguage) + .add("externalName", externalName) + .add("fullDataType", fullDataType) + .add("inputParams", inputParams) + .add("isDeterministic", isDeterministic) + .add("isNullCall", isNullCall) + .add("name", name) + .add("parameterStyle", parameterStyle) + .add("properties", properties) + .add("returnParams", returnParams) + .add("routineBody", routineBody) + .add("routineDefinition", routineDefinition) + .add("routineDependencies", routineDependencies) + .add("schemaName", schemaName) + .add("securityType", securityType) + .add("specificName", specificName) + .add("sqlDataAccess", sqlDataAccess) + .add("sqlPath", sqlPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java index 16a8227e7..b96e538df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateFunctionRequest.CreateFunctionRequestSerializer.class) +@JsonDeserialize(using = CreateFunctionRequest.CreateFunctionRequestDeserializer.class) public class CreateFunctionRequest { /** Partial __FunctionInfo__ specifying the function to be created. */ - @JsonProperty("function_info") private CreateFunction functionInfo; public CreateFunctionRequest setFunctionInfo(CreateFunction functionInfo) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateFunctionRequest.class).add("functionInfo", functionInfo).toString(); } + + CreateFunctionRequestPb toPb() { + CreateFunctionRequestPb pb = new CreateFunctionRequestPb(); + pb.setFunctionInfo(functionInfo); + + return pb; + } + + static CreateFunctionRequest fromPb(CreateFunctionRequestPb pb) { + CreateFunctionRequest model = new CreateFunctionRequest(); + model.setFunctionInfo(pb.getFunctionInfo()); + + return model; + } + + public static class CreateFunctionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateFunctionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateFunctionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateFunctionRequestDeserializer + extends JsonDeserializer { + @Override + public CreateFunctionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateFunctionRequestPb pb = mapper.readValue(p, CreateFunctionRequestPb.class); + return CreateFunctionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequestPb.java new file mode 100755 index 000000000..ebd7cba56 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateFunctionRequestPb { + @JsonProperty("function_info") + private CreateFunction functionInfo; + + public CreateFunctionRequestPb setFunctionInfo(CreateFunction functionInfo) { + this.functionInfo = functionInfo; + return this; + } + + public CreateFunction getFunctionInfo() { + return functionInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateFunctionRequestPb that = (CreateFunctionRequestPb) o; + return Objects.equals(functionInfo, that.functionInfo); + } + + @Override + public int hashCode() { + return Objects.hash(functionInfo); + } + + @Override + public String toString() { + return new ToStringer(CreateFunctionRequestPb.class) + .add("functionInfo", functionInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java index 717ad49a0..bd9dfd8a7 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java @@ -4,25 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateMetastore.CreateMetastoreSerializer.class) +@JsonDeserialize(using = CreateMetastore.CreateMetastoreDeserializer.class) public class CreateMetastore { /** The user-specified name of the metastore. */ - @JsonProperty("name") private String name; - /** - * Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted - * in the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is - * omitted, the region of the workspace receiving the request will be used. - */ - @JsonProperty("region") + /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). 
*/ private String region; /** The storage root URL for metastore */ - @JsonProperty("storage_root") private String storageRoot; public CreateMetastore setName(String name) { @@ -75,4 +79,42 @@ public String toString() { .add("storageRoot", storageRoot) .toString(); } + + CreateMetastorePb toPb() { + CreateMetastorePb pb = new CreateMetastorePb(); + pb.setName(name); + pb.setRegion(region); + pb.setStorageRoot(storageRoot); + + return pb; + } + + static CreateMetastore fromPb(CreateMetastorePb pb) { + CreateMetastore model = new CreateMetastore(); + model.setName(pb.getName()); + model.setRegion(pb.getRegion()); + model.setStorageRoot(pb.getStorageRoot()); + + return model; + } + + public static class CreateMetastoreSerializer extends JsonSerializer { + @Override + public void serialize(CreateMetastore value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateMetastorePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateMetastoreDeserializer extends JsonDeserializer { + @Override + public CreateMetastore deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateMetastorePb pb = mapper.readValue(p, CreateMetastorePb.class); + return CreateMetastore.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java index 779793175..0cd5972bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateMetastoreAssignment.CreateMetastoreAssignmentSerializer.class) +@JsonDeserialize(using = CreateMetastoreAssignment.CreateMetastoreAssignmentDeserializer.class) public class CreateMetastoreAssignment { /** - * The name of the default catalog in the metastore. This field is depracted. Please use "Default + * The name of the default catalog in the metastore. This field is deprecated. Please use "Default * Namespace API" to configure the default catalog for a Databricks workspace. 
*/ - @JsonProperty("default_catalog_name") private String defaultCatalogName; /** The unique ID of the metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** A workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public CreateMetastoreAssignment setDefaultCatalogName(String defaultCatalogName) { this.defaultCatalogName = defaultCatalogName; @@ -74,4 +82,45 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + CreateMetastoreAssignmentPb toPb() { + CreateMetastoreAssignmentPb pb = new CreateMetastoreAssignmentPb(); + pb.setDefaultCatalogName(defaultCatalogName); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static CreateMetastoreAssignment fromPb(CreateMetastoreAssignmentPb pb) { + CreateMetastoreAssignment model = new CreateMetastoreAssignment(); + model.setDefaultCatalogName(pb.getDefaultCatalogName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class CreateMetastoreAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateMetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateMetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateMetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public CreateMetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateMetastoreAssignmentPb pb = mapper.readValue(p, CreateMetastoreAssignmentPb.class); + return CreateMetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignmentPb.java new file mode 100755 index 000000000..81188bc8d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignmentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateMetastoreAssignmentPb { + @JsonProperty("default_catalog_name") + private String defaultCatalogName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public CreateMetastoreAssignmentPb setDefaultCatalogName(String defaultCatalogName) { + this.defaultCatalogName = defaultCatalogName; + return this; + } + + public String getDefaultCatalogName() { + return defaultCatalogName; + } + + public CreateMetastoreAssignmentPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public CreateMetastoreAssignmentPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMetastoreAssignmentPb that = 
(CreateMetastoreAssignmentPb) o; + return Objects.equals(defaultCatalogName, that.defaultCatalogName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(defaultCatalogName, metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(CreateMetastoreAssignmentPb.class) + .add("defaultCatalogName", defaultCatalogName) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastorePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastorePb.java new file mode 100755 index 000000000..662a9c379 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastorePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateMetastorePb { + @JsonProperty("name") + private String name; + + @JsonProperty("region") + private String region; + + @JsonProperty("storage_root") + private String storageRoot; + + public CreateMetastorePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateMetastorePb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public CreateMetastorePb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMetastorePb that = (CreateMetastorePb) o; + return Objects.equals(name, that.name) + && Objects.equals(region, that.region) + && Objects.equals(storageRoot, that.storageRoot); + } + + @Override + public int hashCode() { + return Objects.hash(name, region, storageRoot); + } + + @Override + public String toString() { + return new ToStringer(CreateMetastorePb.class) + .add("name", name) + .add("region", region) + .add("storageRoot", storageRoot) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java index bb55502cd..d9c7c175a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; 
import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateMonitor.CreateMonitorSerializer.class) +@JsonDeserialize(using = CreateMonitor.CreateMonitorDeserializer.class) public class CreateMonitor { /** The directory to store monitoring assets (e.g. dashboard, metric tables). */ - @JsonProperty("assets_dir") private String assetsDir; /** * Name of the baseline table from which drift metrics are computed from. Columns in the monitored * table should also be present in the baseline table. */ - @JsonProperty("baseline_table_name") private String baselineTableName; /** @@ -27,31 +35,24 @@ public class CreateMonitor { * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across * time windows). */ - @JsonProperty("custom_metrics") private Collection customMetrics; /** The data classification config for the monitor. */ - @JsonProperty("data_classification_config") private MonitorDataClassificationConfig dataClassificationConfig; /** Configuration for monitoring inference logs. */ - @JsonProperty("inference_log") private MonitorInferenceLog inferenceLog; /** The notification settings for the monitor. 
*/ - @JsonProperty("notifications") private MonitorNotifications notifications; /** Schema where output metric tables are created. */ - @JsonProperty("output_schema_name") private String outputSchemaName; /** The schedule for automatically updating and refreshing metric tables. */ - @JsonProperty("schedule") private MonitorCronSchedule schedule; /** Whether to skip creating a default dashboard summarizing data quality metrics. */ - @JsonProperty("skip_builtin_dashboard") private Boolean skipBuiltinDashboard; /** @@ -60,25 +61,21 @@ public class CreateMonitor { * complements. For high-cardinality columns, only the top 100 unique values by frequency will * generate slices. */ - @JsonProperty("slicing_exprs") private Collection slicingExprs; /** Configuration for monitoring snapshot tables. */ - @JsonProperty("snapshot") private MonitorSnapshot snapshot; /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; /** Configuration for monitoring time series tables. */ - @JsonProperty("time_series") private MonitorTimeSeries timeSeries; /** * Optional argument to specify the warehouse for dashboard creation. If not specified, the first * running warehouse will be used. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public CreateMonitor setAssetsDir(String assetsDir) { @@ -267,4 +264,63 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + CreateMonitorPb toPb() { + CreateMonitorPb pb = new CreateMonitorPb(); + pb.setAssetsDir(assetsDir); + pb.setBaselineTableName(baselineTableName); + pb.setCustomMetrics(customMetrics); + pb.setDataClassificationConfig(dataClassificationConfig); + pb.setInferenceLog(inferenceLog); + pb.setNotifications(notifications); + pb.setOutputSchemaName(outputSchemaName); + pb.setSchedule(schedule); + pb.setSkipBuiltinDashboard(skipBuiltinDashboard); + pb.setSlicingExprs(slicingExprs); + pb.setSnapshot(snapshot); + pb.setTableName(tableName); + pb.setTimeSeries(timeSeries); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static CreateMonitor fromPb(CreateMonitorPb pb) { + CreateMonitor model = new CreateMonitor(); + model.setAssetsDir(pb.getAssetsDir()); + model.setBaselineTableName(pb.getBaselineTableName()); + model.setCustomMetrics(pb.getCustomMetrics()); + model.setDataClassificationConfig(pb.getDataClassificationConfig()); + model.setInferenceLog(pb.getInferenceLog()); + model.setNotifications(pb.getNotifications()); + model.setOutputSchemaName(pb.getOutputSchemaName()); + model.setSchedule(pb.getSchedule()); + model.setSkipBuiltinDashboard(pb.getSkipBuiltinDashboard()); + model.setSlicingExprs(pb.getSlicingExprs()); + model.setSnapshot(pb.getSnapshot()); + model.setTableName(pb.getTableName()); + model.setTimeSeries(pb.getTimeSeries()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class CreateMonitorSerializer extends JsonSerializer { + @Override + public void serialize(CreateMonitor value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateMonitorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateMonitorDeserializer extends 
JsonDeserializer { + @Override + public CreateMonitor deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateMonitorPb pb = mapper.readValue(p, CreateMonitorPb.class); + return CreateMonitor.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitorPb.java new file mode 100755 index 000000000..c2a098cf6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitorPb.java @@ -0,0 +1,241 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateMonitorPb { + @JsonProperty("assets_dir") + private String assetsDir; + + @JsonProperty("baseline_table_name") + private String baselineTableName; + + @JsonProperty("custom_metrics") + private Collection customMetrics; + + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + @JsonProperty("inference_log") + private MonitorInferenceLog inferenceLog; + + @JsonProperty("notifications") + private MonitorNotifications notifications; + + @JsonProperty("output_schema_name") + private String outputSchemaName; + + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + @JsonProperty("skip_builtin_dashboard") + private Boolean skipBuiltinDashboard; + + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + @JsonProperty("snapshot") + private 
MonitorSnapshot snapshot; + + @JsonIgnore private String tableName; + + @JsonProperty("time_series") + private MonitorTimeSeries timeSeries; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public CreateMonitorPb setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public CreateMonitorPb setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public CreateMonitorPb setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public CreateMonitorPb setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public CreateMonitorPb setInferenceLog(MonitorInferenceLog inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLog getInferenceLog() { + return inferenceLog; + } + + public CreateMonitorPb setNotifications(MonitorNotifications notifications) { + this.notifications = notifications; + return this; + } + + public MonitorNotifications getNotifications() { + return notifications; + } + + public CreateMonitorPb setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public CreateMonitorPb setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public CreateMonitorPb setSkipBuiltinDashboard(Boolean 
skipBuiltinDashboard) { + this.skipBuiltinDashboard = skipBuiltinDashboard; + return this; + } + + public Boolean getSkipBuiltinDashboard() { + return skipBuiltinDashboard; + } + + public CreateMonitorPb setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public CreateMonitorPb setSnapshot(MonitorSnapshot snapshot) { + this.snapshot = snapshot; + return this; + } + + public MonitorSnapshot getSnapshot() { + return snapshot; + } + + public CreateMonitorPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public CreateMonitorPb setTimeSeries(MonitorTimeSeries timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeries getTimeSeries() { + return timeSeries; + } + + public CreateMonitorPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMonitorPb that = (CreateMonitorPb) o; + return Objects.equals(assetsDir, that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(tableName, that.tableName) + && Objects.equals(timeSeries, 
that.timeSeries) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + dataClassificationConfig, + inferenceLog, + notifications, + outputSchemaName, + schedule, + skipBuiltinDashboard, + slicingExprs, + snapshot, + tableName, + timeSeries, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(CreateMonitorPb.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dataClassificationConfig", dataClassificationConfig) + .add("inferenceLog", inferenceLog) + .add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("schedule", schedule) + .add("skipBuiltinDashboard", skipBuiltinDashboard) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("tableName", tableName) + .add("timeSeries", timeSeries) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java index 7f3a0730c..701159ce2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an Online Table */ @Generated +@JsonSerialize(using = CreateOnlineTableRequest.CreateOnlineTableRequestSerializer.class) +@JsonDeserialize(using = CreateOnlineTableRequest.CreateOnlineTableRequestDeserializer.class) public class CreateOnlineTableRequest { /** Online Table information. */ - @JsonProperty("table") private OnlineTable table; public CreateOnlineTableRequest setTable(OnlineTable table) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateOnlineTableRequest.class).add("table", table).toString(); } + + CreateOnlineTableRequestPb toPb() { + CreateOnlineTableRequestPb pb = new CreateOnlineTableRequestPb(); + pb.setTable(table); + + return pb; + } + + static CreateOnlineTableRequest fromPb(CreateOnlineTableRequestPb pb) { + CreateOnlineTableRequest model = new CreateOnlineTableRequest(); + model.setTable(pb.getTable()); + + return model; + } + + public static class CreateOnlineTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateOnlineTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateOnlineTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateOnlineTableRequestDeserializer + extends JsonDeserializer { + @Override + public CreateOnlineTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateOnlineTableRequestPb pb = mapper.readValue(p, CreateOnlineTableRequestPb.class); + return CreateOnlineTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequestPb.java new file mode 100755 index 000000000..78bbd8523 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequestPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an Online Table */ +@Generated +class CreateOnlineTableRequestPb { + @JsonProperty("table") + private OnlineTable table; + + public CreateOnlineTableRequestPb setTable(OnlineTable table) { + this.table = table; + return this; + } + + public OnlineTable getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateOnlineTableRequestPb that = (CreateOnlineTableRequestPb) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + return new ToStringer(CreateOnlineTableRequestPb.class).add("table", table).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java index 520b0f60a..5fb31d199 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java @@ -4,29 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRegisteredModelRequest.CreateRegisteredModelRequestSerializer.class) +@JsonDeserialize( + using = CreateRegisteredModelRequest.CreateRegisteredModelRequestDeserializer.class) public class CreateRegisteredModelRequest { /** The name of the catalog where the schema and the registered model reside */ - @JsonProperty("catalog_name") private String catalogName; /** The comment attached to the registered model */ - @JsonProperty("comment") private String comment; /** The name of the registered model */ - @JsonProperty("name") private String name; /** The name of the schema where the registered model resides */ - @JsonProperty("schema_name") private String schemaName; /** The storage location on the cloud under which model version data files are stored */ - @JsonProperty("storage_location") private String storageLocation; public CreateRegisteredModelRequest setCatalogName(String catalogName) { @@ -101,4 +108,49 @@ public String toString() { .add("storageLocation", storageLocation) .toString(); } + + 
CreateRegisteredModelRequestPb toPb() { + CreateRegisteredModelRequestPb pb = new CreateRegisteredModelRequestPb(); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setName(name); + pb.setSchemaName(schemaName); + pb.setStorageLocation(storageLocation); + + return pb; + } + + static CreateRegisteredModelRequest fromPb(CreateRegisteredModelRequestPb pb) { + CreateRegisteredModelRequest model = new CreateRegisteredModelRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setSchemaName(pb.getSchemaName()); + model.setStorageLocation(pb.getStorageLocation()); + + return model; + } + + public static class CreateRegisteredModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateRegisteredModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRegisteredModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRegisteredModelRequestDeserializer + extends JsonDeserializer { + @Override + public CreateRegisteredModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRegisteredModelRequestPb pb = mapper.readValue(p, CreateRegisteredModelRequestPb.class); + return CreateRegisteredModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequestPb.java new file mode 100755 index 000000000..6ce3c4a9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequestPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateRegisteredModelRequestPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("storage_location") + private String storageLocation; + + public CreateRegisteredModelRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CreateRegisteredModelRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateRegisteredModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateRegisteredModelRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + 
public CreateRegisteredModelRequestPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRegisteredModelRequestPb that = (CreateRegisteredModelRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(storageLocation, that.storageLocation); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, comment, name, schemaName, storageLocation); + } + + @Override + public String toString() { + return new ToStringer(CreateRegisteredModelRequestPb.class) + .add("catalogName", catalogName) + .add("comment", comment) + .add("name", name) + .add("schemaName", schemaName) + .add("storageLocation", storageLocation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java index c2be0e3c2..4782253fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateResponse.CreateResponseSerializer.class) +@JsonDeserialize(using = CreateResponse.CreateResponseDeserializer.class) public class CreateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(CreateResponse.class).toString(); } + + CreateResponsePb toPb() { + CreateResponsePb pb = new CreateResponsePb(); + + return pb; + } + + static CreateResponse fromPb(CreateResponsePb pb) { + CreateResponse model = new CreateResponse(); + + return model; + } + + public static class CreateResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateResponseDeserializer extends JsonDeserializer { + @Override + public CreateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateResponsePb pb = mapper.readValue(p, CreateResponsePb.class); + return CreateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponsePb.java similarity index 80% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponsePb.java index 147f31d48..0e8cde401 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponsePb.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class DeleteSyncedDatabaseTableResponse { +class CreateResponsePb { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteSyncedDatabaseTableResponse.class).toString(); + return new ToStringer(CreateResponsePb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java index 916b70171..f0c4f7330 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateSchema.CreateSchemaSerializer.class) +@JsonDeserialize(using = CreateSchema.CreateSchemaDeserializer.class) public class CreateSchema { /** Name of parent catalog. */ - @JsonProperty("catalog_name") private String catalogName; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Name of schema, relative to parent catalog. */ - @JsonProperty("name") private String name; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** Storage root URL for managed tables within schema. 
*/ - @JsonProperty("storage_root") private String storageRoot; public CreateSchema setCatalogName(String catalogName) { @@ -102,4 +108,45 @@ public String toString() { .add("storageRoot", storageRoot) .toString(); } + + CreateSchemaPb toPb() { + CreateSchemaPb pb = new CreateSchemaPb(); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setName(name); + pb.setProperties(properties); + pb.setStorageRoot(storageRoot); + + return pb; + } + + static CreateSchema fromPb(CreateSchemaPb pb) { + CreateSchema model = new CreateSchema(); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setProperties(pb.getProperties()); + model.setStorageRoot(pb.getStorageRoot()); + + return model; + } + + public static class CreateSchemaSerializer extends JsonSerializer { + @Override + public void serialize(CreateSchema value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateSchemaPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateSchemaDeserializer extends JsonDeserializer { + @Override + public CreateSchema deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateSchemaPb pb = mapper.readValue(p, CreateSchemaPb.class); + return CreateSchema.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchemaPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchemaPb.java new file mode 100755 index 000000000..b456c912b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchemaPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateSchemaPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("storage_root") + private String storageRoot; + + public CreateSchemaPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CreateSchemaPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateSchemaPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateSchemaPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public CreateSchemaPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSchemaPb that = (CreateSchemaPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(properties, that.properties) + && Objects.equals(storageRoot, that.storageRoot); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, comment, name, properties, storageRoot); + } + + @Override + public String toString() { + return new 
ToStringer(CreateSchemaPb.class) + .add("catalogName", catalogName) + .add("comment", comment) + .add("name", name) + .add("properties", properties) + .add("storageRoot", storageRoot) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java index b462a9075..15db55f8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java @@ -4,45 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateStorageCredential.CreateStorageCredentialSerializer.class) +@JsonDeserialize(using = CreateStorageCredential.CreateStorageCredentialDeserializer.class) public class CreateStorageCredential { /** The AWS IAM role configuration. */ - @JsonProperty("aws_iam_role") private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentityRequest azureManagedIdentity; /** The Azure service principal configuration. 
*/ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** The Cloudflare API token configuration. */ - @JsonProperty("cloudflare_api_token") private CloudflareApiToken cloudflareApiToken; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** The Databricks managed GCP service account configuration. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; /** The credential name. The name must be unique within the metastore. */ - @JsonProperty("name") private String name; /** Whether the storage credential is only usable for read operations. */ - @JsonProperty("read_only") private Boolean readOnly; /** Supplying true to this argument skips validation of the created credential. */ - @JsonProperty("skip_validation") private Boolean skipValidation; public CreateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { @@ -173,4 +175,57 @@ public String toString() { .add("skipValidation", skipValidation) .toString(); } + + CreateStorageCredentialPb toPb() { + CreateStorageCredentialPb pb = new CreateStorageCredentialPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setCloudflareApiToken(cloudflareApiToken); + pb.setComment(comment); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setName(name); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + + return pb; + } + + static CreateStorageCredential fromPb(CreateStorageCredentialPb pb) { + CreateStorageCredential model = new CreateStorageCredential(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setCloudflareApiToken(pb.getCloudflareApiToken()); + model.setComment(pb.getComment()); + 
model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setName(pb.getName()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + + return model; + } + + public static class CreateStorageCredentialSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateStorageCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateStorageCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateStorageCredentialDeserializer + extends JsonDeserializer { + @Override + public CreateStorageCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateStorageCredentialPb pb = mapper.readValue(p, CreateStorageCredentialPb.class); + return CreateStorageCredential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredentialPb.java new file mode 100755 index 000000000..8f1f1c24d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredentialPb.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateStorageCredentialPb { + @JsonProperty("aws_iam_role") + private AwsIamRoleRequest awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityRequest azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; + + @JsonProperty("name") + private String name; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public CreateStorageCredentialPb setAwsIamRole(AwsIamRoleRequest awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleRequest getAwsIamRole() { + return awsIamRole; + } + + public CreateStorageCredentialPb setAzureManagedIdentity( + AzureManagedIdentityRequest azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityRequest getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CreateStorageCredentialPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public CreateStorageCredentialPb setCloudflareApiToken(CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + public CloudflareApiToken getCloudflareApiToken() 
{ + return cloudflareApiToken; + } + + public CreateStorageCredentialPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateStorageCredentialPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public CreateStorageCredentialPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateStorageCredentialPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public CreateStorageCredentialPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateStorageCredentialPb that = (CreateStorageCredentialPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(name, that.name) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + comment, + 
databricksGcpServiceAccount, + name, + readOnly, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(CreateStorageCredentialPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("name", name) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java index d8cbfdde8..3c4b313cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateTableConstraint.CreateTableConstraintSerializer.class) +@JsonDeserialize(using = CreateTableConstraint.CreateTableConstraintDeserializer.class) public class CreateTableConstraint { /** * A table constraint, as 
defined by *one* of the following fields being set: * __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. */ - @JsonProperty("constraint") private TableConstraint constraint; /** The full name of the table referenced by the constraint. */ - @JsonProperty("full_name_arg") private String fullNameArg; public CreateTableConstraint setConstraint(TableConstraint constraint) { @@ -59,4 +68,43 @@ public String toString() { .add("fullNameArg", fullNameArg) .toString(); } + + CreateTableConstraintPb toPb() { + CreateTableConstraintPb pb = new CreateTableConstraintPb(); + pb.setConstraint(constraint); + pb.setFullNameArg(fullNameArg); + + return pb; + } + + static CreateTableConstraint fromPb(CreateTableConstraintPb pb) { + CreateTableConstraint model = new CreateTableConstraint(); + model.setConstraint(pb.getConstraint()); + model.setFullNameArg(pb.getFullNameArg()); + + return model; + } + + public static class CreateTableConstraintSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateTableConstraint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateTableConstraintPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateTableConstraintDeserializer + extends JsonDeserializer { + @Override + public CreateTableConstraint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateTableConstraintPb pb = mapper.readValue(p, CreateTableConstraintPb.class); + return CreateTableConstraint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraintPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraintPb.java new file mode 100755 index 000000000..285498857 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraintPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateTableConstraintPb { + @JsonProperty("constraint") + private TableConstraint constraint; + + @JsonProperty("full_name_arg") + private String fullNameArg; + + public CreateTableConstraintPb setConstraint(TableConstraint constraint) { + this.constraint = constraint; + return this; + } + + public TableConstraint getConstraint() { + return constraint; + } + + public CreateTableConstraintPb setFullNameArg(String fullNameArg) { + this.fullNameArg = fullNameArg; + return this; + } + + public String getFullNameArg() { + return fullNameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTableConstraintPb that = (CreateTableConstraintPb) o; + return Objects.equals(constraint, that.constraint) + && Objects.equals(fullNameArg, that.fullNameArg); + } + + @Override + public int hashCode() { + return Objects.hash(constraint, fullNameArg); + } + + @Override + public String toString() { + return new ToStringer(CreateTableConstraintPb.class) + .add("constraint", constraint) + 
.add("fullNameArg", fullNameArg) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java index bbe39faf4..a38792d2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateVolumeRequestContent.CreateVolumeRequestContentSerializer.class) +@JsonDeserialize(using = CreateVolumeRequestContent.CreateVolumeRequestContentDeserializer.class) public class CreateVolumeRequestContent { /** The name of the catalog where the schema and the volume are */ - @JsonProperty("catalog_name") private String catalogName; /** The comment attached to the volume */ - @JsonProperty("comment") private String comment; /** The name of the volume */ - @JsonProperty("name") private String name; /** The name of the schema where the volume is */ - @JsonProperty("schema_name") private String schemaName; /** The storage location on the cloud */ - @JsonProperty("storage_location") private String 
storageLocation; /** @@ -36,7 +42,6 @@ public class CreateVolumeRequestContent { * *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external */ - @JsonProperty("volume_type") private VolumeType volumeType; public CreateVolumeRequestContent setCatalogName(String catalogName) { @@ -122,4 +127,51 @@ public String toString() { .add("volumeType", volumeType) .toString(); } + + CreateVolumeRequestContentPb toPb() { + CreateVolumeRequestContentPb pb = new CreateVolumeRequestContentPb(); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setName(name); + pb.setSchemaName(schemaName); + pb.setStorageLocation(storageLocation); + pb.setVolumeType(volumeType); + + return pb; + } + + static CreateVolumeRequestContent fromPb(CreateVolumeRequestContentPb pb) { + CreateVolumeRequestContent model = new CreateVolumeRequestContent(); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setSchemaName(pb.getSchemaName()); + model.setStorageLocation(pb.getStorageLocation()); + model.setVolumeType(pb.getVolumeType()); + + return model; + } + + public static class CreateVolumeRequestContentSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateVolumeRequestContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateVolumeRequestContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateVolumeRequestContentDeserializer + extends JsonDeserializer { + @Override + public CreateVolumeRequestContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateVolumeRequestContentPb pb = mapper.readValue(p, CreateVolumeRequestContentPb.class); + return CreateVolumeRequestContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContentPb.java new file mode 100755 index 000000000..56d323d63 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContentPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateVolumeRequestContentPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("volume_type") + private VolumeType volumeType; + + public CreateVolumeRequestContentPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CreateVolumeRequestContentPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateVolumeRequestContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateVolumeRequestContentPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String 
getSchemaName() { + return schemaName; + } + + public CreateVolumeRequestContentPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public CreateVolumeRequestContentPb setVolumeType(VolumeType volumeType) { + this.volumeType = volumeType; + return this; + } + + public VolumeType getVolumeType() { + return volumeType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateVolumeRequestContentPb that = (CreateVolumeRequestContentPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(volumeType, that.volumeType); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, comment, name, schemaName, storageLocation, volumeType); + } + + @Override + public String toString() { + return new ToStringer(CreateVolumeRequestContentPb.class) + .add("catalogName", catalogName) + .add("comment", comment) + .add("name", name) + .add("schemaName", schemaName) + .add("storageLocation", storageLocation) + .add("volumeType", volumeType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java index 629f271e9..6fbd4ec12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java @@ -4,93 +4,86 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CredentialInfo.CredentialInfoSerializer.class) +@JsonDeserialize(using = CredentialInfo.CredentialInfoDeserializer.class) public class CredentialInfo { /** The AWS IAM role configuration */ - @JsonProperty("aws_iam_role") private AwsIamRole awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** Time at which this credential was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of credential creator. */ - @JsonProperty("created_by") private String createdBy; /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccount databricksGcpServiceAccount; /** The full name of the credential. */ - @JsonProperty("full_name") private String fullName; /** The unique identifier of the credential. 
*/ - @JsonProperty("id") private String id; /** * Whether the current securable is accessible from all workspaces or a specific set of * workspaces. */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Unique identifier of the parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** * The credential name. The name must be unique among storage and service credentials within the * metastore. */ - @JsonProperty("name") private String name; /** Username of current owner of credential. */ - @JsonProperty("owner") private String owner; /** Indicates the purpose of the credential. */ - @JsonProperty("purpose") private CredentialPurpose purpose; /** * Whether the credential is usable only for read operations. Only applicable when purpose is * **STORAGE**. */ - @JsonProperty("read_only") private Boolean readOnly; /** Time at which this credential was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the credential. */ - @JsonProperty("updated_by") private String updatedBy; /** * Whether this credential is the current metastore's root storage credential. Only applicable * when purpose is **STORAGE**. 
*/ - @JsonProperty("used_for_managed_storage") private Boolean usedForManagedStorage; public CredentialInfo setAwsIamRole(AwsIamRole awsIamRole) { @@ -327,4 +320,72 @@ public String toString() { .add("usedForManagedStorage", usedForManagedStorage) .toString(); } + + CredentialInfoPb toPb() { + CredentialInfoPb pb = new CredentialInfoPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setFullName(fullName); + pb.setId(id); + pb.setIsolationMode(isolationMode); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setPurpose(purpose); + pb.setReadOnly(readOnly); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setUsedForManagedStorage(usedForManagedStorage); + + return pb; + } + + static CredentialInfo fromPb(CredentialInfoPb pb) { + CredentialInfo model = new CredentialInfo(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setFullName(pb.getFullName()); + model.setId(pb.getId()); + model.setIsolationMode(pb.getIsolationMode()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPurpose(pb.getPurpose()); + model.setReadOnly(pb.getReadOnly()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setUsedForManagedStorage(pb.getUsedForManagedStorage()); + + return model; + } + + public static class CredentialInfoSerializer extends JsonSerializer { + 
@Override + public void serialize(CredentialInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CredentialInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CredentialInfoDeserializer extends JsonDeserializer { + @Override + public CredentialInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CredentialInfoPb pb = mapper.readValue(p, CredentialInfoPb.class); + return CredentialInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfoPb.java new file mode 100755 index 000000000..02f34a5f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfoPb.java @@ -0,0 +1,300 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CredentialInfoPb { + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("id") + private String id; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("purpose") + private CredentialPurpose purpose; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("used_for_managed_storage") + private Boolean usedForManagedStorage; + + public CredentialInfoPb setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CredentialInfoPb setAzureManagedIdentity(AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CredentialInfoPb 
setAzureServicePrincipal(AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public CredentialInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CredentialInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CredentialInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public CredentialInfoPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public CredentialInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CredentialInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CredentialInfoPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public CredentialInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public CredentialInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CredentialInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + 
return owner; + } + + public CredentialInfoPb setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CredentialInfoPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public CredentialInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public CredentialInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public CredentialInfoPb setUsedForManagedStorage(Boolean usedForManagedStorage) { + this.usedForManagedStorage = usedForManagedStorage; + return this; + } + + public Boolean getUsedForManagedStorage() { + return usedForManagedStorage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialInfoPb that = (CredentialInfoPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(fullName, that.fullName) + && Objects.equals(id, that.id) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(purpose, that.purpose) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && 
Objects.equals(usedForManagedStorage, that.usedForManagedStorage); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + comment, + createdAt, + createdBy, + databricksGcpServiceAccount, + fullName, + id, + isolationMode, + metastoreId, + name, + owner, + purpose, + readOnly, + updatedAt, + updatedBy, + usedForManagedStorage); + } + + @Override + public String toString() { + return new ToStringer(CredentialInfoPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("fullName", fullName) + .add("id", id) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("purpose", purpose) + .add("readOnly", readOnly) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("usedForManagedStorage", usedForManagedStorage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java index a823a5348..98b897cb2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CredentialValidationResult.CredentialValidationResultSerializer.class) +@JsonDeserialize(using = CredentialValidationResult.CredentialValidationResultDeserializer.class) public class CredentialValidationResult { /** Error message would exist when the result does not equal to **PASS**. */ - @JsonProperty("message") private String message; /** The results of the tested operation. */ - @JsonProperty("result") private ValidateCredentialResult result; public CredentialValidationResult setMessage(String message) { @@ -55,4 +64,43 @@ public String toString() { .add("result", result) .toString(); } + + CredentialValidationResultPb toPb() { + CredentialValidationResultPb pb = new CredentialValidationResultPb(); + pb.setMessage(message); + pb.setResult(result); + + return pb; + } + + static CredentialValidationResult fromPb(CredentialValidationResultPb pb) { + CredentialValidationResult model = new CredentialValidationResult(); + model.setMessage(pb.getMessage()); + model.setResult(pb.getResult()); + + return model; + } + + public static class CredentialValidationResultSerializer + extends JsonSerializer { + @Override + public void serialize( + CredentialValidationResult value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CredentialValidationResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CredentialValidationResultDeserializer + extends JsonDeserializer { + @Override + public CredentialValidationResult deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CredentialValidationResultPb pb = mapper.readValue(p, CredentialValidationResultPb.class); + return CredentialValidationResult.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResultPb.java new file mode 100755 index 000000000..8a4b1a079 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResultPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CredentialValidationResultPb { + @JsonProperty("message") + private String message; + + @JsonProperty("result") + private ValidateCredentialResult result; + + public CredentialValidationResultPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public CredentialValidationResultPb setResult(ValidateCredentialResult result) { + this.result = result; + return this; + } + + public ValidateCredentialResult getResult() { + return result; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialValidationResultPb that = (CredentialValidationResultPb) o; + return Objects.equals(message, that.message) && Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(message, result); + } + + @Override + public String toString() { + 
return new ToStringer(CredentialValidationResultPb.class) + .add("message", message) + .add("result", result) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java index 1557d0944..a3ce8b4f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java @@ -21,7 +21,7 @@ public CredentialInfo createCredential(CreateCredentialRequest request) { String path = "/api/2.1/unity-catalog/credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CredentialInfo.class); @@ -35,7 +35,7 @@ public void deleteCredential(DeleteCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteCredentialResponse.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public TemporaryCredentials generateTemporaryServiceCredential( String path = "/api/2.1/unity-catalog/temporary-service-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TemporaryCredentials.class); @@ -63,7 +63,7 @@ public CredentialInfo getCredential(GetCredentialRequest request) { String path = 
String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CredentialInfo.class); } catch (IOException e) { @@ -76,7 +76,7 @@ public ListCredentialsResponse listCredentials(ListCredentialsRequest request) { String path = "/api/2.1/unity-catalog/credentials"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListCredentialsResponse.class); } catch (IOException e) { @@ -89,7 +89,7 @@ public CredentialInfo updateCredential(UpdateCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CredentialInfo.class); @@ -103,7 +103,7 @@ public ValidateCredentialResponse validateCredential(ValidateCredentialRequest r String path = "/api/2.1/unity-catalog/validate-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ValidateCredentialResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java index 390833376..7ef79065c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java @@ -4,26 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ @Generated +@JsonSerialize(using = DatabricksGcpServiceAccount.DatabricksGcpServiceAccountSerializer.class) +@JsonDeserialize(using = DatabricksGcpServiceAccount.DatabricksGcpServiceAccountDeserializer.class) public class DatabricksGcpServiceAccount { /** * The Databricks internal ID that represents this managed identity. This field is only used to * persist the credential_id once it is fetched from the credentials manager - as we only use the * protobuf serializer to store credentials, this ID gets persisted to the database */ - @JsonProperty("credential_id") private String credentialId; /** The email of the service account. 
*/ - @JsonProperty("email") private String email; /** The ID that represents the private key for this Service Account */ - @JsonProperty("private_key_id") private String privateKeyId; public DatabricksGcpServiceAccount setCredentialId(String credentialId) { @@ -76,4 +84,45 @@ public String toString() { .add("privateKeyId", privateKeyId) .toString(); } + + DatabricksGcpServiceAccountPb toPb() { + DatabricksGcpServiceAccountPb pb = new DatabricksGcpServiceAccountPb(); + pb.setCredentialId(credentialId); + pb.setEmail(email); + pb.setPrivateKeyId(privateKeyId); + + return pb; + } + + static DatabricksGcpServiceAccount fromPb(DatabricksGcpServiceAccountPb pb) { + DatabricksGcpServiceAccount model = new DatabricksGcpServiceAccount(); + model.setCredentialId(pb.getCredentialId()); + model.setEmail(pb.getEmail()); + model.setPrivateKeyId(pb.getPrivateKeyId()); + + return model; + } + + public static class DatabricksGcpServiceAccountSerializer + extends JsonSerializer { + @Override + public void serialize( + DatabricksGcpServiceAccount value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabricksGcpServiceAccountPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabricksGcpServiceAccountDeserializer + extends JsonDeserializer { + @Override + public DatabricksGcpServiceAccount deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabricksGcpServiceAccountPb pb = mapper.readValue(p, DatabricksGcpServiceAccountPb.class); + return DatabricksGcpServiceAccount.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountPb.java new file mode 100755 index 000000000..ddfb321f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** GCP long-lived credential. Databricks-created Google Cloud Storage service account. 
*/ +@Generated +class DatabricksGcpServiceAccountPb { + @JsonProperty("credential_id") + private String credentialId; + + @JsonProperty("email") + private String email; + + @JsonProperty("private_key_id") + private String privateKeyId; + + public DatabricksGcpServiceAccountPb setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public DatabricksGcpServiceAccountPb setEmail(String email) { + this.email = email; + return this; + } + + public String getEmail() { + return email; + } + + public DatabricksGcpServiceAccountPb setPrivateKeyId(String privateKeyId) { + this.privateKeyId = privateKeyId; + return this; + } + + public String getPrivateKeyId() { + return privateKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabricksGcpServiceAccountPb that = (DatabricksGcpServiceAccountPb) o; + return Objects.equals(credentialId, that.credentialId) + && Objects.equals(email, that.email) + && Objects.equals(privateKeyId, that.privateKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId, email, privateKeyId); + } + + @Override + public String toString() { + return new ToStringer(DatabricksGcpServiceAccountPb.class) + .add("credentialId", credentialId) + .add("email", email) + .add("privateKeyId", privateKeyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java index 82c772aaf..7c811d917 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java @@ -4,9 +4,23 
@@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DatabricksGcpServiceAccountRequest.DatabricksGcpServiceAccountRequestSerializer.class) +@JsonDeserialize( + using = DatabricksGcpServiceAccountRequest.DatabricksGcpServiceAccountRequestDeserializer.class) public class DatabricksGcpServiceAccountRequest { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(DatabricksGcpServiceAccountRequest.class).toString(); } + + DatabricksGcpServiceAccountRequestPb toPb() { + DatabricksGcpServiceAccountRequestPb pb = new DatabricksGcpServiceAccountRequestPb(); + + return pb; + } + + static DatabricksGcpServiceAccountRequest fromPb(DatabricksGcpServiceAccountRequestPb pb) { + DatabricksGcpServiceAccountRequest model = new DatabricksGcpServiceAccountRequest(); + + return model; + } + + public static class DatabricksGcpServiceAccountRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DatabricksGcpServiceAccountRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabricksGcpServiceAccountRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabricksGcpServiceAccountRequestDeserializer + extends JsonDeserializer { + @Override + public DatabricksGcpServiceAccountRequest 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabricksGcpServiceAccountRequestPb pb = + mapper.readValue(p, DatabricksGcpServiceAccountRequestPb.class); + return DatabricksGcpServiceAccountRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequestPb.java new file mode 100755 index 000000000..4f3571570 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequestPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DatabricksGcpServiceAccountRequestPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DatabricksGcpServiceAccountRequestPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java index 5ebfeb231..34bb88606 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java @@ -4,19 
+4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DatabricksGcpServiceAccountResponse.DatabricksGcpServiceAccountResponseSerializer.class) +@JsonDeserialize( + using = + DatabricksGcpServiceAccountResponse.DatabricksGcpServiceAccountResponseDeserializer.class) public class DatabricksGcpServiceAccountResponse { /** * The Databricks internal ID that represents this service account. This is an output-only field. */ - @JsonProperty("credential_id") private String credentialId; /** The email of the service account. This is an output-only field. 
*/ - @JsonProperty("email") private String email; public DatabricksGcpServiceAccountResponse setCredentialId(String credentialId) { @@ -57,4 +69,44 @@ public String toString() { .add("email", email) .toString(); } + + DatabricksGcpServiceAccountResponsePb toPb() { + DatabricksGcpServiceAccountResponsePb pb = new DatabricksGcpServiceAccountResponsePb(); + pb.setCredentialId(credentialId); + pb.setEmail(email); + + return pb; + } + + static DatabricksGcpServiceAccountResponse fromPb(DatabricksGcpServiceAccountResponsePb pb) { + DatabricksGcpServiceAccountResponse model = new DatabricksGcpServiceAccountResponse(); + model.setCredentialId(pb.getCredentialId()); + model.setEmail(pb.getEmail()); + + return model; + } + + public static class DatabricksGcpServiceAccountResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DatabricksGcpServiceAccountResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabricksGcpServiceAccountResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabricksGcpServiceAccountResponseDeserializer + extends JsonDeserializer { + @Override + public DatabricksGcpServiceAccountResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabricksGcpServiceAccountResponsePb pb = + mapper.readValue(p, DatabricksGcpServiceAccountResponsePb.class); + return DatabricksGcpServiceAccountResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponsePb.java new file mode 100755 index 000000000..ee0ee23a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DatabricksGcpServiceAccountResponsePb { + @JsonProperty("credential_id") + private String credentialId; + + @JsonProperty("email") + private String email; + + public DatabricksGcpServiceAccountResponsePb setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public DatabricksGcpServiceAccountResponsePb setEmail(String email) { + this.email = email; + return this; + } + + public String getEmail() { + return email; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabricksGcpServiceAccountResponsePb that = (DatabricksGcpServiceAccountResponsePb) o; + return Objects.equals(credentialId, that.credentialId) && Objects.equals(email, that.email); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId, email); + } + + @Override + public String toString() { + return new 
ToStringer(DatabricksGcpServiceAccountResponsePb.class) + .add("credentialId", credentialId) + .add("email", email) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java index d0acb5c9c..ba4df02c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java @@ -4,17 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a metastore assignment */ @Generated +@JsonSerialize( + using = + DeleteAccountMetastoreAssignmentRequest.DeleteAccountMetastoreAssignmentRequestSerializer + .class) +@JsonDeserialize( + using = + DeleteAccountMetastoreAssignmentRequest.DeleteAccountMetastoreAssignmentRequestDeserializer + .class) public class DeleteAccountMetastoreAssignmentRequest { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public DeleteAccountMetastoreAssignmentRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -55,4 +72,47 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + DeleteAccountMetastoreAssignmentRequestPb toPb() { + DeleteAccountMetastoreAssignmentRequestPb pb = new DeleteAccountMetastoreAssignmentRequestPb(); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static DeleteAccountMetastoreAssignmentRequest fromPb( + DeleteAccountMetastoreAssignmentRequestPb pb) { + DeleteAccountMetastoreAssignmentRequest model = new DeleteAccountMetastoreAssignmentRequest(); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class DeleteAccountMetastoreAssignmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountMetastoreAssignmentRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteAccountMetastoreAssignmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountMetastoreAssignmentRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountMetastoreAssignmentRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountMetastoreAssignmentRequestPb pb = + mapper.readValue(p, DeleteAccountMetastoreAssignmentRequestPb.class); + return DeleteAccountMetastoreAssignmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequestPb.java new file mode 100755 index 000000000..f659a18c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a metastore assignment */ +@Generated +class DeleteAccountMetastoreAssignmentRequestPb { + @JsonIgnore private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public DeleteAccountMetastoreAssignmentRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public DeleteAccountMetastoreAssignmentRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountMetastoreAssignmentRequestPb that = (DeleteAccountMetastoreAssignmentRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return 
Objects.hash(metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountMetastoreAssignmentRequestPb.class) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java index 92f93166c..0a1c45129 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java @@ -3,21 +3,30 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a metastore */ @Generated +@JsonSerialize(using = DeleteAccountMetastoreRequest.DeleteAccountMetastoreRequestSerializer.class) +@JsonDeserialize( + using = DeleteAccountMetastoreRequest.DeleteAccountMetastoreRequestDeserializer.class) public class DeleteAccountMetastoreRequest { /** Force deletion even if the metastore is not empty. Default is false. 
*/ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; public DeleteAccountMetastoreRequest setForce(Boolean force) { this.force = force; @@ -57,4 +66,44 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + DeleteAccountMetastoreRequestPb toPb() { + DeleteAccountMetastoreRequestPb pb = new DeleteAccountMetastoreRequestPb(); + pb.setForce(force); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static DeleteAccountMetastoreRequest fromPb(DeleteAccountMetastoreRequestPb pb) { + DeleteAccountMetastoreRequest model = new DeleteAccountMetastoreRequest(); + model.setForce(pb.getForce()); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class DeleteAccountMetastoreRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountMetastoreRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountMetastoreRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountMetastoreRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountMetastoreRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountMetastoreRequestPb pb = + mapper.readValue(p, DeleteAccountMetastoreRequestPb.class); + return DeleteAccountMetastoreRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequestPb.java new file mode 100755 index 000000000..c5ca78fac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a metastore */ +@Generated +class DeleteAccountMetastoreRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String metastoreId; + + public DeleteAccountMetastoreRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteAccountMetastoreRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountMetastoreRequestPb that = (DeleteAccountMetastoreRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(force, metastoreId); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteAccountMetastoreRequestPb.class) + .add("force", force) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java index d0280e9fd..9e4b1dcb8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java @@ -3,24 +3,37 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a storage credential */ @Generated +@JsonSerialize( + using = + DeleteAccountStorageCredentialRequest.DeleteAccountStorageCredentialRequestSerializer.class) +@JsonDeserialize( + using = + DeleteAccountStorageCredentialRequest.DeleteAccountStorageCredentialRequestDeserializer + .class) public class DeleteAccountStorageCredentialRequest { /** Force deletion even if the Storage Credential is not empty. Default is false. 
*/ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Name of the storage credential. */ - @JsonIgnore private String storageCredentialName; + private String storageCredentialName; public DeleteAccountStorageCredentialRequest setForce(Boolean force) { this.force = force; @@ -73,4 +86,46 @@ public String toString() { .add("storageCredentialName", storageCredentialName) .toString(); } + + DeleteAccountStorageCredentialRequestPb toPb() { + DeleteAccountStorageCredentialRequestPb pb = new DeleteAccountStorageCredentialRequestPb(); + pb.setForce(force); + pb.setMetastoreId(metastoreId); + pb.setStorageCredentialName(storageCredentialName); + + return pb; + } + + static DeleteAccountStorageCredentialRequest fromPb(DeleteAccountStorageCredentialRequestPb pb) { + DeleteAccountStorageCredentialRequest model = new DeleteAccountStorageCredentialRequest(); + model.setForce(pb.getForce()); + model.setMetastoreId(pb.getMetastoreId()); + model.setStorageCredentialName(pb.getStorageCredentialName()); + + return model; + } + + public static class DeleteAccountStorageCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountStorageCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountStorageCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountStorageCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountStorageCredentialRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountStorageCredentialRequestPb pb = + mapper.readValue(p, DeleteAccountStorageCredentialRequestPb.class); + return DeleteAccountStorageCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequestPb.java new file mode 100755 index 000000000..2c2acd2ed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequestPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a storage credential */ +@Generated +class DeleteAccountStorageCredentialRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String metastoreId; + + @JsonIgnore private String storageCredentialName; + + public DeleteAccountStorageCredentialRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteAccountStorageCredentialRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public DeleteAccountStorageCredentialRequestPb setStorageCredentialName( + String storageCredentialName) { + this.storageCredentialName = storageCredentialName; + return this; + } + + public String getStorageCredentialName() { + return storageCredentialName; + } + + @Override + public boolean equals(Object o) { + if (this == o) 
return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountStorageCredentialRequestPb that = (DeleteAccountStorageCredentialRequestPb) o; + return Objects.equals(force, that.force) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(storageCredentialName, that.storageCredentialName); + } + + @Override + public int hashCode() { + return Objects.hash(force, metastoreId, storageCredentialName); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountStorageCredentialRequestPb.class) + .add("force", force) + .add("metastoreId", metastoreId) + .add("storageCredentialName", storageCredentialName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java index 393e8801f..4b5254511 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a Registered Model Alias */ @Generated +@JsonSerialize(using = DeleteAliasRequest.DeleteAliasRequestSerializer.class) 
+@JsonDeserialize(using = DeleteAliasRequest.DeleteAliasRequestDeserializer.class) public class DeleteAliasRequest { /** The name of the alias */ - @JsonIgnore private String alias; + private String alias; /** The three-level (fully qualified) name of the registered model */ - @JsonIgnore private String fullName; + private String fullName; public DeleteAliasRequest setAlias(String alias) { this.alias = alias; @@ -54,4 +65,40 @@ public String toString() { .add("fullName", fullName) .toString(); } + + DeleteAliasRequestPb toPb() { + DeleteAliasRequestPb pb = new DeleteAliasRequestPb(); + pb.setAlias(alias); + pb.setFullName(fullName); + + return pb; + } + + static DeleteAliasRequest fromPb(DeleteAliasRequestPb pb) { + DeleteAliasRequest model = new DeleteAliasRequest(); + model.setAlias(pb.getAlias()); + model.setFullName(pb.getFullName()); + + return model; + } + + public static class DeleteAliasRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteAliasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAliasRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAliasRequestDeserializer extends JsonDeserializer { + @Override + public DeleteAliasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAliasRequestPb pb = mapper.readValue(p, DeleteAliasRequestPb.class); + return DeleteAliasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequestPb.java new file mode 100755 index 000000000..c5a5a82c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Registered Model Alias */ +@Generated +class DeleteAliasRequestPb { + @JsonIgnore private String alias; + + @JsonIgnore private String fullName; + + public DeleteAliasRequestPb setAlias(String alias) { + this.alias = alias; + return this; + } + + public String getAlias() { + return alias; + } + + public DeleteAliasRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAliasRequestPb that = (DeleteAliasRequestPb) o; + return Objects.equals(alias, that.alias) && Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(alias, fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteAliasRequestPb.class) + .add("alias", alias) + .add("fullName", fullName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java index 9bb22645b..5ee0e0ff7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteAliasResponse.DeleteAliasResponseSerializer.class) +@JsonDeserialize(using = DeleteAliasResponse.DeleteAliasResponseDeserializer.class) public class DeleteAliasResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAliasResponse.class).toString(); } + + DeleteAliasResponsePb toPb() { + DeleteAliasResponsePb pb = new DeleteAliasResponsePb(); + + return pb; + } + + static DeleteAliasResponse fromPb(DeleteAliasResponsePb pb) { + DeleteAliasResponse model = new DeleteAliasResponse(); + + return model; + } + + public static class DeleteAliasResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteAliasResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAliasResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAliasResponseDeserializer + extends 
JsonDeserializer { + @Override + public DeleteAliasResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAliasResponsePb pb = mapper.readValue(p, DeleteAliasResponsePb.class); + return DeleteAliasResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponsePb.java new file mode 100755 index 000000000..6e0af8b53 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteAliasResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteAliasResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java index 160821d70..627aad100 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a catalog */ @Generated +@JsonSerialize(using = DeleteCatalogRequest.DeleteCatalogRequestSerializer.class) +@JsonDeserialize(using = DeleteCatalogRequest.DeleteCatalogRequestDeserializer.class) public class DeleteCatalogRequest { /** Force deletion even if the catalog is not empty. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** The name of the catalog. 
*/ - @JsonIgnore private String name; + private String name; public DeleteCatalogRequest setForce(Boolean force) { this.force = force; @@ -57,4 +65,42 @@ public String toString() { .add("name", name) .toString(); } + + DeleteCatalogRequestPb toPb() { + DeleteCatalogRequestPb pb = new DeleteCatalogRequestPb(); + pb.setForce(force); + pb.setName(name); + + return pb; + } + + static DeleteCatalogRequest fromPb(DeleteCatalogRequestPb pb) { + DeleteCatalogRequest model = new DeleteCatalogRequest(); + model.setForce(pb.getForce()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteCatalogRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCatalogRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCatalogRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCatalogRequestPb pb = mapper.readValue(p, DeleteCatalogRequestPb.class); + return DeleteCatalogRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequestPb.java new file mode 100755 index 000000000..9c86762fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a catalog */ +@Generated +class DeleteCatalogRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String name; + + public DeleteCatalogRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteCatalogRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCatalogRequestPb that = (DeleteCatalogRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(force, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteCatalogRequestPb.class) + .add("force", force) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java index b294cd3a3..b8eba7d68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a connection */ @Generated +@JsonSerialize(using = DeleteConnectionRequest.DeleteConnectionRequestSerializer.class) +@JsonDeserialize(using = DeleteConnectionRequest.DeleteConnectionRequestDeserializer.class) public class DeleteConnectionRequest { /** The name of the connection to be deleted. */ - @JsonIgnore private String name; + private String name; public DeleteConnectionRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteConnectionRequest.class).add("name", name).toString(); } + + DeleteConnectionRequestPb toPb() { + DeleteConnectionRequestPb pb = new DeleteConnectionRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteConnectionRequest fromPb(DeleteConnectionRequestPb pb) { + DeleteConnectionRequest model = new DeleteConnectionRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteConnectionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteConnectionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteConnectionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteConnectionRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteConnectionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteConnectionRequestPb pb = mapper.readValue(p, DeleteConnectionRequestPb.class); + return DeleteConnectionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequestPb.java similarity index 74% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequestPb.java index cd9b08f25..d11c16a62 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequestPb.java @@ -7,13 +7,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get a Database Catalog */ +/** Delete a connection */ @Generated -public class GetDatabaseCatalogRequest { - /** */ +class DeleteConnectionRequestPb { @JsonIgnore private String name; - public GetDatabaseCatalogRequest setName(String name) { + public DeleteConnectionRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +25,7 @@ public String getName() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetDatabaseCatalogRequest that = (GetDatabaseCatalogRequest) o; + DeleteConnectionRequestPb that = (DeleteConnectionRequestPb) o; return Objects.equals(name, that.name); } @@ -37,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetDatabaseCatalogRequest.class).add("name", name).toString(); + return new ToStringer(DeleteConnectionRequestPb.class).add("name", name).toString(); } } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java index 2a771f6bb..f8d0f169e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a credential */ @Generated +@JsonSerialize(using = DeleteCredentialRequest.DeleteCredentialRequestSerializer.class) +@JsonDeserialize(using = DeleteCredentialRequest.DeleteCredentialRequestDeserializer.class) public class DeleteCredentialRequest { /** * Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent * external locations and external tables (when purpose is **STORAGE**). */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Name of the credential. 
*/ - @JsonIgnore private String nameArg; + private String nameArg; public DeleteCredentialRequest setForce(Boolean force) { this.force = force; @@ -60,4 +68,43 @@ public String toString() { .add("nameArg", nameArg) .toString(); } + + DeleteCredentialRequestPb toPb() { + DeleteCredentialRequestPb pb = new DeleteCredentialRequestPb(); + pb.setForce(force); + pb.setNameArg(nameArg); + + return pb; + } + + static DeleteCredentialRequest fromPb(DeleteCredentialRequestPb pb) { + DeleteCredentialRequest model = new DeleteCredentialRequest(); + model.setForce(pb.getForce()); + model.setNameArg(pb.getNameArg()); + + return model; + } + + public static class DeleteCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCredentialRequestPb pb = mapper.readValue(p, DeleteCredentialRequestPb.class); + return DeleteCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequestPb.java new file mode 100755 index 000000000..694ab84b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a credential */ +@Generated +class DeleteCredentialRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String nameArg; + + public DeleteCredentialRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteCredentialRequestPb setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCredentialRequestPb that = (DeleteCredentialRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(force, nameArg); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialRequestPb.class) + .add("force", force) + .add("nameArg", nameArg) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java index 1ad278759..64a4fd86f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteCredentialResponse.DeleteCredentialResponseSerializer.class) +@JsonDeserialize(using = DeleteCredentialResponse.DeleteCredentialResponseDeserializer.class) public class DeleteCredentialResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCredentialResponse.class).toString(); } + + DeleteCredentialResponsePb toPb() { + DeleteCredentialResponsePb pb = new DeleteCredentialResponsePb(); + + return pb; + } + + static DeleteCredentialResponse fromPb(DeleteCredentialResponsePb pb) { + DeleteCredentialResponse model = new DeleteCredentialResponse(); + + return model; + } + + public static class DeleteCredentialResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCredentialResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCredentialResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCredentialResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteCredentialResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCredentialResponsePb pb = mapper.readValue(p, DeleteCredentialResponsePb.class); + return DeleteCredentialResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponsePb.java new file mode 100755 index 000000000..6e455e66b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteCredentialResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java index ffdf3ba52..667e3b12f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java @@ -3,21 +3,30 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an external location */ @Generated +@JsonSerialize(using = DeleteExternalLocationRequest.DeleteExternalLocationRequestSerializer.class) +@JsonDeserialize( + using = DeleteExternalLocationRequest.DeleteExternalLocationRequestDeserializer.class) public class DeleteExternalLocationRequest { /** Force deletion even if there are dependent external tables or mounts. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Name of the external location. 
*/ - @JsonIgnore private String name; + private String name; public DeleteExternalLocationRequest setForce(Boolean force) { this.force = force; @@ -57,4 +66,44 @@ public String toString() { .add("name", name) .toString(); } + + DeleteExternalLocationRequestPb toPb() { + DeleteExternalLocationRequestPb pb = new DeleteExternalLocationRequestPb(); + pb.setForce(force); + pb.setName(name); + + return pb; + } + + static DeleteExternalLocationRequest fromPb(DeleteExternalLocationRequestPb pb) { + DeleteExternalLocationRequest model = new DeleteExternalLocationRequest(); + model.setForce(pb.getForce()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteExternalLocationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExternalLocationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExternalLocationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExternalLocationRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteExternalLocationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExternalLocationRequestPb pb = + mapper.readValue(p, DeleteExternalLocationRequestPb.class); + return DeleteExternalLocationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequestPb.java new file mode 100755 index 000000000..f040a17f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an external location */ +@Generated +class DeleteExternalLocationRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String name; + + public DeleteExternalLocationRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteExternalLocationRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExternalLocationRequestPb that = (DeleteExternalLocationRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(force, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteExternalLocationRequestPb.class) + .add("force", force) 
+ .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java index 1818ede51..23301dd2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a function */ @Generated +@JsonSerialize(using = DeleteFunctionRequest.DeleteFunctionRequestSerializer.class) +@JsonDeserialize(using = DeleteFunctionRequest.DeleteFunctionRequestDeserializer.class) public class DeleteFunctionRequest { /** Force deletion even if the function is notempty. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** * The fully-qualified name of the function (of the form * __catalog_name__.__schema_name__.__function__name__). 
*/ - @JsonIgnore private String name; + private String name; public DeleteFunctionRequest setForce(Boolean force) { this.force = force; @@ -60,4 +68,43 @@ public String toString() { .add("name", name) .toString(); } + + DeleteFunctionRequestPb toPb() { + DeleteFunctionRequestPb pb = new DeleteFunctionRequestPb(); + pb.setForce(force); + pb.setName(name); + + return pb; + } + + static DeleteFunctionRequest fromPb(DeleteFunctionRequestPb pb) { + DeleteFunctionRequest model = new DeleteFunctionRequest(); + model.setForce(pb.getForce()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteFunctionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteFunctionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteFunctionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteFunctionRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteFunctionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteFunctionRequestPb pb = mapper.readValue(p, DeleteFunctionRequestPb.class); + return DeleteFunctionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequestPb.java new file mode 100755 index 000000000..8b8f915bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a function */ +@Generated +class DeleteFunctionRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String name; + + public DeleteFunctionRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteFunctionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteFunctionRequestPb that = (DeleteFunctionRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(force, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteFunctionRequestPb.class) + .add("force", force) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java index 3bc2a7251..a2a1235cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a metastore */ @Generated +@JsonSerialize(using = DeleteMetastoreRequest.DeleteMetastoreRequestSerializer.class) +@JsonDeserialize(using = DeleteMetastoreRequest.DeleteMetastoreRequestDeserializer.class) public class DeleteMetastoreRequest { /** Force deletion even if the metastore is not empty. Default is false. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Unique ID of the metastore. */ - @JsonIgnore private String id; + private String id; public DeleteMetastoreRequest setForce(Boolean force) { this.force = force; @@ -57,4 +65,43 @@ public String toString() { .add("id", id) .toString(); } + + DeleteMetastoreRequestPb toPb() { + DeleteMetastoreRequestPb pb = new DeleteMetastoreRequestPb(); + pb.setForce(force); + pb.setId(id); + + return pb; + } + + static DeleteMetastoreRequest fromPb(DeleteMetastoreRequestPb pb) { + DeleteMetastoreRequest model = new DeleteMetastoreRequest(); + model.setForce(pb.getForce()); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteMetastoreRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteMetastoreRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteMetastoreRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteMetastoreRequestDeserializer + extends JsonDeserializer { + @Override + public 
DeleteMetastoreRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteMetastoreRequestPb pb = mapper.readValue(p, DeleteMetastoreRequestPb.class); + return DeleteMetastoreRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequestPb.java new file mode 100755 index 000000000..9197c2f51 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a metastore */ +@Generated +class DeleteMetastoreRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String id; + + public DeleteMetastoreRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteMetastoreRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteMetastoreRequestPb that = (DeleteMetastoreRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(force, id); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteMetastoreRequestPb.class) + .add("force", force) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java index 45da03a28..1a8267801 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a Model Version */ @Generated +@JsonSerialize(using = DeleteModelVersionRequest.DeleteModelVersionRequestSerializer.class) +@JsonDeserialize(using = DeleteModelVersionRequest.DeleteModelVersionRequestDeserializer.class) public class DeleteModelVersionRequest { /** The three-level (fully qualified) name of the model version */ - @JsonIgnore private String fullName; + private String fullName; /** The integer version number of the model version */ - @JsonIgnore private Long version; + private Long version; public DeleteModelVersionRequest setFullName(String fullName) { this.fullName = fullName; @@ -54,4 +65,43 @@ public String toString() { .add("version", version) .toString(); } + + 
DeleteModelVersionRequestPb toPb() { + DeleteModelVersionRequestPb pb = new DeleteModelVersionRequestPb(); + pb.setFullName(fullName); + pb.setVersion(version); + + return pb; + } + + static DeleteModelVersionRequest fromPb(DeleteModelVersionRequestPb pb) { + DeleteModelVersionRequest model = new DeleteModelVersionRequest(); + model.setFullName(pb.getFullName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class DeleteModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelVersionRequestPb pb = mapper.readValue(p, DeleteModelVersionRequestPb.class); + return DeleteModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequestPb.java new file mode 100755 index 000000000..6799e9831 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Model Version */ +@Generated +class DeleteModelVersionRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore private Long version; + + public DeleteModelVersionRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public DeleteModelVersionRequestPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteModelVersionRequestPb that = (DeleteModelVersionRequestPb) o; + return Objects.equals(fullName, that.fullName) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, version); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelVersionRequestPb.class) + .add("fullName", fullName) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java index 004f187b0..a68c1eb61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an Online Table */ @Generated +@JsonSerialize(using = DeleteOnlineTableRequest.DeleteOnlineTableRequestSerializer.class) +@JsonDeserialize(using = DeleteOnlineTableRequest.DeleteOnlineTableRequestDeserializer.class) public class DeleteOnlineTableRequest { /** Full three-part (catalog, schema, table) name of the table. */ - @JsonIgnore private String name; + private String name; public DeleteOnlineTableRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteOnlineTableRequest.class).add("name", name).toString(); } + + DeleteOnlineTableRequestPb toPb() { + DeleteOnlineTableRequestPb pb = new DeleteOnlineTableRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteOnlineTableRequest fromPb(DeleteOnlineTableRequestPb pb) { + DeleteOnlineTableRequest model = new DeleteOnlineTableRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteOnlineTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteOnlineTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteOnlineTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteOnlineTableRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteOnlineTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteOnlineTableRequestPb pb = mapper.readValue(p, DeleteOnlineTableRequestPb.class); + return DeleteOnlineTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequestPb.java similarity index 71% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequestPb.java index 131d51e48..634123431 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequestPb.java @@ -7,13 +7,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get a Database Instance */ +/** Delete an Online Table */ @Generated -public class GetDatabaseInstanceRequest { - /** Name of the cluster to get. 
*/ +class DeleteOnlineTableRequestPb { @JsonIgnore private String name; - public GetDatabaseInstanceRequest setName(String name) { + public DeleteOnlineTableRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +25,7 @@ public String getName() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetDatabaseInstanceRequest that = (GetDatabaseInstanceRequest) o; + DeleteOnlineTableRequestPb that = (DeleteOnlineTableRequestPb) o; return Objects.equals(name, that.name); } @@ -37,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetDatabaseInstanceRequest.class).add("name", name).toString(); + return new ToStringer(DeleteOnlineTableRequestPb.class).add("name", name).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java index 015df9599..83299616f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; /** Delete a table monitor */ @Generated +@JsonSerialize(using = DeleteQualityMonitorRequest.DeleteQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = DeleteQualityMonitorRequest.DeleteQualityMonitorRequestDeserializer.class) public class DeleteQualityMonitorRequest { /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public DeleteQualityMonitorRequest setTableName(String tableName) { this.tableName = tableName; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteQualityMonitorRequest.class).add("tableName", tableName).toString(); } + + DeleteQualityMonitorRequestPb toPb() { + DeleteQualityMonitorRequestPb pb = new DeleteQualityMonitorRequestPb(); + pb.setTableName(tableName); + + return pb; + } + + static DeleteQualityMonitorRequest fromPb(DeleteQualityMonitorRequestPb pb) { + DeleteQualityMonitorRequest model = new DeleteQualityMonitorRequest(); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class DeleteQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteQualityMonitorRequestPb pb = mapper.readValue(p, DeleteQualityMonitorRequestPb.class); + return DeleteQualityMonitorRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequestPb.java new file mode 100755 index 000000000..12217a4eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a table monitor */ +@Generated +class DeleteQualityMonitorRequestPb { + @JsonIgnore private String tableName; + + public DeleteQualityMonitorRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQualityMonitorRequestPb that = (DeleteQualityMonitorRequestPb) o; + return Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(tableName); + } + + @Override + public String toString() { + return new ToStringer(DeleteQualityMonitorRequestPb.class) + .add("tableName", tableName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java index b8117cce2..68ef2b189 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a Registered Model */ @Generated +@JsonSerialize(using = DeleteRegisteredModelRequest.DeleteRegisteredModelRequestSerializer.class) +@JsonDeserialize( + using = DeleteRegisteredModelRequest.DeleteRegisteredModelRequestDeserializer.class) public class DeleteRegisteredModelRequest { /** The three-level (fully qualified) name of the registered model */ - @JsonIgnore private String fullName; + private String fullName; public DeleteRegisteredModelRequest setFullName(String fullName) { this.fullName = fullName; @@ -39,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRegisteredModelRequest.class).add("fullName", fullName).toString(); } + + DeleteRegisteredModelRequestPb toPb() { + DeleteRegisteredModelRequestPb pb = new DeleteRegisteredModelRequestPb(); + pb.setFullName(fullName); + + return pb; + } + + static DeleteRegisteredModelRequest fromPb(DeleteRegisteredModelRequestPb pb) { + DeleteRegisteredModelRequest model = new DeleteRegisteredModelRequest(); + 
model.setFullName(pb.getFullName()); + + return model; + } + + public static class DeleteRegisteredModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteRegisteredModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRegisteredModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRegisteredModelRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteRegisteredModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRegisteredModelRequestPb pb = mapper.readValue(p, DeleteRegisteredModelRequestPb.class); + return DeleteRegisteredModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequestPb.java new file mode 100755 index 000000000..643227723 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Registered Model */ +@Generated +class DeleteRegisteredModelRequestPb { + @JsonIgnore private String fullName; + + public DeleteRegisteredModelRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRegisteredModelRequestPb that = (DeleteRegisteredModelRequestPb) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteRegisteredModelRequestPb.class) + .add("fullName", fullName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java index 72c77810c..9727fbcc7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponsePb.java new file mode 100755 index 000000000..610fc3f14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java index c774720e9..7df69d405 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a schema */ @Generated +@JsonSerialize(using = DeleteSchemaRequest.DeleteSchemaRequestSerializer.class) +@JsonDeserialize(using = 
DeleteSchemaRequest.DeleteSchemaRequestDeserializer.class) public class DeleteSchemaRequest { /** Force deletion even if the schema is not empty. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Full name of the schema. */ - @JsonIgnore private String fullName; + private String fullName; public DeleteSchemaRequest setForce(Boolean force) { this.force = force; @@ -57,4 +65,41 @@ public String toString() { .add("fullName", fullName) .toString(); } + + DeleteSchemaRequestPb toPb() { + DeleteSchemaRequestPb pb = new DeleteSchemaRequestPb(); + pb.setForce(force); + pb.setFullName(fullName); + + return pb; + } + + static DeleteSchemaRequest fromPb(DeleteSchemaRequestPb pb) { + DeleteSchemaRequest model = new DeleteSchemaRequest(); + model.setForce(pb.getForce()); + model.setFullName(pb.getFullName()); + + return model; + } + + public static class DeleteSchemaRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteSchemaRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSchemaRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSchemaRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteSchemaRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSchemaRequestPb pb = mapper.readValue(p, DeleteSchemaRequestPb.class); + return DeleteSchemaRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequestPb.java new file mode 100755 index 000000000..9584c12cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a schema */ +@Generated +class DeleteSchemaRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String fullName; + + public DeleteSchemaRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteSchemaRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSchemaRequestPb that = (DeleteSchemaRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(force, fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteSchemaRequestPb.class) + .add("force", force) + .add("fullName", fullName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java index c9837e254..e373b2a93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java @@ -3,21 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a credential */ @Generated +@JsonSerialize( + using = DeleteStorageCredentialRequest.DeleteStorageCredentialRequestSerializer.class) +@JsonDeserialize( + using = DeleteStorageCredentialRequest.DeleteStorageCredentialRequestDeserializer.class) public class DeleteStorageCredentialRequest { /** Force deletion even if there are dependent external locations or external tables. */ - @JsonIgnore - @QueryParam("force") private Boolean force; /** Name of the storage credential. 
*/ - @JsonIgnore private String name; + private String name; public DeleteStorageCredentialRequest setForce(Boolean force) { this.force = force; @@ -57,4 +67,44 @@ public String toString() { .add("name", name) .toString(); } + + DeleteStorageCredentialRequestPb toPb() { + DeleteStorageCredentialRequestPb pb = new DeleteStorageCredentialRequestPb(); + pb.setForce(force); + pb.setName(name); + + return pb; + } + + static DeleteStorageCredentialRequest fromPb(DeleteStorageCredentialRequestPb pb) { + DeleteStorageCredentialRequest model = new DeleteStorageCredentialRequest(); + model.setForce(pb.getForce()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteStorageCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteStorageCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteStorageCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteStorageCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteStorageCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteStorageCredentialRequestPb pb = + mapper.readValue(p, DeleteStorageCredentialRequestPb.class); + return DeleteStorageCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequestPb.java new file mode 100755 index 000000000..c34aae807 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a credential */ +@Generated +class DeleteStorageCredentialRequestPb { + @JsonIgnore + @QueryParam("force") + private Boolean force; + + @JsonIgnore private String name; + + public DeleteStorageCredentialRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteStorageCredentialRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteStorageCredentialRequestPb that = (DeleteStorageCredentialRequestPb) o; + return Objects.equals(force, that.force) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(force, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteStorageCredentialRequestPb.class) + .add("force", 
force) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java index db8a2c601..7bd56fba3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java @@ -3,29 +3,36 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a table constraint */ @Generated +@JsonSerialize(using = DeleteTableConstraintRequest.DeleteTableConstraintRequestSerializer.class) +@JsonDeserialize( + using = DeleteTableConstraintRequest.DeleteTableConstraintRequestDeserializer.class) public class DeleteTableConstraintRequest { /** * If true, try deleting all child constraints of the current constraint. If false, reject this * operation if the current constraint has any child constraints. */ - @JsonIgnore - @QueryParam("cascade") private Boolean cascade; /** The name of the constraint to delete. 
*/ - @JsonIgnore - @QueryParam("constraint_name") private String constraintName; /** Full name of the table referenced by the constraint. */ - @JsonIgnore private String fullName; + private String fullName; public DeleteTableConstraintRequest setCascade(Boolean cascade) { this.cascade = cascade; @@ -77,4 +84,45 @@ public String toString() { .add("fullName", fullName) .toString(); } + + DeleteTableConstraintRequestPb toPb() { + DeleteTableConstraintRequestPb pb = new DeleteTableConstraintRequestPb(); + pb.setCascade(cascade); + pb.setConstraintName(constraintName); + pb.setFullName(fullName); + + return pb; + } + + static DeleteTableConstraintRequest fromPb(DeleteTableConstraintRequestPb pb) { + DeleteTableConstraintRequest model = new DeleteTableConstraintRequest(); + model.setCascade(pb.getCascade()); + model.setConstraintName(pb.getConstraintName()); + model.setFullName(pb.getFullName()); + + return model; + } + + public static class DeleteTableConstraintRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteTableConstraintRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTableConstraintRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTableConstraintRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteTableConstraintRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTableConstraintRequestPb pb = mapper.readValue(p, DeleteTableConstraintRequestPb.class); + return DeleteTableConstraintRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequestPb.java new file mode 100755 index 000000000..c95d79402 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a table constraint */ +@Generated +class DeleteTableConstraintRequestPb { + @JsonIgnore + @QueryParam("cascade") + private Boolean cascade; + + @JsonIgnore + @QueryParam("constraint_name") + private String constraintName; + + @JsonIgnore private String fullName; + + public DeleteTableConstraintRequestPb setCascade(Boolean cascade) { + this.cascade = cascade; + return this; + } + + public Boolean getCascade() { + return cascade; + } + + public DeleteTableConstraintRequestPb setConstraintName(String constraintName) { + this.constraintName = constraintName; + return this; + } + + public String getConstraintName() { + return constraintName; + } + + public DeleteTableConstraintRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTableConstraintRequestPb that = 
(DeleteTableConstraintRequestPb) o; + return Objects.equals(cascade, that.cascade) + && Objects.equals(constraintName, that.constraintName) + && Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(cascade, constraintName, fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteTableConstraintRequestPb.class) + .add("cascade", cascade) + .add("constraintName", constraintName) + .add("fullName", fullName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java index b4d371447..2d9ef77d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a table */ @Generated +@JsonSerialize(using = DeleteTableRequest.DeleteTableRequestSerializer.class) +@JsonDeserialize(using = DeleteTableRequest.DeleteTableRequestDeserializer.class) public class DeleteTableRequest { /** Full name of the table. 
*/ - @JsonIgnore private String fullName; + private String fullName; public DeleteTableRequest setFullName(String fullName) { this.fullName = fullName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteTableRequest.class).add("fullName", fullName).toString(); } + + DeleteTableRequestPb toPb() { + DeleteTableRequestPb pb = new DeleteTableRequestPb(); + pb.setFullName(fullName); + + return pb; + } + + static DeleteTableRequest fromPb(DeleteTableRequestPb pb) { + DeleteTableRequest model = new DeleteTableRequest(); + model.setFullName(pb.getFullName()); + + return model; + } + + public static class DeleteTableRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTableRequestDeserializer extends JsonDeserializer { + @Override + public DeleteTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTableRequestPb pb = mapper.readValue(p, DeleteTableRequestPb.class); + return DeleteTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequestPb.java new file mode 100755 index 000000000..8aaa33ae3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a table */ +@Generated +class DeleteTableRequestPb { + @JsonIgnore private String fullName; + + public DeleteTableRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTableRequestPb that = (DeleteTableRequestPb) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteTableRequestPb.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java index 2fd732e7a..8f4d673e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a Volume */ @Generated +@JsonSerialize(using = DeleteVolumeRequest.DeleteVolumeRequestSerializer.class) +@JsonDeserialize(using = DeleteVolumeRequest.DeleteVolumeRequestDeserializer.class) public class DeleteVolumeRequest { /** The three-level (fully qualified) name of the volume */ - @JsonIgnore private String name; + private String name; public DeleteVolumeRequest setName(String name) { this.name = name; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteVolumeRequest.class).add("name", name).toString(); } + + DeleteVolumeRequestPb toPb() { + DeleteVolumeRequestPb pb = new DeleteVolumeRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteVolumeRequest fromPb(DeleteVolumeRequestPb pb) { + DeleteVolumeRequest model = new DeleteVolumeRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteVolumeRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteVolumeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteVolumeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteVolumeRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteVolumeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteVolumeRequestPb pb = mapper.readValue(p, DeleteVolumeRequestPb.class); + return DeleteVolumeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequestPb.java new file mode 100755 index 000000000..95650e672 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Volume */ +@Generated +class DeleteVolumeRequestPb { + @JsonIgnore private String name; + + public DeleteVolumeRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteVolumeRequestPb that = (DeleteVolumeRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteVolumeRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java index 8416788a2..dbb259863 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @@ -13,9 +22,11 @@ * does not contain **delta.*** (input) properties in __TableInfo.properties__. */ @Generated +@JsonSerialize(using = DeltaRuntimePropertiesKvPairs.DeltaRuntimePropertiesKvPairsSerializer.class) +@JsonDeserialize( + using = DeltaRuntimePropertiesKvPairs.DeltaRuntimePropertiesKvPairsDeserializer.class) public class DeltaRuntimePropertiesKvPairs { /** A map of key-value properties attached to the securable. 
*/ - @JsonProperty("delta_runtime_properties") private Map deltaRuntimeProperties; public DeltaRuntimePropertiesKvPairs setDeltaRuntimeProperties( @@ -47,4 +58,42 @@ public String toString() { .add("deltaRuntimeProperties", deltaRuntimeProperties) .toString(); } + + DeltaRuntimePropertiesKvPairsPb toPb() { + DeltaRuntimePropertiesKvPairsPb pb = new DeltaRuntimePropertiesKvPairsPb(); + pb.setDeltaRuntimeProperties(deltaRuntimeProperties); + + return pb; + } + + static DeltaRuntimePropertiesKvPairs fromPb(DeltaRuntimePropertiesKvPairsPb pb) { + DeltaRuntimePropertiesKvPairs model = new DeltaRuntimePropertiesKvPairs(); + model.setDeltaRuntimeProperties(pb.getDeltaRuntimeProperties()); + + return model; + } + + public static class DeltaRuntimePropertiesKvPairsSerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaRuntimePropertiesKvPairs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaRuntimePropertiesKvPairsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaRuntimePropertiesKvPairsDeserializer + extends JsonDeserializer { + @Override + public DeltaRuntimePropertiesKvPairs deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaRuntimePropertiesKvPairsPb pb = + mapper.readValue(p, DeltaRuntimePropertiesKvPairsPb.class); + return DeltaRuntimePropertiesKvPairs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairsPb.java new file mode 100755 index 000000000..4ccac6348 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairsPb.java @@ -0,0 +1,49 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** + * Properties pertaining to the current state of the delta table as given by the commit server. This + * does not contain **delta.*** (input) properties in __TableInfo.properties__. 
+ */ +@Generated +class DeltaRuntimePropertiesKvPairsPb { + @JsonProperty("delta_runtime_properties") + private Map deltaRuntimeProperties; + + public DeltaRuntimePropertiesKvPairsPb setDeltaRuntimeProperties( + Map deltaRuntimeProperties) { + this.deltaRuntimeProperties = deltaRuntimeProperties; + return this; + } + + public Map getDeltaRuntimeProperties() { + return deltaRuntimeProperties; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaRuntimePropertiesKvPairsPb that = (DeltaRuntimePropertiesKvPairsPb) o; + return Objects.equals(deltaRuntimeProperties, that.deltaRuntimeProperties); + } + + @Override + public int hashCode() { + return Objects.hash(deltaRuntimeProperties); + } + + @Override + public String toString() { + return new ToStringer(DeltaRuntimePropertiesKvPairsPb.class) + .add("deltaRuntimeProperties", deltaRuntimeProperties) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java similarity index 67% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java index acfbdae1d..d862afd5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java @@ -4,9 +4,8 @@ import com.databricks.sdk.support.Generated; -/** The scope of Delta Sharing enabled for the metastore. 
*/ @Generated -public enum MetastoreInfoDeltaSharingScope { +public enum DeltaSharingScopeEnum { INTERNAL, INTERNAL_AND_EXTERNAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java index 71d02c997..b84695539 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,13 +21,13 @@ * defined. */ @Generated +@JsonSerialize(using = Dependency.DependencySerializer.class) +@JsonDeserialize(using = Dependency.DependencyDeserializer.class) public class Dependency { /** A function that is dependent on a SQL object. */ - @JsonProperty("function") private FunctionDependency function; /** A table that is dependent on a SQL object. 
*/ - @JsonProperty("table") private TableDependency table; public Dependency setFunction(FunctionDependency function) { @@ -59,4 +68,39 @@ public String toString() { .add("table", table) .toString(); } + + DependencyPb toPb() { + DependencyPb pb = new DependencyPb(); + pb.setFunction(function); + pb.setTable(table); + + return pb; + } + + static Dependency fromPb(DependencyPb pb) { + Dependency model = new Dependency(); + model.setFunction(pb.getFunction()); + model.setTable(pb.getTable()); + + return model; + } + + public static class DependencySerializer extends JsonSerializer { + @Override + public void serialize(Dependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DependencyDeserializer extends JsonDeserializer { + @Override + public Dependency deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DependencyPb pb = mapper.readValue(p, DependencyPb.class); + return Dependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java index e8caef070..883e63efe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** A list of dependencies. */ @Generated +@JsonSerialize(using = DependencyList.DependencyListSerializer.class) +@JsonDeserialize(using = DependencyList.DependencyListDeserializer.class) public class DependencyList { /** Array of dependencies. 
*/ - @JsonProperty("dependencies") private Collection dependencies; public DependencyList setDependencies(Collection dependencies) { @@ -41,4 +51,38 @@ public int hashCode() { public String toString() { return new ToStringer(DependencyList.class).add("dependencies", dependencies).toString(); } + + DependencyListPb toPb() { + DependencyListPb pb = new DependencyListPb(); + pb.setDependencies(dependencies); + + return pb; + } + + static DependencyList fromPb(DependencyListPb pb) { + DependencyList model = new DependencyList(); + model.setDependencies(pb.getDependencies()); + + return model; + } + + public static class DependencyListSerializer extends JsonSerializer { + @Override + public void serialize(DependencyList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DependencyListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DependencyListDeserializer extends JsonDeserializer { + @Override + public DependencyList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DependencyListPb pb = mapper.readValue(p, DependencyListPb.class); + return DependencyList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyListPb.java new file mode 100755 index 000000000..c3d790907 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyListPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A list of dependencies. */ +@Generated +class DependencyListPb { + @JsonProperty("dependencies") + private Collection dependencies; + + public DependencyListPb setDependencies(Collection dependencies) { + this.dependencies = dependencies; + return this; + } + + public Collection getDependencies() { + return dependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DependencyListPb that = (DependencyListPb) o; + return Objects.equals(dependencies, that.dependencies); + } + + @Override + public int hashCode() { + return Objects.hash(dependencies); + } + + @Override + public String toString() { + return new ToStringer(DependencyListPb.class).add("dependencies", dependencies).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyPb.java new file mode 100755 index 000000000..fe76963ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A dependency of a SQL object. Either the __table__ field or the __function__ field must be + * defined. 
+ */ +@Generated +class DependencyPb { + @JsonProperty("function") + private FunctionDependency function; + + @JsonProperty("table") + private TableDependency table; + + public DependencyPb setFunction(FunctionDependency function) { + this.function = function; + return this; + } + + public FunctionDependency getFunction() { + return function; + } + + public DependencyPb setTable(TableDependency table) { + this.table = table; + return this; + } + + public TableDependency getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DependencyPb that = (DependencyPb) o; + return Objects.equals(function, that.function) && Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(function, table); + } + + @Override + public String toString() { + return new ToStringer(DependencyPb.class) + .add("function", function) + .add("table", table) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java index 1e78370ba..15f301e0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Disable a system schema */ @Generated +@JsonSerialize(using = DisableRequest.DisableRequestSerializer.class) +@JsonDeserialize(using = DisableRequest.DisableRequestDeserializer.class) public class DisableRequest { /** The metastore ID under which the system schema lives. */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Full name of the system schema. */ - @JsonIgnore private String schemaName; + private String schemaName; public DisableRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -55,4 +66,40 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + DisableRequestPb toPb() { + DisableRequestPb pb = new DisableRequestPb(); + pb.setMetastoreId(metastoreId); + pb.setSchemaName(schemaName); + + return pb; + } + + static DisableRequest fromPb(DisableRequestPb pb) { + DisableRequest model = new DisableRequest(); + model.setMetastoreId(pb.getMetastoreId()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class DisableRequestSerializer extends JsonSerializer { + @Override + public void serialize(DisableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DisableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DisableRequestDeserializer extends JsonDeserializer { + @Override + public DisableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DisableRequestPb pb = mapper.readValue(p, DisableRequestPb.class); + return DisableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequestPb.java new file mode 100755 index 000000000..f2faee46c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Disable a system schema */ +@Generated +class DisableRequestPb { + @JsonIgnore private String metastoreId; + + @JsonIgnore private String schemaName; + + public DisableRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public DisableRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DisableRequestPb that = (DisableRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId, schemaName); + } + + @Override + public String toString() { + return new ToStringer(DisableRequestPb.class) + .add("metastoreId", metastoreId) + .add("schemaName", schemaName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java index 682492951..406ff059d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DisableResponse.DisableResponseSerializer.class) +@JsonDeserialize(using = DisableResponse.DisableResponseDeserializer.class) public class DisableResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DisableResponse.class).toString(); } + + DisableResponsePb toPb() { + DisableResponsePb pb = new DisableResponsePb(); + + return pb; + } + + static DisableResponse fromPb(DisableResponsePb pb) { + DisableResponse model = new DisableResponse(); + + return model; + } + + public static class DisableResponseSerializer extends JsonSerializer { + @Override + public void serialize(DisableResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DisableResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DisableResponseDeserializer 
extends JsonDeserializer { + @Override + public DisableResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DisableResponsePb pb = mapper.readValue(p, DisableResponsePb.class); + return DisableResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponsePb.java new file mode 100755 index 000000000..1a1657b86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DisableResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DisableResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java index 61fc856a1..14590e2ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EffectivePermissionsList.EffectivePermissionsListSerializer.class) +@JsonDeserialize(using = EffectivePermissionsList.EffectivePermissionsListDeserializer.class) public class EffectivePermissionsList { /** The privileges conveyed to each principal (either directly or via inheritance) */ - @JsonProperty("privilege_assignments") private Collection privilegeAssignments; public EffectivePermissionsList setPrivilegeAssignments( @@ -43,4 +53,41 @@ public String toString() { .add("privilegeAssignments", privilegeAssignments) .toString(); } + + EffectivePermissionsListPb toPb() { + EffectivePermissionsListPb pb = new EffectivePermissionsListPb(); + pb.setPrivilegeAssignments(privilegeAssignments); + + return pb; + } + + static EffectivePermissionsList fromPb(EffectivePermissionsListPb pb) { + EffectivePermissionsList model = new EffectivePermissionsList(); + model.setPrivilegeAssignments(pb.getPrivilegeAssignments()); + + return model; + } + + public static class EffectivePermissionsListSerializer + extends JsonSerializer { + @Override + public void serialize( + EffectivePermissionsList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EffectivePermissionsListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
EffectivePermissionsListDeserializer + extends JsonDeserializer { + @Override + public EffectivePermissionsList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EffectivePermissionsListPb pb = mapper.readValue(p, EffectivePermissionsListPb.class); + return EffectivePermissionsList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsListPb.java new file mode 100755 index 000000000..1b5aaf897 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsListPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EffectivePermissionsListPb { + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public EffectivePermissionsListPb setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EffectivePermissionsListPb that = (EffectivePermissionsListPb) o; + return Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(privilegeAssignments); + } + + @Override + public String 
toString() { + return new ToStringer(EffectivePermissionsListPb.class) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java index bdb08b43d..5d87c6520 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java @@ -4,27 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = EffectivePredictiveOptimizationFlag.EffectivePredictiveOptimizationFlagSerializer.class) +@JsonDeserialize( + using = + EffectivePredictiveOptimizationFlag.EffectivePredictiveOptimizationFlagDeserializer.class) public class EffectivePredictiveOptimizationFlag { /** * The name of the object from which the flag was inherited. If there was no inheritance, this * field is left blank. */ - @JsonProperty("inherited_from_name") private String inheritedFromName; /** * The type of the object from which the flag was inherited. If there was no inheritance, this * field is left blank. 
*/ - @JsonProperty("inherited_from_type") private EffectivePredictiveOptimizationFlagInheritedFromType inheritedFromType; /** Whether predictive optimization should be enabled for this object and objects under it. */ - @JsonProperty("value") private EnablePredictiveOptimization value; public EffectivePredictiveOptimizationFlag setInheritedFromName(String inheritedFromName) { @@ -78,4 +89,46 @@ public String toString() { .add("value", value) .toString(); } + + EffectivePredictiveOptimizationFlagPb toPb() { + EffectivePredictiveOptimizationFlagPb pb = new EffectivePredictiveOptimizationFlagPb(); + pb.setInheritedFromName(inheritedFromName); + pb.setInheritedFromType(inheritedFromType); + pb.setValue(value); + + return pb; + } + + static EffectivePredictiveOptimizationFlag fromPb(EffectivePredictiveOptimizationFlagPb pb) { + EffectivePredictiveOptimizationFlag model = new EffectivePredictiveOptimizationFlag(); + model.setInheritedFromName(pb.getInheritedFromName()); + model.setInheritedFromType(pb.getInheritedFromType()); + model.setValue(pb.getValue()); + + return model; + } + + public static class EffectivePredictiveOptimizationFlagSerializer + extends JsonSerializer { + @Override + public void serialize( + EffectivePredictiveOptimizationFlag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EffectivePredictiveOptimizationFlagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EffectivePredictiveOptimizationFlagDeserializer + extends JsonDeserializer { + @Override + public EffectivePredictiveOptimizationFlag deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EffectivePredictiveOptimizationFlagPb pb = + mapper.readValue(p, EffectivePredictiveOptimizationFlagPb.class); + return EffectivePredictiveOptimizationFlag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagPb.java new file mode 100755 index 000000000..e8a32f762 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EffectivePredictiveOptimizationFlagPb { + @JsonProperty("inherited_from_name") + private String inheritedFromName; + + @JsonProperty("inherited_from_type") + private EffectivePredictiveOptimizationFlagInheritedFromType inheritedFromType; + + @JsonProperty("value") + private EnablePredictiveOptimization value; + + public EffectivePredictiveOptimizationFlagPb setInheritedFromName(String inheritedFromName) { + this.inheritedFromName = inheritedFromName; + return this; + } + + public String getInheritedFromName() { + return inheritedFromName; + } + + public EffectivePredictiveOptimizationFlagPb setInheritedFromType( + EffectivePredictiveOptimizationFlagInheritedFromType inheritedFromType) { + this.inheritedFromType = inheritedFromType; + return this; + } + + public EffectivePredictiveOptimizationFlagInheritedFromType getInheritedFromType() { + return inheritedFromType; + } + + public EffectivePredictiveOptimizationFlagPb setValue(EnablePredictiveOptimization value) { + this.value = value; + 
return this; + } + + public EnablePredictiveOptimization getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EffectivePredictiveOptimizationFlagPb that = (EffectivePredictiveOptimizationFlagPb) o; + return Objects.equals(inheritedFromName, that.inheritedFromName) + && Objects.equals(inheritedFromType, that.inheritedFromType) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(inheritedFromName, inheritedFromType, value); + } + + @Override + public String toString() { + return new ToStringer(EffectivePredictiveOptimizationFlagPb.class) + .add("inheritedFromName", inheritedFromName) + .add("inheritedFromType", inheritedFromType) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilege.java index 4ad89a111..17a31d9c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilege.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilege.java @@ -4,27 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; 
import java.util.Objects; @Generated +@JsonSerialize(using = EffectivePrivilege.EffectivePrivilegeSerializer.class) +@JsonDeserialize(using = EffectivePrivilege.EffectivePrivilegeDeserializer.class) public class EffectivePrivilege { /** * The full name of the object that conveys this privilege via inheritance. This field is omitted * when privilege is not inherited (it's assigned to the securable itself). */ - @JsonProperty("inherited_from_name") private String inheritedFromName; /** * The type of the object that conveys this privilege via inheritance. This field is omitted when * privilege is not inherited (it's assigned to the securable itself). */ - @JsonProperty("inherited_from_type") private SecurableType inheritedFromType; /** The privilege assigned to the principal. */ - @JsonProperty("privilege") private Privilege privilege; public EffectivePrivilege setInheritedFromName(String inheritedFromName) { @@ -77,4 +85,42 @@ public String toString() { .add("privilege", privilege) .toString(); } + + EffectivePrivilegePb toPb() { + EffectivePrivilegePb pb = new EffectivePrivilegePb(); + pb.setInheritedFromName(inheritedFromName); + pb.setInheritedFromType(inheritedFromType); + pb.setPrivilege(privilege); + + return pb; + } + + static EffectivePrivilege fromPb(EffectivePrivilegePb pb) { + EffectivePrivilege model = new EffectivePrivilege(); + model.setInheritedFromName(pb.getInheritedFromName()); + model.setInheritedFromType(pb.getInheritedFromType()); + model.setPrivilege(pb.getPrivilege()); + + return model; + } + + public static class EffectivePrivilegeSerializer extends JsonSerializer { + @Override + public void serialize(EffectivePrivilege value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EffectivePrivilegePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EffectivePrivilegeDeserializer extends JsonDeserializer { + @Override + public EffectivePrivilege deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EffectivePrivilegePb pb = mapper.readValue(p, EffectivePrivilegePb.class); + return EffectivePrivilege.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java index b7d320b97..844d68edb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EffectivePrivilegeAssignment.EffectivePrivilegeAssignmentSerializer.class) +@JsonDeserialize( + using = EffectivePrivilegeAssignment.EffectivePrivilegeAssignmentDeserializer.class) public class EffectivePrivilegeAssignment { /** The principal (user email address or group name). 
*/ - @JsonProperty("principal") private String principal; /** The privileges conveyed to the principal (either directly or via inheritance). */ - @JsonProperty("privileges") private Collection privileges; public EffectivePrivilegeAssignment setPrincipal(String principal) { @@ -56,4 +66,43 @@ public String toString() { .add("privileges", privileges) .toString(); } + + EffectivePrivilegeAssignmentPb toPb() { + EffectivePrivilegeAssignmentPb pb = new EffectivePrivilegeAssignmentPb(); + pb.setPrincipal(principal); + pb.setPrivileges(privileges); + + return pb; + } + + static EffectivePrivilegeAssignment fromPb(EffectivePrivilegeAssignmentPb pb) { + EffectivePrivilegeAssignment model = new EffectivePrivilegeAssignment(); + model.setPrincipal(pb.getPrincipal()); + model.setPrivileges(pb.getPrivileges()); + + return model; + } + + public static class EffectivePrivilegeAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + EffectivePrivilegeAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EffectivePrivilegeAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EffectivePrivilegeAssignmentDeserializer + extends JsonDeserializer { + @Override + public EffectivePrivilegeAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EffectivePrivilegeAssignmentPb pb = mapper.readValue(p, EffectivePrivilegeAssignmentPb.class); + return EffectivePrivilegeAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignmentPb.java new file mode 100755 index 000000000..f75d78b0b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignmentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EffectivePrivilegeAssignmentPb { + @JsonProperty("principal") + private String principal; + + @JsonProperty("privileges") + private Collection privileges; + + public EffectivePrivilegeAssignmentPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public EffectivePrivilegeAssignmentPb setPrivileges(Collection privileges) { + this.privileges = privileges; + return this; + } + + public Collection getPrivileges() { + return privileges; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EffectivePrivilegeAssignmentPb that = (EffectivePrivilegeAssignmentPb) o; + return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges); + } + + @Override + public int hashCode() { + return Objects.hash(principal, privileges); + } + + @Override + public String toString() { + return new 
ToStringer(EffectivePrivilegeAssignmentPb.class) + .add("principal", principal) + .add("privileges", privileges) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegePb.java new file mode 100755 index 000000000..fbc5642a9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EffectivePrivilegePb { + @JsonProperty("inherited_from_name") + private String inheritedFromName; + + @JsonProperty("inherited_from_type") + private SecurableType inheritedFromType; + + @JsonProperty("privilege") + private Privilege privilege; + + public EffectivePrivilegePb setInheritedFromName(String inheritedFromName) { + this.inheritedFromName = inheritedFromName; + return this; + } + + public String getInheritedFromName() { + return inheritedFromName; + } + + public EffectivePrivilegePb setInheritedFromType(SecurableType inheritedFromType) { + this.inheritedFromType = inheritedFromType; + return this; + } + + public SecurableType getInheritedFromType() { + return inheritedFromType; + } + + public EffectivePrivilegePb setPrivilege(Privilege privilege) { + this.privilege = privilege; + return this; + } + + public Privilege getPrivilege() { + return privilege; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EffectivePrivilegePb that = (EffectivePrivilegePb) o; + return Objects.equals(inheritedFromName, that.inheritedFromName) + && 
Objects.equals(inheritedFromType, that.inheritedFromType) + && Objects.equals(privilege, that.privilege); + } + + @Override + public int hashCode() { + return Objects.hash(inheritedFromName, inheritedFromType, privilege); + } + + @Override + public String toString() { + return new ToStringer(EffectivePrivilegePb.class) + .add("inheritedFromName", inheritedFromName) + .add("inheritedFromType", inheritedFromType) + .add("privilege", privilege) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java index 7a0a9a477..3bd169658 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EnableRequest.EnableRequestSerializer.class) +@JsonDeserialize(using = EnableRequest.EnableRequestDeserializer.class) public class EnableRequest { /** the catalog for which the system schema is to enabled in */ - @JsonProperty("catalog_name") private String catalogName; /** The metastore ID 
under which the system schema lives. */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Full name of the system schema. */ - @JsonIgnore private String schemaName; + private String schemaName; public EnableRequest setCatalogName(String catalogName) { this.catalogName = catalogName; @@ -70,4 +79,41 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + EnableRequestPb toPb() { + EnableRequestPb pb = new EnableRequestPb(); + pb.setCatalogName(catalogName); + pb.setMetastoreId(metastoreId); + pb.setSchemaName(schemaName); + + return pb; + } + + static EnableRequest fromPb(EnableRequestPb pb) { + EnableRequest model = new EnableRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class EnableRequestSerializer extends JsonSerializer { + @Override + public void serialize(EnableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnableRequestDeserializer extends JsonDeserializer { + @Override + public EnableRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnableRequestPb pb = mapper.readValue(p, EnableRequestPb.class); + return EnableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequestPb.java new file mode 100755 index 000000000..24437aae8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequestPb.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnableRequestPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonIgnore private String metastoreId; + + @JsonIgnore private String schemaName; + + public EnableRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public EnableRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public EnableRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableRequestPb that = (EnableRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() 
{ + return Objects.hash(catalogName, metastoreId, schemaName); + } + + @Override + public String toString() { + return new ToStringer(EnableRequestPb.class) + .add("catalogName", catalogName) + .add("metastoreId", metastoreId) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java index 72ccaf810..aa8f79de2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EnableResponse.EnableResponseSerializer.class) +@JsonDeserialize(using = EnableResponse.EnableResponseDeserializer.class) public class EnableResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(EnableResponse.class).toString(); } + + EnableResponsePb toPb() { + EnableResponsePb pb = new EnableResponsePb(); + + return pb; + } + + static EnableResponse fromPb(EnableResponsePb pb) { + EnableResponse model = new EnableResponse(); + + return model; + } + + public static class EnableResponseSerializer extends JsonSerializer { 
+ @Override + public void serialize(EnableResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnableResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnableResponseDeserializer extends JsonDeserializer { + @Override + public EnableResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnableResponsePb pb = mapper.readValue(p, EnableResponsePb.class); + return EnableResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponsePb.java new file mode 100755 index 000000000..a50e9e5fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EnableResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EnableResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java index efdee6e9b..bf13cbaf9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Encryption options that apply to clients connecting to cloud storage. 
*/ @Generated +@JsonSerialize(using = EncryptionDetails.EncryptionDetailsSerializer.class) +@JsonDeserialize(using = EncryptionDetails.EncryptionDetailsDeserializer.class) public class EncryptionDetails { /** Server-Side Encryption properties for clients communicating with AWS s3. */ - @JsonProperty("sse_encryption_details") private SseEncryptionDetails sseEncryptionDetails; public EncryptionDetails setSseEncryptionDetails(SseEncryptionDetails sseEncryptionDetails) { @@ -42,4 +52,38 @@ public String toString() { .add("sseEncryptionDetails", sseEncryptionDetails) .toString(); } + + EncryptionDetailsPb toPb() { + EncryptionDetailsPb pb = new EncryptionDetailsPb(); + pb.setSseEncryptionDetails(sseEncryptionDetails); + + return pb; + } + + static EncryptionDetails fromPb(EncryptionDetailsPb pb) { + EncryptionDetails model = new EncryptionDetails(); + model.setSseEncryptionDetails(pb.getSseEncryptionDetails()); + + return model; + } + + public static class EncryptionDetailsSerializer extends JsonSerializer { + @Override + public void serialize(EncryptionDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EncryptionDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EncryptionDetailsDeserializer extends JsonDeserializer { + @Override + public EncryptionDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EncryptionDetailsPb pb = mapper.readValue(p, EncryptionDetailsPb.class); + return EncryptionDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetailsPb.java new file mode 100755 index 000000000..7297acc7b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetailsPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Encryption options that apply to clients connecting to cloud storage. */ +@Generated +class EncryptionDetailsPb { + @JsonProperty("sse_encryption_details") + private SseEncryptionDetails sseEncryptionDetails; + + public EncryptionDetailsPb setSseEncryptionDetails(SseEncryptionDetails sseEncryptionDetails) { + this.sseEncryptionDetails = sseEncryptionDetails; + return this; + } + + public SseEncryptionDetails getSseEncryptionDetails() { + return sseEncryptionDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EncryptionDetailsPb that = (EncryptionDetailsPb) o; + return Objects.equals(sseEncryptionDetails, that.sseEncryptionDetails); + } + + @Override + public int hashCode() { + return Objects.hash(sseEncryptionDetails); + } + + @Override + public String toString() { + return new ToStringer(EncryptionDetailsPb.class) + .add("sseEncryptionDetails", sseEncryptionDetails) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java index 13b2d0b9f..5517ba6d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get boolean reflecting if table exists */ @Generated +@JsonSerialize(using = ExistsRequest.ExistsRequestSerializer.class) +@JsonDeserialize(using = ExistsRequest.ExistsRequestDeserializer.class) public class ExistsRequest { /** Full name of the table. 
*/ - @JsonIgnore private String fullName; + private String fullName; public ExistsRequest setFullName(String fullName) { this.fullName = fullName; @@ -39,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(ExistsRequest.class).add("fullName", fullName).toString(); } + + ExistsRequestPb toPb() { + ExistsRequestPb pb = new ExistsRequestPb(); + pb.setFullName(fullName); + + return pb; + } + + static ExistsRequest fromPb(ExistsRequestPb pb) { + ExistsRequest model = new ExistsRequest(); + model.setFullName(pb.getFullName()); + + return model; + } + + public static class ExistsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ExistsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExistsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExistsRequestDeserializer extends JsonDeserializer { + @Override + public ExistsRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExistsRequestPb pb = mapper.readValue(p, ExistsRequestPb.class); + return ExistsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequestPb.java new file mode 100755 index 000000000..c7fb1faf5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get boolean reflecting if table exists */ +@Generated +class ExistsRequestPb { + @JsonIgnore private String fullName; + + public ExistsRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExistsRequestPb that = (ExistsRequestPb) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(ExistsRequestPb.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index b8a003e2b..876069b5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -4,44 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalLocationInfo.ExternalLocationInfoSerializer.class) +@JsonDeserialize(using = ExternalLocationInfo.ExternalLocationInfoDeserializer.class) public class ExternalLocationInfo { /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Time at which this external location was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of external location creator. */ - @JsonProperty("created_by") private String createdBy; /** Unique ID of the location's storage credential. */ - @JsonProperty("credential_id") private String credentialId; /** Name of the storage credential used with this location. */ - @JsonProperty("credential_name") private String credentialName; /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ - @JsonProperty("enable_file_events") private Boolean enableFileEvents; /** Encryption options that apply to clients connecting to cloud storage. */ - @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; /** @@ -49,43 +52,33 @@ public class ExternalLocationInfo { * enabled, the access to the location falls back to cluster credentials if UC credentials are not * sufficient. */ - @JsonProperty("fallback") private Boolean fallback; /** [Create:OPT Update:OPT] File event queue settings. 
*/ - @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; /** */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Unique identifier of metastore hosting the external location. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of the external location. */ - @JsonProperty("name") private String name; /** The owner of the external location. */ - @JsonProperty("owner") private String owner; /** Indicates whether the external location is read-only. */ - @JsonProperty("read_only") private Boolean readOnly; /** Time at which external location this was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the external location. */ - @JsonProperty("updated_by") private String updatedBy; /** Path URL of the external location. */ - @JsonProperty("url") private String url; public ExternalLocationInfo setBrowseOnly(Boolean browseOnly) { @@ -321,4 +314,74 @@ public String toString() { .add("url", url) .toString(); } + + ExternalLocationInfoPb toPb() { + ExternalLocationInfoPb pb = new ExternalLocationInfoPb(); + pb.setBrowseOnly(browseOnly); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setCredentialId(credentialId); + pb.setCredentialName(credentialName); + pb.setEnableFileEvents(enableFileEvents); + pb.setEncryptionDetails(encryptionDetails); + pb.setFallback(fallback); + pb.setFileEventQueue(fileEventQueue); + pb.setIsolationMode(isolationMode); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setReadOnly(readOnly); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setUrl(url); + + return pb; + } + + static ExternalLocationInfo fromPb(ExternalLocationInfoPb pb) { + ExternalLocationInfo model = new ExternalLocationInfo(); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); 
+ model.setCreatedBy(pb.getCreatedBy()); + model.setCredentialId(pb.getCredentialId()); + model.setCredentialName(pb.getCredentialName()); + model.setEnableFileEvents(pb.getEnableFileEvents()); + model.setEncryptionDetails(pb.getEncryptionDetails()); + model.setFallback(pb.getFallback()); + model.setFileEventQueue(pb.getFileEventQueue()); + model.setIsolationMode(pb.getIsolationMode()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setReadOnly(pb.getReadOnly()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class ExternalLocationInfoSerializer extends JsonSerializer { + @Override + public void serialize( + ExternalLocationInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalLocationInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalLocationInfoDeserializer + extends JsonDeserializer { + @Override + public ExternalLocationInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalLocationInfoPb pb = mapper.readValue(p, ExternalLocationInfoPb.class); + return ExternalLocationInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfoPb.java new file mode 100755 index 000000000..ccd878975 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfoPb.java @@ -0,0 +1,299 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalLocationInfoPb { + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("credential_id") + private String credentialId; + + @JsonProperty("credential_name") + private String credentialName; + + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + + @JsonProperty("encryption_details") + private EncryptionDetails encryptionDetails; + + @JsonProperty("fallback") + private Boolean fallback; + + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("url") + private String url; + + public ExternalLocationInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public ExternalLocationInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ExternalLocationInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExternalLocationInfoPb setCreatedBy(String createdBy) { + 
this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExternalLocationInfoPb setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public ExternalLocationInfoPb setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public ExternalLocationInfoPb setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + + public ExternalLocationInfoPb setEncryptionDetails(EncryptionDetails encryptionDetails) { + this.encryptionDetails = encryptionDetails; + return this; + } + + public EncryptionDetails getEncryptionDetails() { + return encryptionDetails; + } + + public ExternalLocationInfoPb setFallback(Boolean fallback) { + this.fallback = fallback; + return this; + } + + public Boolean getFallback() { + return fallback; + } + + public ExternalLocationInfoPb setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + + public ExternalLocationInfoPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public ExternalLocationInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ExternalLocationInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalLocationInfoPb setOwner(String owner) { + this.owner = owner; + return 
this; + } + + public String getOwner() { + return owner; + } + + public ExternalLocationInfoPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public ExternalLocationInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ExternalLocationInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public ExternalLocationInfoPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLocationInfoPb that = (ExternalLocationInfoPb) o; + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(credentialId, that.credentialId) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(enableFileEvents, that.enableFileEvents) + && Objects.equals(encryptionDetails, that.encryptionDetails) + && Objects.equals(fallback, that.fallback) + && Objects.equals(fileEventQueue, that.fileEventQueue) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + browseOnly, + comment, + createdAt, + createdBy, + credentialId, + credentialName, + enableFileEvents, + encryptionDetails, + 
fallback, + fileEventQueue, + isolationMode, + metastoreId, + name, + owner, + readOnly, + updatedAt, + updatedBy, + url); + } + + @Override + public String toString() { + return new ToStringer(ExternalLocationInfoPb.class) + .add("browseOnly", browseOnly) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("credentialId", credentialId) + .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) + .add("encryptionDetails", encryptionDetails) + .add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("readOnly", readOnly) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java index 8c3107d8c..cee17960b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java @@ -21,7 +21,7 @@ public ExternalLocationInfo create(CreateExternalLocation request) { String path = "/api/2.1/unity-catalog/external-locations"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ExternalLocationInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteExternalLocationRequest request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); 
+ ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ExternalLocationInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListExternalLocationsResponse list(ListExternalLocationsRequest request) String path = "/api/2.1/unity-catalog/external-locations"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListExternalLocationsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public ExternalLocationInfo update(UpdateExternalLocation request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ExternalLocationInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java index 3a0447bc2..b9e217a31 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,20 +21,20 @@ * ONLINE_PIPELINE_FAILED state. */ @Generated +@JsonSerialize(using = FailedStatus.FailedStatusSerializer.class) +@JsonDeserialize(using = FailedStatus.FailedStatusDeserializer.class) public class FailedStatus { /** * The last source table Delta version that was synced to the online table. Note that this Delta * version may only be partially synced to the online table. Only populated if the table is still * online and available for serving. */ - @JsonProperty("last_processed_commit_version") private Long lastProcessedCommitVersion; /** * The timestamp of the last time any data was synchronized from the source table to the online * table. Only populated if the table is still online and available for serving. 
*/ - @JsonProperty("timestamp") private String timestamp; public FailedStatus setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { @@ -67,4 +76,39 @@ public String toString() { .add("timestamp", timestamp) .toString(); } + + FailedStatusPb toPb() { + FailedStatusPb pb = new FailedStatusPb(); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + + return pb; + } + + static FailedStatus fromPb(FailedStatusPb pb) { + FailedStatus model = new FailedStatus(); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class FailedStatusSerializer extends JsonSerializer { + @Override + public void serialize(FailedStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FailedStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FailedStatusDeserializer extends JsonDeserializer { + @Override + public FailedStatus deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FailedStatusPb pb = mapper.readValue(p, FailedStatusPb.class); + return FailedStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatusPb.java new file mode 100755 index 000000000..150aeed6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatusPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the + * ONLINE_PIPELINE_FAILED state. + */ +@Generated +class FailedStatusPb { + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + public FailedStatusPb setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public FailedStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FailedStatusPb that = (FailedStatusPb) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(FailedStatusPb.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java index 0fd645462..9bc7f78f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FileEventQueue.FileEventQueueSerializer.class) +@JsonDeserialize(using = FileEventQueue.FileEventQueueDeserializer.class) public class FileEventQueue { /** */ - @JsonProperty("managed_aqs") private AzureQueueStorage managedAqs; /** */ - @JsonProperty("managed_pubsub") private GcpPubsub managedPubsub; /** */ - @JsonProperty("managed_sqs") private AwsSqsQueue managedSqs; /** */ - @JsonProperty("provided_aqs") private AzureQueueStorage providedAqs; /** */ - @JsonProperty("provided_pubsub") private GcpPubsub providedPubsub; /** */ - @JsonProperty("provided_sqs") private AwsSqsQueue providedSqs; public FileEventQueue setManagedAqs(AzureQueueStorage managedAqs) { @@ -117,4 +122,48 @@ public String toString() { .add("providedSqs", providedSqs) .toString(); } + + FileEventQueuePb toPb() { + FileEventQueuePb pb = new FileEventQueuePb(); + pb.setManagedAqs(managedAqs); + pb.setManagedPubsub(managedPubsub); + pb.setManagedSqs(managedSqs); + pb.setProvidedAqs(providedAqs); + pb.setProvidedPubsub(providedPubsub); + pb.setProvidedSqs(providedSqs); + + return pb; + } + + static FileEventQueue fromPb(FileEventQueuePb pb) { + 
FileEventQueue model = new FileEventQueue(); + model.setManagedAqs(pb.getManagedAqs()); + model.setManagedPubsub(pb.getManagedPubsub()); + model.setManagedSqs(pb.getManagedSqs()); + model.setProvidedAqs(pb.getProvidedAqs()); + model.setProvidedPubsub(pb.getProvidedPubsub()); + model.setProvidedSqs(pb.getProvidedSqs()); + + return model; + } + + public static class FileEventQueueSerializer extends JsonSerializer { + @Override + public void serialize(FileEventQueue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileEventQueuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileEventQueueDeserializer extends JsonDeserializer { + @Override + public FileEventQueue deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileEventQueuePb pb = mapper.readValue(p, FileEventQueuePb.class); + return FileEventQueue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueuePb.java new file mode 100755 index 000000000..b48f33c64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueuePb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileEventQueuePb { + @JsonProperty("managed_aqs") + private AzureQueueStorage managedAqs; + + @JsonProperty("managed_pubsub") + private GcpPubsub managedPubsub; + + @JsonProperty("managed_sqs") + private AwsSqsQueue managedSqs; + + @JsonProperty("provided_aqs") + private AzureQueueStorage providedAqs; + + @JsonProperty("provided_pubsub") + private GcpPubsub providedPubsub; + + @JsonProperty("provided_sqs") + private AwsSqsQueue providedSqs; + + public FileEventQueuePb setManagedAqs(AzureQueueStorage managedAqs) { + this.managedAqs = managedAqs; + return this; + } + + public AzureQueueStorage getManagedAqs() { + return managedAqs; + } + + public FileEventQueuePb setManagedPubsub(GcpPubsub managedPubsub) { + this.managedPubsub = managedPubsub; + return this; + } + + public GcpPubsub getManagedPubsub() { + return managedPubsub; + } + + public FileEventQueuePb setManagedSqs(AwsSqsQueue managedSqs) { + this.managedSqs = managedSqs; + return this; + } + + public AwsSqsQueue getManagedSqs() { + return managedSqs; + } + + public FileEventQueuePb setProvidedAqs(AzureQueueStorage providedAqs) { + this.providedAqs = providedAqs; + return this; + } + + public AzureQueueStorage getProvidedAqs() { + return providedAqs; + } + + public FileEventQueuePb setProvidedPubsub(GcpPubsub providedPubsub) { + this.providedPubsub = providedPubsub; + return this; + } + + public GcpPubsub getProvidedPubsub() { + return providedPubsub; + } + + public FileEventQueuePb setProvidedSqs(AwsSqsQueue providedSqs) { + this.providedSqs = providedSqs; + return this; + } + + public AwsSqsQueue getProvidedSqs() { + return providedSqs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + FileEventQueuePb that = (FileEventQueuePb) o; + return Objects.equals(managedAqs, that.managedAqs) + && Objects.equals(managedPubsub, that.managedPubsub) + && Objects.equals(managedSqs, that.managedSqs) + && Objects.equals(providedAqs, that.providedAqs) + && Objects.equals(providedPubsub, that.providedPubsub) + && Objects.equals(providedSqs, that.providedSqs); + } + + @Override + public int hashCode() { + return Objects.hash( + managedAqs, managedPubsub, managedSqs, providedAqs, providedPubsub, providedSqs); + } + + @Override + public String toString() { + return new ToStringer(FileEventQueuePb.class) + .add("managedAqs", managedAqs) + .add("managedPubsub", managedPubsub) + .add("managedSqs", managedSqs) + .add("providedAqs", providedAqs) + .add("providedPubsub", providedPubsub) + .add("providedSqs", providedSqs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java index cfccffb7d..e67ca87ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ForeignKeyConstraint.ForeignKeyConstraintSerializer.class) +@JsonDeserialize(using = ForeignKeyConstraint.ForeignKeyConstraintDeserializer.class) public class ForeignKeyConstraint { /** Column names for this constraint. */ - @JsonProperty("child_columns") private Collection childColumns; /** The name of the constraint. */ - @JsonProperty("name") private String name; /** Column names for this constraint. */ - @JsonProperty("parent_columns") private Collection parentColumns; /** The full name of the parent constraint. */ - @JsonProperty("parent_table") private String parentTable; public ForeignKeyConstraint setChildColumns(Collection childColumns) { @@ -87,4 +94,46 @@ public String toString() { .add("parentTable", parentTable) .toString(); } + + ForeignKeyConstraintPb toPb() { + ForeignKeyConstraintPb pb = new ForeignKeyConstraintPb(); + pb.setChildColumns(childColumns); + pb.setName(name); + pb.setParentColumns(parentColumns); + pb.setParentTable(parentTable); + + return pb; + } + + static ForeignKeyConstraint fromPb(ForeignKeyConstraintPb pb) { + ForeignKeyConstraint model = new ForeignKeyConstraint(); + model.setChildColumns(pb.getChildColumns()); + model.setName(pb.getName()); + model.setParentColumns(pb.getParentColumns()); + model.setParentTable(pb.getParentTable()); + + return model; + } + + public static class ForeignKeyConstraintSerializer extends JsonSerializer { + @Override + public void serialize( + ForeignKeyConstraint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForeignKeyConstraintPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForeignKeyConstraintDeserializer + extends JsonDeserializer { + @Override + public ForeignKeyConstraint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and 
it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForeignKeyConstraintPb pb = mapper.readValue(p, ForeignKeyConstraintPb.class); + return ForeignKeyConstraint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraintPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraintPb.java new file mode 100755 index 000000000..2c0fe439b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraintPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ForeignKeyConstraintPb { + @JsonProperty("child_columns") + private Collection childColumns; + + @JsonProperty("name") + private String name; + + @JsonProperty("parent_columns") + private Collection parentColumns; + + @JsonProperty("parent_table") + private String parentTable; + + public ForeignKeyConstraintPb setChildColumns(Collection childColumns) { + this.childColumns = childColumns; + return this; + } + + public Collection getChildColumns() { + return childColumns; + } + + public ForeignKeyConstraintPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ForeignKeyConstraintPb setParentColumns(Collection parentColumns) { + this.parentColumns = parentColumns; + return this; + } + + public Collection getParentColumns() { + return parentColumns; + } + + public ForeignKeyConstraintPb setParentTable(String parentTable) { + this.parentTable = parentTable; + return this; + } + + public String getParentTable() { + return parentTable; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForeignKeyConstraintPb that = (ForeignKeyConstraintPb) o; + return Objects.equals(childColumns, that.childColumns) + && Objects.equals(name, that.name) + && Objects.equals(parentColumns, that.parentColumns) + && Objects.equals(parentTable, that.parentTable); + } + + @Override + public int hashCode() { + return Objects.hash(childColumns, name, parentColumns, parentTable); + } + + @Override + public String toString() { + return new ToStringer(ForeignKeyConstraintPb.class) + .add("childColumns", childColumns) + .add("name", name) + .add("parentColumns", parentColumns) + .add("parentTable", parentTable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java index c03a80007..4a6706349 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A function that is dependent on a SQL object. 
*/ @Generated +@JsonSerialize(using = FunctionDependency.FunctionDependencySerializer.class) +@JsonDeserialize(using = FunctionDependency.FunctionDependencyDeserializer.class) public class FunctionDependency { /** * Full name of the dependent function, in the form of * __catalog_name__.__schema_name__.__function_name__. */ - @JsonProperty("function_full_name") private String functionFullName; public FunctionDependency setFunctionFullName(String functionFullName) { @@ -45,4 +55,38 @@ public String toString() { .add("functionFullName", functionFullName) .toString(); } + + FunctionDependencyPb toPb() { + FunctionDependencyPb pb = new FunctionDependencyPb(); + pb.setFunctionFullName(functionFullName); + + return pb; + } + + static FunctionDependency fromPb(FunctionDependencyPb pb) { + FunctionDependency model = new FunctionDependency(); + model.setFunctionFullName(pb.getFunctionFullName()); + + return model; + } + + public static class FunctionDependencySerializer extends JsonSerializer { + @Override + public void serialize(FunctionDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionDependencyDeserializer extends JsonDeserializer { + @Override + public FunctionDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionDependencyPb pb = mapper.readValue(p, FunctionDependencyPb.class); + return FunctionDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependencyPb.java new file mode 100755 index 000000000..0ce2b5b1e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependencyPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A function that is dependent on a SQL object. */ +@Generated +class FunctionDependencyPb { + @JsonProperty("function_full_name") + private String functionFullName; + + public FunctionDependencyPb setFunctionFullName(String functionFullName) { + this.functionFullName = functionFullName; + return this; + } + + public String getFunctionFullName() { + return functionFullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionDependencyPb that = (FunctionDependencyPb) o; + return Objects.equals(functionFullName, that.functionFullName); + } + + @Override + public int hashCode() { + return Objects.hash(functionFullName); + } + + @Override + public String toString() { + return new ToStringer(FunctionDependencyPb.class) + .add("functionFullName", functionFullName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java index 02b5d835d..fae3a5e7a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java @@ -4,92 +4,83 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FunctionInfo.FunctionInfoSerializer.class) +@JsonDeserialize(using = FunctionInfo.FunctionInfoDeserializer.class) public class FunctionInfo { /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** Name of parent catalog. */ - @JsonProperty("catalog_name") private String catalogName; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Time at which this function was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of function creator. */ - @JsonProperty("created_by") private String createdBy; /** Scalar function return data type. */ - @JsonProperty("data_type") private ColumnTypeName dataType; /** External function language. */ - @JsonProperty("external_language") private String externalLanguage; /** External function name. 
*/ - @JsonProperty("external_name") private String externalName; /** Pretty printed function data type. */ - @JsonProperty("full_data_type") private String fullDataType; /** Full name of function, in form of __catalog_name__.__schema_name__.__function__name__ */ - @JsonProperty("full_name") private String fullName; /** Id of Function, relative to parent schema. */ - @JsonProperty("function_id") private String functionId; /** */ - @JsonProperty("input_params") private FunctionParameterInfos inputParams; /** Whether the function is deterministic. */ - @JsonProperty("is_deterministic") private Boolean isDeterministic; /** Function null call. */ - @JsonProperty("is_null_call") private Boolean isNullCall; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of function, relative to parent schema. */ - @JsonProperty("name") private String name; /** Username of current owner of function. */ - @JsonProperty("owner") private String owner; /** Function parameter style. **S** is the value for SQL. */ - @JsonProperty("parameter_style") private FunctionInfoParameterStyle parameterStyle; /** JSON-serialized key-value pair map, encoded (escaped) as a string. */ - @JsonProperty("properties") private String properties; /** Table function return parameters. */ - @JsonProperty("return_params") private FunctionParameterInfos returnParams; /** @@ -98,43 +89,33 @@ public class FunctionInfo { * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be * **NO_SQL**. */ - @JsonProperty("routine_body") private FunctionInfoRoutineBody routineBody; /** Function body. */ - @JsonProperty("routine_definition") private String routineDefinition; /** Function dependencies. */ - @JsonProperty("routine_dependencies") private DependencyList routineDependencies; /** Name of parent schema relative to its parent catalog. 
*/ - @JsonProperty("schema_name") private String schemaName; /** Function security type. */ - @JsonProperty("security_type") private FunctionInfoSecurityType securityType; /** Specific name of the function; Reserved for future use. */ - @JsonProperty("specific_name") private String specificName; /** Function SQL data access. */ - @JsonProperty("sql_data_access") private FunctionInfoSqlDataAccess sqlDataAccess; /** List of schemes whose objects can be referenced without qualification. */ - @JsonProperty("sql_path") private String sqlPath; /** Time at which this function was created, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified function. */ - @JsonProperty("updated_by") private String updatedBy; public FunctionInfo setBrowseOnly(Boolean browseOnly) { @@ -514,4 +495,95 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + FunctionInfoPb toPb() { + FunctionInfoPb pb = new FunctionInfoPb(); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDataType(dataType); + pb.setExternalLanguage(externalLanguage); + pb.setExternalName(externalName); + pb.setFullDataType(fullDataType); + pb.setFullName(fullName); + pb.setFunctionId(functionId); + pb.setInputParams(inputParams); + pb.setIsDeterministic(isDeterministic); + pb.setIsNullCall(isNullCall); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setParameterStyle(parameterStyle); + pb.setProperties(properties); + pb.setReturnParams(returnParams); + pb.setRoutineBody(routineBody); + pb.setRoutineDefinition(routineDefinition); + pb.setRoutineDependencies(routineDependencies); + pb.setSchemaName(schemaName); + pb.setSecurityType(securityType); + pb.setSpecificName(specificName); + pb.setSqlDataAccess(sqlDataAccess); + pb.setSqlPath(sqlPath); + pb.setUpdatedAt(updatedAt); + 
pb.setUpdatedBy(updatedBy); + + return pb; + } + + static FunctionInfo fromPb(FunctionInfoPb pb) { + FunctionInfo model = new FunctionInfo(); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDataType(pb.getDataType()); + model.setExternalLanguage(pb.getExternalLanguage()); + model.setExternalName(pb.getExternalName()); + model.setFullDataType(pb.getFullDataType()); + model.setFullName(pb.getFullName()); + model.setFunctionId(pb.getFunctionId()); + model.setInputParams(pb.getInputParams()); + model.setIsDeterministic(pb.getIsDeterministic()); + model.setIsNullCall(pb.getIsNullCall()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setParameterStyle(pb.getParameterStyle()); + model.setProperties(pb.getProperties()); + model.setReturnParams(pb.getReturnParams()); + model.setRoutineBody(pb.getRoutineBody()); + model.setRoutineDefinition(pb.getRoutineDefinition()); + model.setRoutineDependencies(pb.getRoutineDependencies()); + model.setSchemaName(pb.getSchemaName()); + model.setSecurityType(pb.getSecurityType()); + model.setSpecificName(pb.getSpecificName()); + model.setSqlDataAccess(pb.getSqlDataAccess()); + model.setSqlPath(pb.getSqlPath()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class FunctionInfoSerializer extends JsonSerializer { + @Override + public void serialize(FunctionInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionInfoDeserializer extends JsonDeserializer { + @Override + public FunctionInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us 
in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionInfoPb pb = mapper.readValue(p, FunctionInfoPb.class); + return FunctionInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoPb.java new file mode 100755 index 000000000..995887798 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoPb.java @@ -0,0 +1,479 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FunctionInfoPb { + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("data_type") + private ColumnTypeName dataType; + + @JsonProperty("external_language") + private String externalLanguage; + + @JsonProperty("external_name") + private String externalName; + + @JsonProperty("full_data_type") + private String fullDataType; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("function_id") + private String functionId; + + @JsonProperty("input_params") + private FunctionParameterInfos inputParams; + + @JsonProperty("is_deterministic") + private Boolean isDeterministic; + + @JsonProperty("is_null_call") + private Boolean isNullCall; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + 
@JsonProperty("parameter_style") + private FunctionInfoParameterStyle parameterStyle; + + @JsonProperty("properties") + private String properties; + + @JsonProperty("return_params") + private FunctionParameterInfos returnParams; + + @JsonProperty("routine_body") + private FunctionInfoRoutineBody routineBody; + + @JsonProperty("routine_definition") + private String routineDefinition; + + @JsonProperty("routine_dependencies") + private DependencyList routineDependencies; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("security_type") + private FunctionInfoSecurityType securityType; + + @JsonProperty("specific_name") + private String specificName; + + @JsonProperty("sql_data_access") + private FunctionInfoSqlDataAccess sqlDataAccess; + + @JsonProperty("sql_path") + private String sqlPath; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public FunctionInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public FunctionInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public FunctionInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public FunctionInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public FunctionInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public FunctionInfoPb setDataType(ColumnTypeName dataType) { + this.dataType = dataType; + return this; + } + + public ColumnTypeName getDataType() { + return dataType; + } + + public FunctionInfoPb 
setExternalLanguage(String externalLanguage) { + this.externalLanguage = externalLanguage; + return this; + } + + public String getExternalLanguage() { + return externalLanguage; + } + + public FunctionInfoPb setExternalName(String externalName) { + this.externalName = externalName; + return this; + } + + public String getExternalName() { + return externalName; + } + + public FunctionInfoPb setFullDataType(String fullDataType) { + this.fullDataType = fullDataType; + return this; + } + + public String getFullDataType() { + return fullDataType; + } + + public FunctionInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public FunctionInfoPb setFunctionId(String functionId) { + this.functionId = functionId; + return this; + } + + public String getFunctionId() { + return functionId; + } + + public FunctionInfoPb setInputParams(FunctionParameterInfos inputParams) { + this.inputParams = inputParams; + return this; + } + + public FunctionParameterInfos getInputParams() { + return inputParams; + } + + public FunctionInfoPb setIsDeterministic(Boolean isDeterministic) { + this.isDeterministic = isDeterministic; + return this; + } + + public Boolean getIsDeterministic() { + return isDeterministic; + } + + public FunctionInfoPb setIsNullCall(Boolean isNullCall) { + this.isNullCall = isNullCall; + return this; + } + + public Boolean getIsNullCall() { + return isNullCall; + } + + public FunctionInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public FunctionInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FunctionInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public FunctionInfoPb 
setParameterStyle(FunctionInfoParameterStyle parameterStyle) { + this.parameterStyle = parameterStyle; + return this; + } + + public FunctionInfoParameterStyle getParameterStyle() { + return parameterStyle; + } + + public FunctionInfoPb setProperties(String properties) { + this.properties = properties; + return this; + } + + public String getProperties() { + return properties; + } + + public FunctionInfoPb setReturnParams(FunctionParameterInfos returnParams) { + this.returnParams = returnParams; + return this; + } + + public FunctionParameterInfos getReturnParams() { + return returnParams; + } + + public FunctionInfoPb setRoutineBody(FunctionInfoRoutineBody routineBody) { + this.routineBody = routineBody; + return this; + } + + public FunctionInfoRoutineBody getRoutineBody() { + return routineBody; + } + + public FunctionInfoPb setRoutineDefinition(String routineDefinition) { + this.routineDefinition = routineDefinition; + return this; + } + + public String getRoutineDefinition() { + return routineDefinition; + } + + public FunctionInfoPb setRoutineDependencies(DependencyList routineDependencies) { + this.routineDependencies = routineDependencies; + return this; + } + + public DependencyList getRoutineDependencies() { + return routineDependencies; + } + + public FunctionInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public FunctionInfoPb setSecurityType(FunctionInfoSecurityType securityType) { + this.securityType = securityType; + return this; + } + + public FunctionInfoSecurityType getSecurityType() { + return securityType; + } + + public FunctionInfoPb setSpecificName(String specificName) { + this.specificName = specificName; + return this; + } + + public String getSpecificName() { + return specificName; + } + + public FunctionInfoPb setSqlDataAccess(FunctionInfoSqlDataAccess sqlDataAccess) { + this.sqlDataAccess = sqlDataAccess; + return this; + } + + 
public FunctionInfoSqlDataAccess getSqlDataAccess() { + return sqlDataAccess; + } + + public FunctionInfoPb setSqlPath(String sqlPath) { + this.sqlPath = sqlPath; + return this; + } + + public String getSqlPath() { + return sqlPath; + } + + public FunctionInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public FunctionInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionInfoPb that = (FunctionInfoPb) o; + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(dataType, that.dataType) + && Objects.equals(externalLanguage, that.externalLanguage) + && Objects.equals(externalName, that.externalName) + && Objects.equals(fullDataType, that.fullDataType) + && Objects.equals(fullName, that.fullName) + && Objects.equals(functionId, that.functionId) + && Objects.equals(inputParams, that.inputParams) + && Objects.equals(isDeterministic, that.isDeterministic) + && Objects.equals(isNullCall, that.isNullCall) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(parameterStyle, that.parameterStyle) + && Objects.equals(properties, that.properties) + && Objects.equals(returnParams, that.returnParams) + && Objects.equals(routineBody, that.routineBody) + && Objects.equals(routineDefinition, that.routineDefinition) + && Objects.equals(routineDependencies, that.routineDependencies) + && Objects.equals(schemaName, that.schemaName) + && 
Objects.equals(securityType, that.securityType) + && Objects.equals(specificName, that.specificName) + && Objects.equals(sqlDataAccess, that.sqlDataAccess) + && Objects.equals(sqlPath, that.sqlPath) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + dataType, + externalLanguage, + externalName, + fullDataType, + fullName, + functionId, + inputParams, + isDeterministic, + isNullCall, + metastoreId, + name, + owner, + parameterStyle, + properties, + returnParams, + routineBody, + routineDefinition, + routineDependencies, + schemaName, + securityType, + specificName, + sqlDataAccess, + sqlPath, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(FunctionInfoPb.class) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("dataType", dataType) + .add("externalLanguage", externalLanguage) + .add("externalName", externalName) + .add("fullDataType", fullDataType) + .add("fullName", fullName) + .add("functionId", functionId) + .add("inputParams", inputParams) + .add("isDeterministic", isDeterministic) + .add("isNullCall", isNullCall) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("parameterStyle", parameterStyle) + .add("properties", properties) + .add("returnParams", returnParams) + .add("routineBody", routineBody) + .add("routineDefinition", routineDefinition) + .add("routineDependencies", routineDependencies) + .add("schemaName", schemaName) + .add("securityType", securityType) + .add("specificName", specificName) + .add("sqlDataAccess", sqlDataAccess) + .add("sqlPath", sqlPath) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java index 794ac8243..eee396b40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java @@ -4,57 +4,56 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FunctionParameterInfo.FunctionParameterInfoSerializer.class) +@JsonDeserialize(using = FunctionParameterInfo.FunctionParameterInfoDeserializer.class) public class FunctionParameterInfo { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Name of parameter. */ - @JsonProperty("name") private String name; /** Default value of the parameter. */ - @JsonProperty("parameter_default") private String parameterDefault; /** The mode of the function parameter. */ - @JsonProperty("parameter_mode") private FunctionParameterMode parameterMode; /** The type of function parameter. */ - @JsonProperty("parameter_type") private FunctionParameterType parameterType; /** Ordinal position of column (starting at position 0). 
*/ - @JsonProperty("position") private Long position; /** Format of IntervalType. */ - @JsonProperty("type_interval_type") private String typeIntervalType; /** Full data type spec, JSON-serialized. */ - @JsonProperty("type_json") private String typeJson; /** */ - @JsonProperty("type_name") private ColumnTypeName typeName; /** Digits of precision; required on Create for DecimalTypes. */ - @JsonProperty("type_precision") private Long typePrecision; /** Digits to right of decimal; Required on Create for DecimalTypes. */ - @JsonProperty("type_scale") private Long typeScale; /** Full data type spec, SQL/catalogString text. */ - @JsonProperty("type_text") private String typeText; public FunctionParameterInfo setComment(String comment) { @@ -218,4 +217,63 @@ public String toString() { .add("typeText", typeText) .toString(); } + + FunctionParameterInfoPb toPb() { + FunctionParameterInfoPb pb = new FunctionParameterInfoPb(); + pb.setComment(comment); + pb.setName(name); + pb.setParameterDefault(parameterDefault); + pb.setParameterMode(parameterMode); + pb.setParameterType(parameterType); + pb.setPosition(position); + pb.setTypeIntervalType(typeIntervalType); + pb.setTypeJson(typeJson); + pb.setTypeName(typeName); + pb.setTypePrecision(typePrecision); + pb.setTypeScale(typeScale); + pb.setTypeText(typeText); + + return pb; + } + + static FunctionParameterInfo fromPb(FunctionParameterInfoPb pb) { + FunctionParameterInfo model = new FunctionParameterInfo(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setParameterDefault(pb.getParameterDefault()); + model.setParameterMode(pb.getParameterMode()); + model.setParameterType(pb.getParameterType()); + model.setPosition(pb.getPosition()); + model.setTypeIntervalType(pb.getTypeIntervalType()); + model.setTypeJson(pb.getTypeJson()); + model.setTypeName(pb.getTypeName()); + model.setTypePrecision(pb.getTypePrecision()); + model.setTypeScale(pb.getTypeScale()); + model.setTypeText(pb.getTypeText()); + + 
return model; + } + + public static class FunctionParameterInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + FunctionParameterInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionParameterInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionParameterInfoDeserializer + extends JsonDeserializer { + @Override + public FunctionParameterInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionParameterInfoPb pb = mapper.readValue(p, FunctionParameterInfoPb.class); + return FunctionParameterInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfoPb.java new file mode 100755 index 000000000..a9a04fe09 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfoPb.java @@ -0,0 +1,209 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FunctionParameterInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("parameter_default") + private String parameterDefault; + + @JsonProperty("parameter_mode") + private FunctionParameterMode parameterMode; + + @JsonProperty("parameter_type") + private FunctionParameterType parameterType; + + @JsonProperty("position") + private Long position; + + @JsonProperty("type_interval_type") + private String typeIntervalType; + + @JsonProperty("type_json") + private String typeJson; + + @JsonProperty("type_name") + private ColumnTypeName typeName; + + @JsonProperty("type_precision") + private Long typePrecision; + + @JsonProperty("type_scale") + private Long typeScale; + + @JsonProperty("type_text") + private String typeText; + + public FunctionParameterInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public FunctionParameterInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FunctionParameterInfoPb setParameterDefault(String parameterDefault) { + this.parameterDefault = parameterDefault; + return this; + } + + public String getParameterDefault() { + return parameterDefault; + } + + public FunctionParameterInfoPb setParameterMode(FunctionParameterMode parameterMode) { + this.parameterMode = parameterMode; + return this; + } + + public FunctionParameterMode getParameterMode() { + return parameterMode; + } + + public FunctionParameterInfoPb setParameterType(FunctionParameterType parameterType) { + this.parameterType = parameterType; + return this; + } + + public FunctionParameterType 
getParameterType() { + return parameterType; + } + + public FunctionParameterInfoPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public FunctionParameterInfoPb setTypeIntervalType(String typeIntervalType) { + this.typeIntervalType = typeIntervalType; + return this; + } + + public String getTypeIntervalType() { + return typeIntervalType; + } + + public FunctionParameterInfoPb setTypeJson(String typeJson) { + this.typeJson = typeJson; + return this; + } + + public String getTypeJson() { + return typeJson; + } + + public FunctionParameterInfoPb setTypeName(ColumnTypeName typeName) { + this.typeName = typeName; + return this; + } + + public ColumnTypeName getTypeName() { + return typeName; + } + + public FunctionParameterInfoPb setTypePrecision(Long typePrecision) { + this.typePrecision = typePrecision; + return this; + } + + public Long getTypePrecision() { + return typePrecision; + } + + public FunctionParameterInfoPb setTypeScale(Long typeScale) { + this.typeScale = typeScale; + return this; + } + + public Long getTypeScale() { + return typeScale; + } + + public FunctionParameterInfoPb setTypeText(String typeText) { + this.typeText = typeText; + return this; + } + + public String getTypeText() { + return typeText; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfoPb that = (FunctionParameterInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(parameterDefault, that.parameterDefault) + && Objects.equals(parameterMode, that.parameterMode) + && Objects.equals(parameterType, that.parameterType) + && Objects.equals(position, that.position) + && Objects.equals(typeIntervalType, that.typeIntervalType) + && Objects.equals(typeJson, that.typeJson) + && Objects.equals(typeName, that.typeName) + && 
Objects.equals(typePrecision, that.typePrecision) + && Objects.equals(typeScale, that.typeScale) + && Objects.equals(typeText, that.typeText); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + name, + parameterDefault, + parameterMode, + parameterType, + position, + typeIntervalType, + typeJson, + typeName, + typePrecision, + typeScale, + typeText); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfoPb.class) + .add("comment", comment) + .add("name", name) + .add("parameterDefault", parameterDefault) + .add("parameterMode", parameterMode) + .add("parameterType", parameterType) + .add("position", position) + .add("typeIntervalType", typeIntervalType) + .add("typeJson", typeJson) + .add("typeName", typeName) + .add("typePrecision", typePrecision) + .add("typeScale", typeScale) + .add("typeText", typeText) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java index 8242101b9..1416d68f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = FunctionParameterInfos.FunctionParameterInfosSerializer.class) +@JsonDeserialize(using = FunctionParameterInfos.FunctionParameterInfosDeserializer.class) public class FunctionParameterInfos { /** The array of __FunctionParameterInfo__ definitions of the function's parameters. */ - @JsonProperty("parameters") private Collection parameters; public FunctionParameterInfos setParameters(Collection parameters) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(FunctionParameterInfos.class).add("parameters", parameters).toString(); } + + FunctionParameterInfosPb toPb() { + FunctionParameterInfosPb pb = new FunctionParameterInfosPb(); + pb.setParameters(parameters); + + return pb; + } + + static FunctionParameterInfos fromPb(FunctionParameterInfosPb pb) { + FunctionParameterInfos model = new FunctionParameterInfos(); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class FunctionParameterInfosSerializer + extends JsonSerializer { + @Override + public void serialize( + FunctionParameterInfos value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionParameterInfosPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionParameterInfosDeserializer + extends JsonDeserializer { + @Override + public FunctionParameterInfos deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionParameterInfosPb pb = mapper.readValue(p, FunctionParameterInfosPb.class); + return FunctionParameterInfos.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfosPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfosPb.java new file mode 100755 index 000000000..f674233e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfosPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class FunctionParameterInfosPb { + @JsonProperty("parameters") + private Collection parameters; + + public FunctionParameterInfosPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfosPb that = (FunctionParameterInfosPb) o; + return Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(parameters); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfosPb.class).add("parameters", parameters).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java index 422449786..87e005845 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java @@ -21,7 +21,7 @@ public FunctionInfo create(CreateFunctionRequest request) { String path = "/api/2.1/unity-catalog/functions"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FunctionInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteFunctionRequest request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public FunctionInfo get(GetFunctionRequest request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FunctionInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListFunctionsResponse list(ListFunctionsRequest request) { String path = "/api/2.1/unity-catalog/functions"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFunctionsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public FunctionInfo update(UpdateFunction request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); try { Request req = new 
Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FunctionInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java index 6955e7a2c..c916e63e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,9 +21,10 @@ * https://developers.google.com/identity/protocols/oauth2/service-account */ @Generated +@JsonSerialize(using = GcpOauthToken.GcpOauthTokenSerializer.class) +@JsonDeserialize(using = GcpOauthToken.GcpOauthTokenDeserializer.class) public class GcpOauthToken { /** */ - @JsonProperty("oauth_token") private String oauthToken; public GcpOauthToken setOauthToken(String oauthToken) { @@ -43,4 +53,37 @@ public int hashCode() { public String toString() { return new ToStringer(GcpOauthToken.class).add("oauthToken", oauthToken).toString(); } + + 
GcpOauthTokenPb toPb() { + GcpOauthTokenPb pb = new GcpOauthTokenPb(); + pb.setOauthToken(oauthToken); + + return pb; + } + + static GcpOauthToken fromPb(GcpOauthTokenPb pb) { + GcpOauthToken model = new GcpOauthToken(); + model.setOauthToken(pb.getOauthToken()); + + return model; + } + + public static class GcpOauthTokenSerializer extends JsonSerializer { + @Override + public void serialize(GcpOauthToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpOauthTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpOauthTokenDeserializer extends JsonDeserializer { + @Override + public GcpOauthToken deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpOauthTokenPb pb = mapper.readValue(p, GcpOauthTokenPb.class); + return GcpOauthToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthTokenPb.java new file mode 100755 index 000000000..33448f2c6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthTokenPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * GCP temporary credentials for API authentication. 
Read more at + * https://developers.google.com/identity/protocols/oauth2/service-account + */ +@Generated +class GcpOauthTokenPb { + @JsonProperty("oauth_token") + private String oauthToken; + + public GcpOauthTokenPb setOauthToken(String oauthToken) { + this.oauthToken = oauthToken; + return this; + } + + public String getOauthToken() { + return oauthToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpOauthTokenPb that = (GcpOauthTokenPb) o; + return Objects.equals(oauthToken, that.oauthToken); + } + + @Override + public int hashCode() { + return Objects.hash(oauthToken); + } + + @Override + public String toString() { + return new ToStringer(GcpOauthTokenPb.class).add("oauthToken", oauthToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java index b834a9ff0..e9f337253 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
GcpPubsub.GcpPubsubSerializer.class) +@JsonDeserialize(using = GcpPubsub.GcpPubsubDeserializer.class) public class GcpPubsub { /** Unique identifier included in the name of file events managed cloud resources. */ - @JsonProperty("managed_resource_id") private String managedResourceId; /** * The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription * name} REQUIRED for provided_pubsub. */ - @JsonProperty("subscription_name") private String subscriptionName; public GcpPubsub setManagedResourceId(String managedResourceId) { @@ -59,4 +68,39 @@ public String toString() { .add("subscriptionName", subscriptionName) .toString(); } + + GcpPubsubPb toPb() { + GcpPubsubPb pb = new GcpPubsubPb(); + pb.setManagedResourceId(managedResourceId); + pb.setSubscriptionName(subscriptionName); + + return pb; + } + + static GcpPubsub fromPb(GcpPubsubPb pb) { + GcpPubsub model = new GcpPubsub(); + model.setManagedResourceId(pb.getManagedResourceId()); + model.setSubscriptionName(pb.getSubscriptionName()); + + return model; + } + + public static class GcpPubsubSerializer extends JsonSerializer { + @Override + public void serialize(GcpPubsub value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpPubsubPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpPubsubDeserializer extends JsonDeserializer { + @Override + public GcpPubsub deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpPubsubPb pb = mapper.readValue(p, GcpPubsubPb.class); + return GcpPubsub.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsubPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsubPb.java new file mode 100755 index 000000000..0df212193 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsubPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GcpPubsubPb { + @JsonProperty("managed_resource_id") + private String managedResourceId; + + @JsonProperty("subscription_name") + private String subscriptionName; + + public GcpPubsubPb setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public GcpPubsubPb setSubscriptionName(String subscriptionName) { + this.subscriptionName = subscriptionName; + return this; + } + + public String getSubscriptionName() { + return subscriptionName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpPubsubPb that = (GcpPubsubPb) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(subscriptionName, that.subscriptionName); + } + + @Override + public int hashCode() { + return Objects.hash(managedResourceId, subscriptionName); + } + + @Override + public String toString() { + return new ToStringer(GcpPubsubPb.class) + .add("managedResourceId", managedResourceId) + .add("subscriptionName", 
subscriptionName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java index 7ca999107..36c49b63e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java @@ -4,19 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The Azure cloud options to customize the requested temporary credential */ @Generated +@JsonSerialize( + using = + GenerateTemporaryServiceCredentialAzureOptions + .GenerateTemporaryServiceCredentialAzureOptionsSerializer.class) +@JsonDeserialize( + using = + GenerateTemporaryServiceCredentialAzureOptions + .GenerateTemporaryServiceCredentialAzureOptionsDeserializer.class) public class GenerateTemporaryServiceCredentialAzureOptions { /** * The resources to which the temporary Azure credential should apply. 
These resources are the * scopes that are passed to the token provider (see * https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python) */ - @JsonProperty("resources") private Collection resources; public GenerateTemporaryServiceCredentialAzureOptions setResources(Collection resources) { @@ -48,4 +64,47 @@ public String toString() { .add("resources", resources) .toString(); } + + GenerateTemporaryServiceCredentialAzureOptionsPb toPb() { + GenerateTemporaryServiceCredentialAzureOptionsPb pb = + new GenerateTemporaryServiceCredentialAzureOptionsPb(); + pb.setResources(resources); + + return pb; + } + + static GenerateTemporaryServiceCredentialAzureOptions fromPb( + GenerateTemporaryServiceCredentialAzureOptionsPb pb) { + GenerateTemporaryServiceCredentialAzureOptions model = + new GenerateTemporaryServiceCredentialAzureOptions(); + model.setResources(pb.getResources()); + + return model; + } + + public static class GenerateTemporaryServiceCredentialAzureOptionsSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateTemporaryServiceCredentialAzureOptions value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenerateTemporaryServiceCredentialAzureOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenerateTemporaryServiceCredentialAzureOptionsDeserializer + extends JsonDeserializer { + @Override + public GenerateTemporaryServiceCredentialAzureOptions deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateTemporaryServiceCredentialAzureOptionsPb pb = + mapper.readValue(p, GenerateTemporaryServiceCredentialAzureOptionsPb.class); + return GenerateTemporaryServiceCredentialAzureOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptionsPb.java new file mode 100755 index 000000000..0ccaac8e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptionsPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The Azure cloud options to customize the requested temporary credential */ +@Generated +class GenerateTemporaryServiceCredentialAzureOptionsPb { + @JsonProperty("resources") + private Collection resources; + + public GenerateTemporaryServiceCredentialAzureOptionsPb setResources( + Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialAzureOptionsPb that = + (GenerateTemporaryServiceCredentialAzureOptionsPb) o; + return Objects.equals(resources, that.resources); + } + + @Override + public int hashCode() { + return Objects.hash(resources); + } + + @Override + public String toString() { + return new 
ToStringer(GenerateTemporaryServiceCredentialAzureOptionsPb.class) + .add("resources", resources) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java index b1739a0a9..89b8a19e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java @@ -4,19 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The GCP cloud options to customize the requested temporary credential */ @Generated +@JsonSerialize( + using = + GenerateTemporaryServiceCredentialGcpOptions + .GenerateTemporaryServiceCredentialGcpOptionsSerializer.class) +@JsonDeserialize( + using = + GenerateTemporaryServiceCredentialGcpOptions + .GenerateTemporaryServiceCredentialGcpOptionsDeserializer.class) public class GenerateTemporaryServiceCredentialGcpOptions { /** * The scopes to which the temporary GCP credential should apply. 
These resources are the scopes * that are passed to the token provider (see * https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials) */ - @JsonProperty("scopes") private Collection scopes; public GenerateTemporaryServiceCredentialGcpOptions setScopes(Collection scopes) { @@ -48,4 +64,47 @@ public String toString() { .add("scopes", scopes) .toString(); } + + GenerateTemporaryServiceCredentialGcpOptionsPb toPb() { + GenerateTemporaryServiceCredentialGcpOptionsPb pb = + new GenerateTemporaryServiceCredentialGcpOptionsPb(); + pb.setScopes(scopes); + + return pb; + } + + static GenerateTemporaryServiceCredentialGcpOptions fromPb( + GenerateTemporaryServiceCredentialGcpOptionsPb pb) { + GenerateTemporaryServiceCredentialGcpOptions model = + new GenerateTemporaryServiceCredentialGcpOptions(); + model.setScopes(pb.getScopes()); + + return model; + } + + public static class GenerateTemporaryServiceCredentialGcpOptionsSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateTemporaryServiceCredentialGcpOptions value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenerateTemporaryServiceCredentialGcpOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenerateTemporaryServiceCredentialGcpOptionsDeserializer + extends JsonDeserializer { + @Override + public GenerateTemporaryServiceCredentialGcpOptions deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateTemporaryServiceCredentialGcpOptionsPb pb = + mapper.readValue(p, GenerateTemporaryServiceCredentialGcpOptionsPb.class); + return GenerateTemporaryServiceCredentialGcpOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptionsPb.java new file mode 100755 index 000000000..50e1d781d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptionsPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The GCP cloud options to customize the requested temporary credential */ +@Generated +class GenerateTemporaryServiceCredentialGcpOptionsPb { + @JsonProperty("scopes") + private Collection scopes; + + public GenerateTemporaryServiceCredentialGcpOptionsPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialGcpOptionsPb that = + (GenerateTemporaryServiceCredentialGcpOptionsPb) o; + return Objects.equals(scopes, that.scopes); + } + + @Override + public int hashCode() { + return Objects.hash(scopes); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialGcpOptionsPb.class) + .add("scopes", scopes) + .toString(); + } +} diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java index f768675fe..6f04c55ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenerateTemporaryServiceCredentialRequest + .GenerateTemporaryServiceCredentialRequestSerializer.class) +@JsonDeserialize( + using = + GenerateTemporaryServiceCredentialRequest + .GenerateTemporaryServiceCredentialRequestDeserializer.class) public class GenerateTemporaryServiceCredentialRequest { /** The Azure cloud options to customize the requested temporary credential */ - @JsonProperty("azure_options") private GenerateTemporaryServiceCredentialAzureOptions azureOptions; /** The name of the service credential used to generate a temporary credential */ - @JsonProperty("credential_name") private String credentialName; /** The GCP cloud options to customize the requested temporary credential */ 
- @JsonProperty("gcp_options") private GenerateTemporaryServiceCredentialGcpOptions gcpOptions; public GenerateTemporaryServiceCredentialRequest setAzureOptions( @@ -73,4 +87,51 @@ public String toString() { .add("gcpOptions", gcpOptions) .toString(); } + + GenerateTemporaryServiceCredentialRequestPb toPb() { + GenerateTemporaryServiceCredentialRequestPb pb = + new GenerateTemporaryServiceCredentialRequestPb(); + pb.setAzureOptions(azureOptions); + pb.setCredentialName(credentialName); + pb.setGcpOptions(gcpOptions); + + return pb; + } + + static GenerateTemporaryServiceCredentialRequest fromPb( + GenerateTemporaryServiceCredentialRequestPb pb) { + GenerateTemporaryServiceCredentialRequest model = + new GenerateTemporaryServiceCredentialRequest(); + model.setAzureOptions(pb.getAzureOptions()); + model.setCredentialName(pb.getCredentialName()); + model.setGcpOptions(pb.getGcpOptions()); + + return model; + } + + public static class GenerateTemporaryServiceCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateTemporaryServiceCredentialRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenerateTemporaryServiceCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenerateTemporaryServiceCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GenerateTemporaryServiceCredentialRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateTemporaryServiceCredentialRequestPb pb = + mapper.readValue(p, GenerateTemporaryServiceCredentialRequestPb.class); + return GenerateTemporaryServiceCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequestPb.java new file mode 100755 index 000000000..ccb3ee2f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenerateTemporaryServiceCredentialRequestPb { + @JsonProperty("azure_options") + private GenerateTemporaryServiceCredentialAzureOptions azureOptions; + + @JsonProperty("credential_name") + private String credentialName; + + @JsonProperty("gcp_options") + private GenerateTemporaryServiceCredentialGcpOptions gcpOptions; + + public GenerateTemporaryServiceCredentialRequestPb setAzureOptions( + GenerateTemporaryServiceCredentialAzureOptions azureOptions) { + this.azureOptions = azureOptions; + return this; + } + + public GenerateTemporaryServiceCredentialAzureOptions getAzureOptions() { + return azureOptions; + } + + public GenerateTemporaryServiceCredentialRequestPb setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public GenerateTemporaryServiceCredentialRequestPb setGcpOptions( + GenerateTemporaryServiceCredentialGcpOptions 
gcpOptions) { + this.gcpOptions = gcpOptions; + return this; + } + + public GenerateTemporaryServiceCredentialGcpOptions getGcpOptions() { + return gcpOptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialRequestPb that = + (GenerateTemporaryServiceCredentialRequestPb) o; + return Objects.equals(azureOptions, that.azureOptions) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(gcpOptions, that.gcpOptions); + } + + @Override + public int hashCode() { + return Objects.hash(azureOptions, credentialName, gcpOptions); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialRequestPb.class) + .add("azureOptions", azureOptions) + .add("credentialName", credentialName) + .add("gcpOptions", gcpOptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java index 250720e39..5d65e1c4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java @@ -4,21 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenerateTemporaryTableCredentialRequest.GenerateTemporaryTableCredentialRequestSerializer + .class) +@JsonDeserialize( + using = + GenerateTemporaryTableCredentialRequest.GenerateTemporaryTableCredentialRequestDeserializer + .class) public class GenerateTemporaryTableCredentialRequest { /** * The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is * specified, the credentials returned will have write permissions, otherwise, it will be read * only. */ - @JsonProperty("operation") private TableOperation operation; /** UUID of the table to read or write. */ - @JsonProperty("table_id") private String tableId; public GenerateTemporaryTableCredentialRequest setOperation(TableOperation operation) { @@ -59,4 +74,47 @@ public String toString() { .add("tableId", tableId) .toString(); } + + GenerateTemporaryTableCredentialRequestPb toPb() { + GenerateTemporaryTableCredentialRequestPb pb = new GenerateTemporaryTableCredentialRequestPb(); + pb.setOperation(operation); + pb.setTableId(tableId); + + return pb; + } + + static GenerateTemporaryTableCredentialRequest fromPb( + GenerateTemporaryTableCredentialRequestPb pb) { + GenerateTemporaryTableCredentialRequest model = new GenerateTemporaryTableCredentialRequest(); + model.setOperation(pb.getOperation()); + model.setTableId(pb.getTableId()); + + return model; + } + + public static class GenerateTemporaryTableCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateTemporaryTableCredentialRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenerateTemporaryTableCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, 
gen); + } + } + + public static class GenerateTemporaryTableCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GenerateTemporaryTableCredentialRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateTemporaryTableCredentialRequestPb pb = + mapper.readValue(p, GenerateTemporaryTableCredentialRequestPb.class); + return GenerateTemporaryTableCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequestPb.java new file mode 100755 index 000000000..0e2c6c30c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenerateTemporaryTableCredentialRequestPb { + @JsonProperty("operation") + private TableOperation operation; + + @JsonProperty("table_id") + private String tableId; + + public GenerateTemporaryTableCredentialRequestPb setOperation(TableOperation operation) { + this.operation = operation; + return this; + } + + public TableOperation getOperation() { + return operation; + } + + public GenerateTemporaryTableCredentialRequestPb setTableId(String tableId) { + this.tableId = tableId; + return this; + } + + public String getTableId() { + return tableId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryTableCredentialRequestPb that = (GenerateTemporaryTableCredentialRequestPb) o; + return Objects.equals(operation, that.operation) && Objects.equals(tableId, that.tableId); + } + + @Override + public int hashCode() { + return Objects.hash(operation, tableId); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryTableCredentialRequestPb.class) + .add("operation", operation) + .add("tableId", tableId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java index be752eec7..cd23f4c64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java @@ -4,16 +4,32 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenerateTemporaryTableCredentialResponse.GenerateTemporaryTableCredentialResponseSerializer + .class) +@JsonDeserialize( + using = + GenerateTemporaryTableCredentialResponse + .GenerateTemporaryTableCredentialResponseDeserializer.class) public class GenerateTemporaryTableCredentialResponse { /** * AWS temporary credentials for API authentication. Read more at * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. */ - @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; /** @@ -21,39 +37,33 @@ public class GenerateTemporaryTableCredentialResponse { * Managed Identity. Read more at * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token */ - @JsonProperty("azure_aad") private AzureActiveDirectoryToken azureAad; /** * Azure temporary credentials for API authentication. Read more at * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ - @JsonProperty("azure_user_delegation_sas") private AzureUserDelegationSas azureUserDelegationSas; /** * Server time when the credential will expire, in epoch milliseconds. The API client is advised * to cache the credential given this expiration time. 
*/ - @JsonProperty("expiration_time") private Long expirationTime; /** * GCP temporary credentials for API authentication. Read more at * https://developers.google.com/identity/protocols/oauth2/service-account */ - @JsonProperty("gcp_oauth_token") private GcpOauthToken gcpOauthToken; /** * R2 temporary credentials for API authentication. Read more at * https://developers.cloudflare.com/r2/api/s3/tokens/. */ - @JsonProperty("r2_temp_credentials") private R2Credentials r2TempCredentials; /** The URL of the storage path accessible by the temporary credential. */ - @JsonProperty("url") private String url; public GenerateTemporaryTableCredentialResponse setAwsTempCredentials( @@ -160,4 +170,58 @@ public String toString() { .add("url", url) .toString(); } + + GenerateTemporaryTableCredentialResponsePb toPb() { + GenerateTemporaryTableCredentialResponsePb pb = + new GenerateTemporaryTableCredentialResponsePb(); + pb.setAwsTempCredentials(awsTempCredentials); + pb.setAzureAad(azureAad); + pb.setAzureUserDelegationSas(azureUserDelegationSas); + pb.setExpirationTime(expirationTime); + pb.setGcpOauthToken(gcpOauthToken); + pb.setR2TempCredentials(r2TempCredentials); + pb.setUrl(url); + + return pb; + } + + static GenerateTemporaryTableCredentialResponse fromPb( + GenerateTemporaryTableCredentialResponsePb pb) { + GenerateTemporaryTableCredentialResponse model = new GenerateTemporaryTableCredentialResponse(); + model.setAwsTempCredentials(pb.getAwsTempCredentials()); + model.setAzureAad(pb.getAzureAad()); + model.setAzureUserDelegationSas(pb.getAzureUserDelegationSas()); + model.setExpirationTime(pb.getExpirationTime()); + model.setGcpOauthToken(pb.getGcpOauthToken()); + model.setR2TempCredentials(pb.getR2TempCredentials()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class GenerateTemporaryTableCredentialResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateTemporaryTableCredentialResponse value, + JsonGenerator 
gen, + SerializerProvider provider) + throws IOException { + GenerateTemporaryTableCredentialResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenerateTemporaryTableCredentialResponseDeserializer + extends JsonDeserializer { + @Override + public GenerateTemporaryTableCredentialResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateTemporaryTableCredentialResponsePb pb = + mapper.readValue(p, GenerateTemporaryTableCredentialResponsePb.class); + return GenerateTemporaryTableCredentialResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponsePb.java new file mode 100755 index 000000000..4e739b72d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponsePb.java @@ -0,0 +1,139 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenerateTemporaryTableCredentialResponsePb { + @JsonProperty("aws_temp_credentials") + private AwsCredentials awsTempCredentials; + + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + + @JsonProperty("azure_user_delegation_sas") + private AzureUserDelegationSas azureUserDelegationSas; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("gcp_oauth_token") + private GcpOauthToken gcpOauthToken; + + @JsonProperty("r2_temp_credentials") + private R2Credentials r2TempCredentials; + + @JsonProperty("url") + private String url; + + public GenerateTemporaryTableCredentialResponsePb setAwsTempCredentials( + AwsCredentials awsTempCredentials) { + this.awsTempCredentials = awsTempCredentials; + return this; + } + + public AwsCredentials getAwsTempCredentials() { + return awsTempCredentials; + } + + public GenerateTemporaryTableCredentialResponsePb setAzureAad( + AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + + public GenerateTemporaryTableCredentialResponsePb setAzureUserDelegationSas( + AzureUserDelegationSas azureUserDelegationSas) { + this.azureUserDelegationSas = azureUserDelegationSas; + return this; + } + + public AzureUserDelegationSas getAzureUserDelegationSas() { + return azureUserDelegationSas; + } + + public GenerateTemporaryTableCredentialResponsePb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public GenerateTemporaryTableCredentialResponsePb setGcpOauthToken(GcpOauthToken gcpOauthToken) { + this.gcpOauthToken = gcpOauthToken; + 
return this; + } + + public GcpOauthToken getGcpOauthToken() { + return gcpOauthToken; + } + + public GenerateTemporaryTableCredentialResponsePb setR2TempCredentials( + R2Credentials r2TempCredentials) { + this.r2TempCredentials = r2TempCredentials; + return this; + } + + public R2Credentials getR2TempCredentials() { + return r2TempCredentials; + } + + public GenerateTemporaryTableCredentialResponsePb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryTableCredentialResponsePb that = + (GenerateTemporaryTableCredentialResponsePb) o; + return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) + && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(gcpOauthToken, that.gcpOauthToken) + && Objects.equals(r2TempCredentials, that.r2TempCredentials) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + awsTempCredentials, + azureAad, + azureUserDelegationSas, + expirationTime, + gcpOauthToken, + r2TempCredentials, + url); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryTableCredentialResponsePb.class) + .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) + .add("azureUserDelegationSas", azureUserDelegationSas) + .add("expirationTime", expirationTime) + .add("gcpOauthToken", gcpOauthToken) + .add("r2TempCredentials", r2TempCredentials) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java index 6d9d03e93..a93d5769d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Gets the metastore assignment for a workspace */ @Generated +@JsonSerialize( + using = + GetAccountMetastoreAssignmentRequest.GetAccountMetastoreAssignmentRequestSerializer.class) +@JsonDeserialize( + using = + GetAccountMetastoreAssignmentRequest.GetAccountMetastoreAssignmentRequestDeserializer.class) public class GetAccountMetastoreAssignmentRequest { /** Workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public GetAccountMetastoreAssignmentRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -41,4 +56,42 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + GetAccountMetastoreAssignmentRequestPb toPb() { + GetAccountMetastoreAssignmentRequestPb pb = new GetAccountMetastoreAssignmentRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static GetAccountMetastoreAssignmentRequest fromPb(GetAccountMetastoreAssignmentRequestPb pb) { + GetAccountMetastoreAssignmentRequest model = new GetAccountMetastoreAssignmentRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class GetAccountMetastoreAssignmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountMetastoreAssignmentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountMetastoreAssignmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountMetastoreAssignmentRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountMetastoreAssignmentRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountMetastoreAssignmentRequestPb pb = + mapper.readValue(p, GetAccountMetastoreAssignmentRequestPb.class); + return GetAccountMetastoreAssignmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequestPb.java new file mode 100755 index 000000000..e7cf1e988 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Gets the metastore assignment for a workspace */ +@Generated +class GetAccountMetastoreAssignmentRequestPb { + @JsonIgnore private Long workspaceId; + + public GetAccountMetastoreAssignmentRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountMetastoreAssignmentRequestPb that = (GetAccountMetastoreAssignmentRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetAccountMetastoreAssignmentRequestPb.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java index e4aa3900a..ef7cc7754 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a metastore */ @Generated +@JsonSerialize(using = GetAccountMetastoreRequest.GetAccountMetastoreRequestSerializer.class) +@JsonDeserialize(using = GetAccountMetastoreRequest.GetAccountMetastoreRequestDeserializer.class) public class GetAccountMetastoreRequest { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; public GetAccountMetastoreRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -41,4 +52,41 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + GetAccountMetastoreRequestPb toPb() { + GetAccountMetastoreRequestPb pb = new GetAccountMetastoreRequestPb(); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static GetAccountMetastoreRequest fromPb(GetAccountMetastoreRequestPb pb) { + GetAccountMetastoreRequest model = new GetAccountMetastoreRequest(); + 
model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class GetAccountMetastoreRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountMetastoreRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountMetastoreRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountMetastoreRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountMetastoreRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountMetastoreRequestPb pb = mapper.readValue(p, GetAccountMetastoreRequestPb.class); + return GetAccountMetastoreRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequestPb.java new file mode 100755 index 000000000..366da2289 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a metastore */ +@Generated +class GetAccountMetastoreRequestPb { + @JsonIgnore private String metastoreId; + + public GetAccountMetastoreRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountMetastoreRequestPb that = (GetAccountMetastoreRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId); + } + + @Override + public String toString() { + return new ToStringer(GetAccountMetastoreRequestPb.class) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java index 47d0fd89e..21241a8bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java @@ -4,17 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Gets the named storage credential */ @Generated +@JsonSerialize( + using = GetAccountStorageCredentialRequest.GetAccountStorageCredentialRequestSerializer.class) +@JsonDeserialize( + using = GetAccountStorageCredentialRequest.GetAccountStorageCredentialRequestDeserializer.class) public class GetAccountStorageCredentialRequest { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Name of the storage credential. */ - @JsonIgnore private String storageCredentialName; + private String storageCredentialName; public GetAccountStorageCredentialRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -55,4 +68,44 @@ public String toString() { .add("storageCredentialName", storageCredentialName) .toString(); } + + GetAccountStorageCredentialRequestPb toPb() { + GetAccountStorageCredentialRequestPb pb = new GetAccountStorageCredentialRequestPb(); + pb.setMetastoreId(metastoreId); + pb.setStorageCredentialName(storageCredentialName); + + return pb; + } + + static GetAccountStorageCredentialRequest fromPb(GetAccountStorageCredentialRequestPb pb) { + GetAccountStorageCredentialRequest model = new GetAccountStorageCredentialRequest(); + model.setMetastoreId(pb.getMetastoreId()); + model.setStorageCredentialName(pb.getStorageCredentialName()); + + return model; + } + + public static class GetAccountStorageCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountStorageCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountStorageCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } 
+ } + + public static class GetAccountStorageCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountStorageCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountStorageCredentialRequestPb pb = + mapper.readValue(p, GetAccountStorageCredentialRequestPb.class); + return GetAccountStorageCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequestPb.java new file mode 100755 index 000000000..d2bc77281 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Gets the named storage credential */ +@Generated +class GetAccountStorageCredentialRequestPb { + @JsonIgnore private String metastoreId; + + @JsonIgnore private String storageCredentialName; + + public GetAccountStorageCredentialRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public GetAccountStorageCredentialRequestPb setStorageCredentialName( + String storageCredentialName) { + this.storageCredentialName = storageCredentialName; + return this; + } + + public String getStorageCredentialName() { + return storageCredentialName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountStorageCredentialRequestPb that = (GetAccountStorageCredentialRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(storageCredentialName, that.storageCredentialName); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId, storageCredentialName); + } + + @Override + public String toString() { + return new ToStringer(GetAccountStorageCredentialRequestPb.class) + .add("metastoreId", metastoreId) + .add("storageCredentialName", storageCredentialName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java index 0a910bdc7..a436d4803 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an artifact allowlist */ @Generated +@JsonSerialize(using = GetArtifactAllowlistRequest.GetArtifactAllowlistRequestSerializer.class) +@JsonDeserialize(using = GetArtifactAllowlistRequest.GetArtifactAllowlistRequestDeserializer.class) public class GetArtifactAllowlistRequest { /** The artifact type of the allowlist. 
*/ - @JsonIgnore private ArtifactType artifactType; + private ArtifactType artifactType; public GetArtifactAllowlistRequest setArtifactType(ArtifactType artifactType) { this.artifactType = artifactType; @@ -41,4 +52,41 @@ public String toString() { .add("artifactType", artifactType) .toString(); } + + GetArtifactAllowlistRequestPb toPb() { + GetArtifactAllowlistRequestPb pb = new GetArtifactAllowlistRequestPb(); + pb.setArtifactType(artifactType); + + return pb; + } + + static GetArtifactAllowlistRequest fromPb(GetArtifactAllowlistRequestPb pb) { + GetArtifactAllowlistRequest model = new GetArtifactAllowlistRequest(); + model.setArtifactType(pb.getArtifactType()); + + return model; + } + + public static class GetArtifactAllowlistRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetArtifactAllowlistRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetArtifactAllowlistRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetArtifactAllowlistRequestDeserializer + extends JsonDeserializer { + @Override + public GetArtifactAllowlistRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetArtifactAllowlistRequestPb pb = mapper.readValue(p, GetArtifactAllowlistRequestPb.class); + return GetArtifactAllowlistRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequestPb.java new file mode 100755 index 000000000..bbf75c0eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an artifact allowlist */ +@Generated +class GetArtifactAllowlistRequestPb { + @JsonIgnore private ArtifactType artifactType; + + public GetArtifactAllowlistRequestPb setArtifactType(ArtifactType artifactType) { + this.artifactType = artifactType; + return this; + } + + public ArtifactType getArtifactType() { + return artifactType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetArtifactAllowlistRequestPb that = (GetArtifactAllowlistRequestPb) o; + return Objects.equals(artifactType, that.artifactType); + } + + @Override + public int hashCode() { + return Objects.hash(artifactType); + } + + @Override + public String toString() { + return new ToStringer(GetArtifactAllowlistRequestPb.class) + .add("artifactType", artifactType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java index 
f0fad5bb6..99c2fa219 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get securable workspace bindings */ @Generated +@JsonSerialize(using = GetBindingsRequest.GetBindingsRequestSerializer.class) +@JsonDeserialize(using = GetBindingsRequest.GetBindingsRequestDeserializer.class) public class GetBindingsRequest { /** * Maximum number of workspace bindings to return. - When set to 0, the page length is set to a @@ -18,23 +28,19 @@ public class GetBindingsRequest { * invalid parameter error is returned; - If not set, all the workspace bindings are returned (not * recommended). */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The name of the securable. 
*/ - @JsonIgnore private String securableName; + private String securableName; /** * The type of the securable to bind to a workspace (catalog, storage_credential, credential, or * external_location). */ - @JsonIgnore private String securableType; + private String securableType; public GetBindingsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; @@ -97,4 +103,44 @@ public String toString() { .add("securableType", securableType) .toString(); } + + GetBindingsRequestPb toPb() { + GetBindingsRequestPb pb = new GetBindingsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setSecurableName(securableName); + pb.setSecurableType(securableType); + + return pb; + } + + static GetBindingsRequest fromPb(GetBindingsRequestPb pb) { + GetBindingsRequest model = new GetBindingsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setSecurableName(pb.getSecurableName()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class GetBindingsRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetBindingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetBindingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetBindingsRequestDeserializer extends JsonDeserializer { + @Override + public GetBindingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetBindingsRequestPb pb = mapper.readValue(p, GetBindingsRequestPb.class); + return GetBindingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequestPb.java new file mode 100755 index 000000000..986d15b4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get securable workspace bindings */ +@Generated +class GetBindingsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private String securableName; + + @JsonIgnore private String securableType; + + public GetBindingsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public GetBindingsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public GetBindingsRequestPb setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public GetBindingsRequestPb setSecurableType(String securableType) { + this.securableType = securableType; + return this; + } + + public String getSecurableType() { + return securableType; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBindingsRequestPb that = (GetBindingsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(securableName, that.securableName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, securableName, securableType); + } + + @Override + public String toString() { + return new ToStringer(GetBindingsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("securableName", securableName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java index 9f967bcb0..e7bb1f531 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java @@ -3,23 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; 
import java.util.Objects; /** Get Model Version By Alias */ @Generated +@JsonSerialize(using = GetByAliasRequest.GetByAliasRequestSerializer.class) +@JsonDeserialize(using = GetByAliasRequest.GetByAliasRequestDeserializer.class) public class GetByAliasRequest { /** The name of the alias */ - @JsonIgnore private String alias; + private String alias; /** The three-level (fully qualified) name of the registered model */ - @JsonIgnore private String fullName; + private String fullName; /** Whether to include aliases associated with the model version in the response */ - @JsonIgnore - @QueryParam("include_aliases") private Boolean includeAliases; public GetByAliasRequest setAlias(String alias) { @@ -72,4 +80,42 @@ public String toString() { .add("includeAliases", includeAliases) .toString(); } + + GetByAliasRequestPb toPb() { + GetByAliasRequestPb pb = new GetByAliasRequestPb(); + pb.setAlias(alias); + pb.setFullName(fullName); + pb.setIncludeAliases(includeAliases); + + return pb; + } + + static GetByAliasRequest fromPb(GetByAliasRequestPb pb) { + GetByAliasRequest model = new GetByAliasRequest(); + model.setAlias(pb.getAlias()); + model.setFullName(pb.getFullName()); + model.setIncludeAliases(pb.getIncludeAliases()); + + return model; + } + + public static class GetByAliasRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetByAliasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetByAliasRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetByAliasRequestDeserializer extends JsonDeserializer { + @Override + public GetByAliasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetByAliasRequestPb pb = mapper.readValue(p, GetByAliasRequestPb.class); + return GetByAliasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequestPb.java new file mode 100755 index 000000000..cc4edbdda --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get Model Version By Alias */ +@Generated +class GetByAliasRequestPb { + @JsonIgnore private String alias; + + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_aliases") + private Boolean includeAliases; + + public GetByAliasRequestPb setAlias(String alias) { + this.alias = alias; + return this; + } + + public String getAlias() { + return alias; + } + + public GetByAliasRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetByAliasRequestPb setIncludeAliases(Boolean includeAliases) { + this.includeAliases = includeAliases; + return this; + } + + public Boolean getIncludeAliases() { + return includeAliases; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetByAliasRequestPb that = (GetByAliasRequestPb) o; + return Objects.equals(alias, that.alias) + && Objects.equals(fullName, that.fullName) + && Objects.equals(includeAliases, 
that.includeAliases); + } + + @Override + public int hashCode() { + return Objects.hash(alias, fullName, includeAliases); + } + + @Override + public String toString() { + return new ToStringer(GetByAliasRequestPb.class) + .add("alias", alias) + .add("fullName", fullName) + .add("includeAliases", includeAliases) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java index 82fe3ec30..34ec4691b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a catalog */ @Generated +@JsonSerialize(using = GetCatalogRequest.GetCatalogRequestSerializer.class) +@JsonDeserialize(using = GetCatalogRequest.GetCatalogRequestDeserializer.class) public class GetCatalogRequest { /** * Whether to include catalogs in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean 
includeBrowse; /** The name of the catalog. */ - @JsonIgnore private String name; + private String name; public GetCatalogRequest setIncludeBrowse(Boolean includeBrowse) { this.includeBrowse = includeBrowse; @@ -60,4 +68,40 @@ public String toString() { .add("name", name) .toString(); } + + GetCatalogRequestPb toPb() { + GetCatalogRequestPb pb = new GetCatalogRequestPb(); + pb.setIncludeBrowse(includeBrowse); + pb.setName(name); + + return pb; + } + + static GetCatalogRequest fromPb(GetCatalogRequestPb pb) { + GetCatalogRequest model = new GetCatalogRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setName(pb.getName()); + + return model; + } + + public static class GetCatalogRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCatalogRequestDeserializer extends JsonDeserializer { + @Override + public GetCatalogRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCatalogRequestPb pb = mapper.readValue(p, GetCatalogRequestPb.class); + return GetCatalogRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequestPb.java new file mode 100755 index 000000000..dbb4e5a97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a catalog */ +@Generated +class GetCatalogRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore private String name; + + public GetCatalogRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public GetCatalogRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCatalogRequestPb that = (GetCatalogRequestPb) o; + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, name); + } + + @Override + public String toString() { + return new ToStringer(GetCatalogRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java index b2aedb9a5..f0bb4f06b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetCatalogWorkspaceBindingsResponse.GetCatalogWorkspaceBindingsResponseSerializer.class) +@JsonDeserialize( + using = + GetCatalogWorkspaceBindingsResponse.GetCatalogWorkspaceBindingsResponseDeserializer.class) public class GetCatalogWorkspaceBindingsResponse { /** A list of workspace IDs */ - @JsonProperty("workspaces") private Collection workspaces; public GetCatalogWorkspaceBindingsResponse setWorkspaces(Collection workspaces) { @@ -42,4 +55,42 @@ public String toString() { .add("workspaces", workspaces) .toString(); } + + GetCatalogWorkspaceBindingsResponsePb toPb() { + GetCatalogWorkspaceBindingsResponsePb pb = new GetCatalogWorkspaceBindingsResponsePb(); + pb.setWorkspaces(workspaces); + + return pb; + } + + static GetCatalogWorkspaceBindingsResponse fromPb(GetCatalogWorkspaceBindingsResponsePb pb) { + GetCatalogWorkspaceBindingsResponse model = new GetCatalogWorkspaceBindingsResponse(); + model.setWorkspaces(pb.getWorkspaces()); + + return model; + } + + public static class GetCatalogWorkspaceBindingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCatalogWorkspaceBindingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCatalogWorkspaceBindingsResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCatalogWorkspaceBindingsResponseDeserializer + extends JsonDeserializer { + @Override + public GetCatalogWorkspaceBindingsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCatalogWorkspaceBindingsResponsePb pb = + mapper.readValue(p, GetCatalogWorkspaceBindingsResponsePb.class); + return GetCatalogWorkspaceBindingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponsePb.java new file mode 100755 index 000000000..747f75f6d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetCatalogWorkspaceBindingsResponsePb { + @JsonProperty("workspaces") + private Collection workspaces; + + public GetCatalogWorkspaceBindingsResponsePb setWorkspaces(Collection workspaces) { + this.workspaces = workspaces; + return this; + } + + public Collection getWorkspaces() { + return workspaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCatalogWorkspaceBindingsResponsePb that = (GetCatalogWorkspaceBindingsResponsePb) o; + return Objects.equals(workspaces, that.workspaces); + } + + @Override + public int hashCode() { + return Objects.hash(workspaces); + } + + @Override + public String toString() { + return new ToStringer(GetCatalogWorkspaceBindingsResponsePb.class) + .add("workspaces", workspaces) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java index 153f54518..4d30df3a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a connection */ @Generated +@JsonSerialize(using = GetConnectionRequest.GetConnectionRequestSerializer.class) +@JsonDeserialize(using = GetConnectionRequest.GetConnectionRequestDeserializer.class) public class GetConnectionRequest { /** Name of the connection. */ - @JsonIgnore private String name; + private String name; public GetConnectionRequest setName(String name) { this.name = name; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetConnectionRequest.class).add("name", name).toString(); } + + GetConnectionRequestPb toPb() { + GetConnectionRequestPb pb = new GetConnectionRequestPb(); + pb.setName(name); + + return pb; + } + + static GetConnectionRequest fromPb(GetConnectionRequestPb pb) { + GetConnectionRequest model = new GetConnectionRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetConnectionRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GetConnectionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetConnectionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetConnectionRequestDeserializer + extends JsonDeserializer { + @Override + public GetConnectionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetConnectionRequestPb pb = mapper.readValue(p, GetConnectionRequestPb.class); + return GetConnectionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequestPb.java new file mode 100755 index 000000000..ab786d254 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a connection */ +@Generated +class GetConnectionRequestPb { + @JsonIgnore private String name; + + public GetConnectionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetConnectionRequestPb that = (GetConnectionRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetConnectionRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java index cfb1de4fe..73a8a8924 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a credential */ @Generated +@JsonSerialize(using = GetCredentialRequest.GetCredentialRequestSerializer.class) +@JsonDeserialize(using = GetCredentialRequest.GetCredentialRequestDeserializer.class) public class GetCredentialRequest { /** Name of the credential. 
*/ - @JsonIgnore private String nameArg; + private String nameArg; public GetCredentialRequest setNameArg(String nameArg) { this.nameArg = nameArg; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetCredentialRequest.class).add("nameArg", nameArg).toString(); } + + GetCredentialRequestPb toPb() { + GetCredentialRequestPb pb = new GetCredentialRequestPb(); + pb.setNameArg(nameArg); + + return pb; + } + + static GetCredentialRequest fromPb(GetCredentialRequestPb pb) { + GetCredentialRequest model = new GetCredentialRequest(); + model.setNameArg(pb.getNameArg()); + + return model; + } + + public static class GetCredentialRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GetCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GetCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCredentialRequestPb pb = mapper.readValue(p, GetCredentialRequestPb.class); + return GetCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequestPb.java new file mode 100755 index 000000000..f4ebdf9fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a credential */ +@Generated +class GetCredentialRequestPb { + @JsonIgnore private String nameArg; + + public GetCredentialRequestPb setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialRequestPb that = (GetCredentialRequestPb) o; + return Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(nameArg); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialRequestPb.class).add("nameArg", nameArg).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java index a0603c5fa..a6f3923cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java @@ -3,27 +3,35 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get effective permissions */ @Generated +@JsonSerialize(using = GetEffectiveRequest.GetEffectiveRequestSerializer.class) +@JsonDeserialize(using = GetEffectiveRequest.GetEffectiveRequestDeserializer.class) public class GetEffectiveRequest { /** Full name of securable. */ - @JsonIgnore private String fullName; + private String fullName; /** * If provided, only the effective permissions for the specified principal (user or group) are * returned. */ - @JsonIgnore - @QueryParam("principal") private String principal; /** Type of securable. */ - @JsonIgnore private SecurableType securableType; + private SecurableType securableType; public GetEffectiveRequest setFullName(String fullName) { this.fullName = fullName; @@ -75,4 +83,43 @@ public String toString() { .add("securableType", securableType) .toString(); } + + GetEffectiveRequestPb toPb() { + GetEffectiveRequestPb pb = new GetEffectiveRequestPb(); + pb.setFullName(fullName); + pb.setPrincipal(principal); + pb.setSecurableType(securableType); + + return pb; + } + + static GetEffectiveRequest fromPb(GetEffectiveRequestPb pb) { + GetEffectiveRequest model = new GetEffectiveRequest(); + model.setFullName(pb.getFullName()); + model.setPrincipal(pb.getPrincipal()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class GetEffectiveRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetEffectiveRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEffectiveRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEffectiveRequestDeserializer + extends JsonDeserializer { + @Override + public GetEffectiveRequest deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEffectiveRequestPb pb = mapper.readValue(p, GetEffectiveRequestPb.class); + return GetEffectiveRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequestPb.java new file mode 100755 index 000000000..bb9a3e949 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get effective permissions */ +@Generated +class GetEffectiveRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("principal") + private String principal; + + @JsonIgnore private SecurableType securableType; + + public GetEffectiveRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetEffectiveRequestPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public GetEffectiveRequestPb setSecurableType(SecurableType securableType) { + this.securableType = securableType; + return this; + } + + public SecurableType getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
GetEffectiveRequestPb that = (GetEffectiveRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(principal, that.principal) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, principal, securableType); + } + + @Override + public String toString() { + return new ToStringer(GetEffectiveRequestPb.class) + .add("fullName", fullName) + .add("principal", principal) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java index 8ac782a05..bb3522b64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an external location */ @Generated +@JsonSerialize(using = GetExternalLocationRequest.GetExternalLocationRequestSerializer.class) +@JsonDeserialize(using = 
GetExternalLocationRequest.GetExternalLocationRequestDeserializer.class) public class GetExternalLocationRequest { /** * Whether to include external locations in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** Name of the external location. */ - @JsonIgnore private String name; + private String name; public GetExternalLocationRequest setIncludeBrowse(Boolean includeBrowse) { this.includeBrowse = includeBrowse; @@ -60,4 +68,43 @@ public String toString() { .add("name", name) .toString(); } + + GetExternalLocationRequestPb toPb() { + GetExternalLocationRequestPb pb = new GetExternalLocationRequestPb(); + pb.setIncludeBrowse(includeBrowse); + pb.setName(name); + + return pb; + } + + static GetExternalLocationRequest fromPb(GetExternalLocationRequestPb pb) { + GetExternalLocationRequest model = new GetExternalLocationRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setName(pb.getName()); + + return model; + } + + public static class GetExternalLocationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExternalLocationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExternalLocationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExternalLocationRequestDeserializer + extends JsonDeserializer { + @Override + public GetExternalLocationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExternalLocationRequestPb pb = mapper.readValue(p, GetExternalLocationRequestPb.class); + return GetExternalLocationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequestPb.java new file mode 100755 index 000000000..d12d26319 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an external location */ +@Generated +class GetExternalLocationRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore private String name; + + public GetExternalLocationRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public GetExternalLocationRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExternalLocationRequestPb that = (GetExternalLocationRequestPb) o; + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, name); + } + + @Override + public String toString() { + return new 
ToStringer(GetExternalLocationRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java index 13d1d7bfd..edeac1fea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java @@ -3,27 +3,35 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a function */ @Generated +@JsonSerialize(using = GetFunctionRequest.GetFunctionRequestSerializer.class) +@JsonDeserialize(using = GetFunctionRequest.GetFunctionRequestDeserializer.class) public class GetFunctionRequest { /** * Whether to include functions in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** * The fully-qualified name of the function (of the form * __catalog_name__.__schema_name__.__function__name__). 
*/ - @JsonIgnore private String name; + private String name; public GetFunctionRequest setIncludeBrowse(Boolean includeBrowse) { this.includeBrowse = includeBrowse; @@ -63,4 +71,40 @@ public String toString() { .add("name", name) .toString(); } + + GetFunctionRequestPb toPb() { + GetFunctionRequestPb pb = new GetFunctionRequestPb(); + pb.setIncludeBrowse(includeBrowse); + pb.setName(name); + + return pb; + } + + static GetFunctionRequest fromPb(GetFunctionRequestPb pb) { + GetFunctionRequest model = new GetFunctionRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setName(pb.getName()); + + return model; + } + + public static class GetFunctionRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetFunctionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetFunctionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetFunctionRequestDeserializer extends JsonDeserializer { + @Override + public GetFunctionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetFunctionRequestPb pb = mapper.readValue(p, GetFunctionRequestPb.class); + return GetFunctionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequestPb.java new file mode 100755 index 000000000..9ee98f8cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a function */ +@Generated +class GetFunctionRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore private String name; + + public GetFunctionRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public GetFunctionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFunctionRequestPb that = (GetFunctionRequestPb) o; + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, name); + } + + @Override + public String toString() { + return new ToStringer(GetFunctionRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java index 1fdab979d..2109cfae6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get permissions */ @Generated +@JsonSerialize(using = GetGrantRequest.GetGrantRequestSerializer.class) +@JsonDeserialize(using = GetGrantRequest.GetGrantRequestDeserializer.class) public class GetGrantRequest { /** Full name of securable. */ - @JsonIgnore private String fullName; + private String fullName; /** If provided, only the permissions for the specified principal (user or group) are returned. */ - @JsonIgnore - @QueryParam("principal") private String principal; /** Type of securable. 
*/ - @JsonIgnore private SecurableType securableType; + private SecurableType securableType; public GetGrantRequest setFullName(String fullName) { this.fullName = fullName; @@ -72,4 +80,42 @@ public String toString() { .add("securableType", securableType) .toString(); } + + GetGrantRequestPb toPb() { + GetGrantRequestPb pb = new GetGrantRequestPb(); + pb.setFullName(fullName); + pb.setPrincipal(principal); + pb.setSecurableType(securableType); + + return pb; + } + + static GetGrantRequest fromPb(GetGrantRequestPb pb) { + GetGrantRequest model = new GetGrantRequest(); + model.setFullName(pb.getFullName()); + model.setPrincipal(pb.getPrincipal()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class GetGrantRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetGrantRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetGrantRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetGrantRequestDeserializer extends JsonDeserializer { + @Override + public GetGrantRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetGrantRequestPb pb = mapper.readValue(p, GetGrantRequestPb.class); + return GetGrantRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequestPb.java new file mode 100755 index 000000000..2399be71f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get permissions */ +@Generated +class GetGrantRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("principal") + private String principal; + + @JsonIgnore private SecurableType securableType; + + public GetGrantRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetGrantRequestPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public GetGrantRequestPb setSecurableType(SecurableType securableType) { + this.securableType = securableType; + return this; + } + + public SecurableType getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetGrantRequestPb that = (GetGrantRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(principal, that.principal) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, principal, securableType); + } + + @Override + public String toString() { + return new ToStringer(GetGrantRequestPb.class) + .add("fullName", fullName) + .add("principal", principal) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java index 7dde48bd5..ae1dc7780 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a metastore */ @Generated +@JsonSerialize(using = GetMetastoreRequest.GetMetastoreRequestSerializer.class) +@JsonDeserialize(using = GetMetastoreRequest.GetMetastoreRequestDeserializer.class) public class GetMetastoreRequest { /** Unique ID of the metastore. 
*/ - @JsonIgnore private String id; + private String id; public GetMetastoreRequest setId(String id) { this.id = id; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetMetastoreRequest.class).add("id", id).toString(); } + + GetMetastoreRequestPb toPb() { + GetMetastoreRequestPb pb = new GetMetastoreRequestPb(); + pb.setId(id); + + return pb; + } + + static GetMetastoreRequest fromPb(GetMetastoreRequestPb pb) { + GetMetastoreRequest model = new GetMetastoreRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetMetastoreRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetMetastoreRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetMetastoreRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetMetastoreRequestDeserializer + extends JsonDeserializer { + @Override + public GetMetastoreRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetMetastoreRequestPb pb = mapper.readValue(p, GetMetastoreRequestPb.class); + return GetMetastoreRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequestPb.java new file mode 100755 index 000000000..89575d447 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a metastore */ +@Generated +class GetMetastoreRequestPb { + @JsonIgnore private String id; + + public GetMetastoreRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMetastoreRequestPb that = (GetMetastoreRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetMetastoreRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java index 34e138f12..451d3d977 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java @@ -4,91 +4,83 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetMetastoreSummaryResponse.GetMetastoreSummaryResponseSerializer.class) +@JsonDeserialize(using = GetMetastoreSummaryResponse.GetMetastoreSummaryResponseDeserializer.class) public class GetMetastoreSummaryResponse { /** Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`). */ - @JsonProperty("cloud") private String cloud; /** Time at which this metastore was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of metastore creator. */ - @JsonProperty("created_by") private String createdBy; /** Unique identifier of the metastore's (Default) Data Access Configuration. */ - @JsonProperty("default_data_access_config_id") private String defaultDataAccessConfigId; /** * The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta * Sharing as the official name. */ - @JsonProperty("delta_sharing_organization_name") private String deltaSharingOrganizationName; /** The lifetime of delta sharing recipient token in seconds. */ - @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") private Long deltaSharingRecipientTokenLifetimeInSeconds; /** The scope of Delta Sharing enabled for the metastore. */ - @JsonProperty("delta_sharing_scope") - private GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Whether to allow non-DBR clients to directly access entities under the metastore. */ - @JsonProperty("external_access_enabled") private Boolean externalAccessEnabled; /** * Globally unique metastore ID across clouds and regions, of the form * `cloud:region:metastore_id`. */ - @JsonProperty("global_metastore_id") private String globalMetastoreId; /** Unique identifier of metastore. 
*/ - @JsonProperty("metastore_id") private String metastoreId; /** The user-specified name of the metastore. */ - @JsonProperty("name") private String name; /** The owner of the metastore. */ - @JsonProperty("owner") private String owner; /** Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). */ - @JsonProperty("privilege_model_version") private String privilegeModelVersion; /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). */ - @JsonProperty("region") private String region; /** The storage root URL for metastore */ - @JsonProperty("storage_root") private String storageRoot; /** UUID of storage credential to access the metastore storage_root. */ - @JsonProperty("storage_root_credential_id") private String storageRootCredentialId; /** Name of the storage credential to access the metastore storage_root. */ - @JsonProperty("storage_root_credential_name") private String storageRootCredentialName; /** Time at which the metastore was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the metastore. 
*/ - @JsonProperty("updated_by") private String updatedBy; public GetMetastoreSummaryResponse setCloud(String cloud) { @@ -148,13 +140,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public GetMetastoreSummaryResponse setDeltaSharingScope( - GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope) { + public GetMetastoreSummaryResponse setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public GetMetastoreSummaryResponseDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } @@ -345,4 +336,78 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + GetMetastoreSummaryResponsePb toPb() { + GetMetastoreSummaryResponsePb pb = new GetMetastoreSummaryResponsePb(); + pb.setCloud(cloud); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDefaultDataAccessConfigId(defaultDataAccessConfigId); + pb.setDeltaSharingOrganizationName(deltaSharingOrganizationName); + pb.setDeltaSharingRecipientTokenLifetimeInSeconds(deltaSharingRecipientTokenLifetimeInSeconds); + pb.setDeltaSharingScope(deltaSharingScope); + pb.setExternalAccessEnabled(externalAccessEnabled); + pb.setGlobalMetastoreId(globalMetastoreId); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setPrivilegeModelVersion(privilegeModelVersion); + pb.setRegion(region); + pb.setStorageRoot(storageRoot); + pb.setStorageRootCredentialId(storageRootCredentialId); + pb.setStorageRootCredentialName(storageRootCredentialName); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static GetMetastoreSummaryResponse fromPb(GetMetastoreSummaryResponsePb pb) { + GetMetastoreSummaryResponse model = new GetMetastoreSummaryResponse(); + model.setCloud(pb.getCloud()); + model.setCreatedAt(pb.getCreatedAt()); + 
model.setCreatedBy(pb.getCreatedBy()); + model.setDefaultDataAccessConfigId(pb.getDefaultDataAccessConfigId()); + model.setDeltaSharingOrganizationName(pb.getDeltaSharingOrganizationName()); + model.setDeltaSharingRecipientTokenLifetimeInSeconds( + pb.getDeltaSharingRecipientTokenLifetimeInSeconds()); + model.setDeltaSharingScope(pb.getDeltaSharingScope()); + model.setExternalAccessEnabled(pb.getExternalAccessEnabled()); + model.setGlobalMetastoreId(pb.getGlobalMetastoreId()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPrivilegeModelVersion(pb.getPrivilegeModelVersion()); + model.setRegion(pb.getRegion()); + model.setStorageRoot(pb.getStorageRoot()); + model.setStorageRootCredentialId(pb.getStorageRootCredentialId()); + model.setStorageRootCredentialName(pb.getStorageRootCredentialName()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class GetMetastoreSummaryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetMetastoreSummaryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetMetastoreSummaryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetMetastoreSummaryResponseDeserializer + extends JsonDeserializer { + @Override + public GetMetastoreSummaryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetMetastoreSummaryResponsePb pb = mapper.readValue(p, GetMetastoreSummaryResponsePb.class); + return GetMetastoreSummaryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java deleted file mode 100755 index 336e0cc06..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** The scope of Delta Sharing enabled for the metastore. */ -@Generated -public enum GetMetastoreSummaryResponseDeltaSharingScope { - INTERNAL, - INTERNAL_AND_EXTERNAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponsePb.java new file mode 100755 index 000000000..f7b9a9ea9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponsePb.java @@ -0,0 +1,323 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetMetastoreSummaryResponsePb { + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("default_data_access_config_id") + private String defaultDataAccessConfigId; + + @JsonProperty("delta_sharing_organization_name") + private String deltaSharingOrganizationName; + + @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") + private Long deltaSharingRecipientTokenLifetimeInSeconds; + + @JsonProperty("delta_sharing_scope") + private DeltaSharingScopeEnum deltaSharingScope; + + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + + @JsonProperty("global_metastore_id") + private String globalMetastoreId; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("privilege_model_version") + private String privilegeModelVersion; + + @JsonProperty("region") + private String region; + + @JsonProperty("storage_root") + private String storageRoot; + + @JsonProperty("storage_root_credential_id") + private String storageRootCredentialId; + + @JsonProperty("storage_root_credential_name") + private String storageRootCredentialName; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public GetMetastoreSummaryResponsePb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public GetMetastoreSummaryResponsePb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long 
getCreatedAt() { + return createdAt; + } + + public GetMetastoreSummaryResponsePb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public GetMetastoreSummaryResponsePb setDefaultDataAccessConfigId( + String defaultDataAccessConfigId) { + this.defaultDataAccessConfigId = defaultDataAccessConfigId; + return this; + } + + public String getDefaultDataAccessConfigId() { + return defaultDataAccessConfigId; + } + + public GetMetastoreSummaryResponsePb setDeltaSharingOrganizationName( + String deltaSharingOrganizationName) { + this.deltaSharingOrganizationName = deltaSharingOrganizationName; + return this; + } + + public String getDeltaSharingOrganizationName() { + return deltaSharingOrganizationName; + } + + public GetMetastoreSummaryResponsePb setDeltaSharingRecipientTokenLifetimeInSeconds( + Long deltaSharingRecipientTokenLifetimeInSeconds) { + this.deltaSharingRecipientTokenLifetimeInSeconds = deltaSharingRecipientTokenLifetimeInSeconds; + return this; + } + + public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { + return deltaSharingRecipientTokenLifetimeInSeconds; + } + + public GetMetastoreSummaryResponsePb setDeltaSharingScope( + DeltaSharingScopeEnum deltaSharingScope) { + this.deltaSharingScope = deltaSharingScope; + return this; + } + + public DeltaSharingScopeEnum getDeltaSharingScope() { + return deltaSharingScope; + } + + public GetMetastoreSummaryResponsePb setExternalAccessEnabled(Boolean externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + + public GetMetastoreSummaryResponsePb setGlobalMetastoreId(String globalMetastoreId) { + this.globalMetastoreId = globalMetastoreId; + return this; + } + + public String getGlobalMetastoreId() { + return globalMetastoreId; + } + + public GetMetastoreSummaryResponsePb setMetastoreId(String 
metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public GetMetastoreSummaryResponsePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetMetastoreSummaryResponsePb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public GetMetastoreSummaryResponsePb setPrivilegeModelVersion(String privilegeModelVersion) { + this.privilegeModelVersion = privilegeModelVersion; + return this; + } + + public String getPrivilegeModelVersion() { + return privilegeModelVersion; + } + + public GetMetastoreSummaryResponsePb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public GetMetastoreSummaryResponsePb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public GetMetastoreSummaryResponsePb setStorageRootCredentialId(String storageRootCredentialId) { + this.storageRootCredentialId = storageRootCredentialId; + return this; + } + + public String getStorageRootCredentialId() { + return storageRootCredentialId; + } + + public GetMetastoreSummaryResponsePb setStorageRootCredentialName( + String storageRootCredentialName) { + this.storageRootCredentialName = storageRootCredentialName; + return this; + } + + public String getStorageRootCredentialName() { + return storageRootCredentialName; + } + + public GetMetastoreSummaryResponsePb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public GetMetastoreSummaryResponsePb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMetastoreSummaryResponsePb that = (GetMetastoreSummaryResponsePb) o; + return Objects.equals(cloud, that.cloud) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(defaultDataAccessConfigId, that.defaultDataAccessConfigId) + && Objects.equals(deltaSharingOrganizationName, that.deltaSharingOrganizationName) + && Objects.equals( + deltaSharingRecipientTokenLifetimeInSeconds, + that.deltaSharingRecipientTokenLifetimeInSeconds) + && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(externalAccessEnabled, that.externalAccessEnabled) + && Objects.equals(globalMetastoreId, that.globalMetastoreId) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(privilegeModelVersion, that.privilegeModelVersion) + && Objects.equals(region, that.region) + && Objects.equals(storageRoot, that.storageRoot) + && Objects.equals(storageRootCredentialId, that.storageRootCredentialId) + && Objects.equals(storageRootCredentialName, that.storageRootCredentialName) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + cloud, + createdAt, + createdBy, + defaultDataAccessConfigId, + deltaSharingOrganizationName, + deltaSharingRecipientTokenLifetimeInSeconds, + deltaSharingScope, + externalAccessEnabled, + globalMetastoreId, + metastoreId, + name, + owner, + privilegeModelVersion, + region, + storageRoot, + storageRootCredentialId, + storageRootCredentialName, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(GetMetastoreSummaryResponsePb.class) + .add("cloud", cloud) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + 
.add("defaultDataAccessConfigId", defaultDataAccessConfigId) + .add("deltaSharingOrganizationName", deltaSharingOrganizationName) + .add( + "deltaSharingRecipientTokenLifetimeInSeconds", + deltaSharingRecipientTokenLifetimeInSeconds) + .add("deltaSharingScope", deltaSharingScope) + .add("externalAccessEnabled", externalAccessEnabled) + .add("globalMetastoreId", globalMetastoreId) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("privilegeModelVersion", privilegeModelVersion) + .add("region", region) + .add("storageRoot", storageRoot) + .add("storageRootCredentialId", storageRootCredentialId) + .add("storageRootCredentialName", storageRootCredentialName) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java index 44e63c4ae..361d33f4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java @@ -3,32 +3,38 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a Model Version */ @Generated +@JsonSerialize(using = GetModelVersionRequest.GetModelVersionRequestSerializer.class) +@JsonDeserialize(using = GetModelVersionRequest.GetModelVersionRequestDeserializer.class) public class GetModelVersionRequest { /** The three-level (fully qualified) name of the model version */ - @JsonIgnore private String fullName; + private String fullName; /** Whether to include aliases associated with the model version in the response */ - @JsonIgnore - @QueryParam("include_aliases") private Boolean includeAliases; /** * Whether to include model versions in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** The integer version number of the model version */ - @JsonIgnore private Long version; + private Long version; public GetModelVersionRequest setFullName(String fullName) { this.fullName = fullName; @@ -91,4 +97,47 @@ public String toString() { .add("version", version) .toString(); } + + GetModelVersionRequestPb toPb() { + GetModelVersionRequestPb pb = new GetModelVersionRequestPb(); + pb.setFullName(fullName); + pb.setIncludeAliases(includeAliases); + pb.setIncludeBrowse(includeBrowse); + pb.setVersion(version); + + return pb; + } + + static GetModelVersionRequest fromPb(GetModelVersionRequestPb pb) { + GetModelVersionRequest model = new GetModelVersionRequest(); + model.setFullName(pb.getFullName()); + model.setIncludeAliases(pb.getIncludeAliases()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class GetModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelVersionRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public GetModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelVersionRequestPb pb = mapper.readValue(p, GetModelVersionRequestPb.class); + return GetModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequestPb.java new file mode 100755 index 000000000..0261ccdd5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Model Version */ +@Generated +class GetModelVersionRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_aliases") + private Boolean includeAliases; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore private Long version; + + public GetModelVersionRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetModelVersionRequestPb setIncludeAliases(Boolean includeAliases) { + this.includeAliases = includeAliases; + return this; + } + + public Boolean getIncludeAliases() { + return includeAliases; + } + + public GetModelVersionRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public GetModelVersionRequestPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelVersionRequestPb that = (GetModelVersionRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeAliases, that.includeAliases) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeAliases, includeBrowse, version); + } + + @Override + public String toString() { + return new ToStringer(GetModelVersionRequestPb.class) + .add("fullName", fullName) + 
.add("includeAliases", includeAliases) + .add("includeBrowse", includeBrowse) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java index 757e31dc6..50f8cec66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an Online Table */ @Generated +@JsonSerialize(using = GetOnlineTableRequest.GetOnlineTableRequestSerializer.class) +@JsonDeserialize(using = GetOnlineTableRequest.GetOnlineTableRequestDeserializer.class) public class GetOnlineTableRequest { /** Full three-part (catalog, schema, table) name of the table. 
*/ - @JsonIgnore private String name; + private String name; public GetOnlineTableRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetOnlineTableRequest.class).add("name", name).toString(); } + + GetOnlineTableRequestPb toPb() { + GetOnlineTableRequestPb pb = new GetOnlineTableRequestPb(); + pb.setName(name); + + return pb; + } + + static GetOnlineTableRequest fromPb(GetOnlineTableRequestPb pb) { + GetOnlineTableRequest model = new GetOnlineTableRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetOnlineTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetOnlineTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetOnlineTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetOnlineTableRequestDeserializer + extends JsonDeserializer { + @Override + public GetOnlineTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetOnlineTableRequestPb pb = mapper.readValue(p, GetOnlineTableRequestPb.class); + return GetOnlineTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequestPb.java new file mode 100755 index 000000000..c4a426457 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an Online Table */ +@Generated +class GetOnlineTableRequestPb { + @JsonIgnore private String name; + + public GetOnlineTableRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOnlineTableRequestPb that = (GetOnlineTableRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetOnlineTableRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java index 66a002053..d2eac1a27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a table monitor */ @Generated +@JsonSerialize(using = GetQualityMonitorRequest.GetQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = GetQualityMonitorRequest.GetQualityMonitorRequestDeserializer.class) public class GetQualityMonitorRequest { /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public GetQualityMonitorRequest setTableName(String tableName) { this.tableName = tableName; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetQualityMonitorRequest.class).add("tableName", tableName).toString(); } + + GetQualityMonitorRequestPb toPb() { + GetQualityMonitorRequestPb pb = new GetQualityMonitorRequestPb(); + pb.setTableName(tableName); + + return pb; + } + + static GetQualityMonitorRequest fromPb(GetQualityMonitorRequestPb pb) { + GetQualityMonitorRequest model = new GetQualityMonitorRequest(); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class GetQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public GetQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQualityMonitorRequestPb pb = mapper.readValue(p, GetQualityMonitorRequestPb.class); + return GetQualityMonitorRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequestPb.java new file mode 100755 index 000000000..62bf7eb93 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a table monitor */ +@Generated +class GetQualityMonitorRequestPb { + @JsonIgnore private String tableName; + + public GetQualityMonitorRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQualityMonitorRequestPb that = (GetQualityMonitorRequestPb) o; + return Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(tableName); + } + + @Override + public String toString() { + return new ToStringer(GetQualityMonitorRequestPb.class).add("tableName", tableName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java index 1b577aedf..f7e5cd1b3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get information for a single resource quota. */ @Generated +@JsonSerialize(using = GetQuotaRequest.GetQuotaRequestSerializer.class) +@JsonDeserialize(using = GetQuotaRequest.GetQuotaRequestDeserializer.class) public class GetQuotaRequest { /** Full name of the parent resource. Provide the metastore ID if the parent is a metastore. */ - @JsonIgnore private String parentFullName; + private String parentFullName; /** Securable type of the quota parent. */ - @JsonIgnore private String parentSecurableType; + private String parentSecurableType; /** Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. 
*/ - @JsonIgnore private String quotaName; + private String quotaName; public GetQuotaRequest setParentFullName(String parentFullName) { this.parentFullName = parentFullName; @@ -69,4 +80,42 @@ public String toString() { .add("quotaName", quotaName) .toString(); } + + GetQuotaRequestPb toPb() { + GetQuotaRequestPb pb = new GetQuotaRequestPb(); + pb.setParentFullName(parentFullName); + pb.setParentSecurableType(parentSecurableType); + pb.setQuotaName(quotaName); + + return pb; + } + + static GetQuotaRequest fromPb(GetQuotaRequestPb pb) { + GetQuotaRequest model = new GetQuotaRequest(); + model.setParentFullName(pb.getParentFullName()); + model.setParentSecurableType(pb.getParentSecurableType()); + model.setQuotaName(pb.getQuotaName()); + + return model; + } + + public static class GetQuotaRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetQuotaRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQuotaRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQuotaRequestDeserializer extends JsonDeserializer { + @Override + public GetQuotaRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQuotaRequestPb pb = mapper.readValue(p, GetQuotaRequestPb.class); + return GetQuotaRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequestPb.java new file mode 100755 index 000000000..ccac87b61 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get information for a single resource quota. */ +@Generated +class GetQuotaRequestPb { + @JsonIgnore private String parentFullName; + + @JsonIgnore private String parentSecurableType; + + @JsonIgnore private String quotaName; + + public GetQuotaRequestPb setParentFullName(String parentFullName) { + this.parentFullName = parentFullName; + return this; + } + + public String getParentFullName() { + return parentFullName; + } + + public GetQuotaRequestPb setParentSecurableType(String parentSecurableType) { + this.parentSecurableType = parentSecurableType; + return this; + } + + public String getParentSecurableType() { + return parentSecurableType; + } + + public GetQuotaRequestPb setQuotaName(String quotaName) { + this.quotaName = quotaName; + return this; + } + + public String getQuotaName() { + return quotaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQuotaRequestPb that = (GetQuotaRequestPb) o; + return Objects.equals(parentFullName, that.parentFullName) + && Objects.equals(parentSecurableType, that.parentSecurableType) + && Objects.equals(quotaName, that.quotaName); + } + + @Override + public int hashCode() { + return Objects.hash(parentFullName, parentSecurableType, quotaName); + } + + @Override + public String toString() { + return new ToStringer(GetQuotaRequestPb.class) + .add("parentFullName", parentFullName) + .add("parentSecurableType", parentSecurableType) + .add("quotaName", quotaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java index 
fc01eb61c..2940fb6eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetQuotaResponse.GetQuotaResponseSerializer.class) +@JsonDeserialize(using = GetQuotaResponse.GetQuotaResponseDeserializer.class) public class GetQuotaResponse { /** The returned QuotaInfo. 
*/ - @JsonProperty("quota_info") private QuotaInfo quotaInfo; public GetQuotaResponse setQuotaInfo(QuotaInfo quotaInfo) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetQuotaResponse.class).add("quotaInfo", quotaInfo).toString(); } + + GetQuotaResponsePb toPb() { + GetQuotaResponsePb pb = new GetQuotaResponsePb(); + pb.setQuotaInfo(quotaInfo); + + return pb; + } + + static GetQuotaResponse fromPb(GetQuotaResponsePb pb) { + GetQuotaResponse model = new GetQuotaResponse(); + model.setQuotaInfo(pb.getQuotaInfo()); + + return model; + } + + public static class GetQuotaResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetQuotaResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQuotaResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQuotaResponseDeserializer extends JsonDeserializer { + @Override + public GetQuotaResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQuotaResponsePb pb = mapper.readValue(p, GetQuotaResponsePb.class); + return GetQuotaResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponsePb.java new file mode 100755 index 000000000..21651e057 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetQuotaResponsePb { + @JsonProperty("quota_info") + private QuotaInfo quotaInfo; + + public GetQuotaResponsePb setQuotaInfo(QuotaInfo quotaInfo) { + this.quotaInfo = quotaInfo; + return this; + } + + public QuotaInfo getQuotaInfo() { + return quotaInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQuotaResponsePb that = (GetQuotaResponsePb) o; + return Objects.equals(quotaInfo, that.quotaInfo); + } + + @Override + public int hashCode() { + return Objects.hash(quotaInfo); + } + + @Override + public String toString() { + return new ToStringer(GetQuotaResponsePb.class).add("quotaInfo", quotaInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java index ef0b7ac1b..65d03083c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get refresh */ @Generated +@JsonSerialize(using = GetRefreshRequest.GetRefreshRequestSerializer.class) +@JsonDeserialize(using = GetRefreshRequest.GetRefreshRequestDeserializer.class) public class GetRefreshRequest { /** ID of the refresh. */ - @JsonIgnore private String refreshId; + private String refreshId; /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public GetRefreshRequest setRefreshId(String refreshId) { this.refreshId = refreshId; @@ -54,4 +65,40 @@ public String toString() { .add("tableName", tableName) .toString(); } + + GetRefreshRequestPb toPb() { + GetRefreshRequestPb pb = new GetRefreshRequestPb(); + pb.setRefreshId(refreshId); + pb.setTableName(tableName); + + return pb; + } + + static GetRefreshRequest fromPb(GetRefreshRequestPb pb) { + GetRefreshRequest model = new GetRefreshRequest(); + model.setRefreshId(pb.getRefreshId()); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class GetRefreshRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetRefreshRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRefreshRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRefreshRequestDeserializer extends JsonDeserializer { + @Override + public GetRefreshRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRefreshRequestPb pb = mapper.readValue(p, GetRefreshRequestPb.class); + return GetRefreshRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequestPb.java new file mode 100755 index 000000000..e7921da3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get refresh */ +@Generated +class GetRefreshRequestPb { + @JsonIgnore private String refreshId; + + @JsonIgnore private String tableName; + + public GetRefreshRequestPb setRefreshId(String refreshId) { + this.refreshId = refreshId; + return this; + } + + public String getRefreshId() { + return refreshId; + } + + public GetRefreshRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRefreshRequestPb that = (GetRefreshRequestPb) o; + return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(refreshId, tableName); + } + + @Override + public String toString() { + return new ToStringer(GetRefreshRequestPb.class) + .add("refreshId", refreshId) + .add("tableName", tableName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java index 7ad6ffcdf..4d70698e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java @@ -3,28 +3,34 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a Registered Model */ @Generated +@JsonSerialize(using = GetRegisteredModelRequest.GetRegisteredModelRequestSerializer.class) +@JsonDeserialize(using = GetRegisteredModelRequest.GetRegisteredModelRequestDeserializer.class) public class GetRegisteredModelRequest { /** The three-level (fully qualified) name of the registered model */ - @JsonIgnore private String fullName; + private String fullName; /** Whether to include registered model aliases in the response */ - @JsonIgnore - @QueryParam("include_aliases") private Boolean includeAliases; /** * Whether to include registered models in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - 
@QueryParam("include_browse") private Boolean includeBrowse; public GetRegisteredModelRequest setFullName(String fullName) { @@ -77,4 +83,45 @@ public String toString() { .add("includeBrowse", includeBrowse) .toString(); } + + GetRegisteredModelRequestPb toPb() { + GetRegisteredModelRequestPb pb = new GetRegisteredModelRequestPb(); + pb.setFullName(fullName); + pb.setIncludeAliases(includeAliases); + pb.setIncludeBrowse(includeBrowse); + + return pb; + } + + static GetRegisteredModelRequest fromPb(GetRegisteredModelRequestPb pb) { + GetRegisteredModelRequest model = new GetRegisteredModelRequest(); + model.setFullName(pb.getFullName()); + model.setIncludeAliases(pb.getIncludeAliases()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + + return model; + } + + public static class GetRegisteredModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRegisteredModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRegisteredModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRegisteredModelRequestDeserializer + extends JsonDeserializer { + @Override + public GetRegisteredModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRegisteredModelRequestPb pb = mapper.readValue(p, GetRegisteredModelRequestPb.class); + return GetRegisteredModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequestPb.java new file mode 100755 index 000000000..8183b8657 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Registered Model */ +@Generated +class GetRegisteredModelRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_aliases") + private Boolean includeAliases; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + public GetRegisteredModelRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetRegisteredModelRequestPb setIncludeAliases(Boolean includeAliases) { + this.includeAliases = includeAliases; + return this; + } + + public Boolean getIncludeAliases() { + return includeAliases; + } + + public GetRegisteredModelRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
GetRegisteredModelRequestPb that = (GetRegisteredModelRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeAliases, that.includeAliases) + && Objects.equals(includeBrowse, that.includeBrowse); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeAliases, includeBrowse); + } + + @Override + public String toString() { + return new ToStringer(GetRegisteredModelRequestPb.class) + .add("fullName", fullName) + .add("includeAliases", includeAliases) + .add("includeBrowse", includeBrowse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java index 11123dd34..0b7ce2f8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java @@ -3,23 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a schema */ @Generated +@JsonSerialize(using = GetSchemaRequest.GetSchemaRequestSerializer.class) +@JsonDeserialize(using = 
GetSchemaRequest.GetSchemaRequestDeserializer.class) public class GetSchemaRequest { /** Full name of the schema. */ - @JsonIgnore private String fullName; + private String fullName; /** * Whether to include schemas in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; public GetSchemaRequest setFullName(String fullName) { @@ -61,4 +69,40 @@ public String toString() { .add("includeBrowse", includeBrowse) .toString(); } + + GetSchemaRequestPb toPb() { + GetSchemaRequestPb pb = new GetSchemaRequestPb(); + pb.setFullName(fullName); + pb.setIncludeBrowse(includeBrowse); + + return pb; + } + + static GetSchemaRequest fromPb(GetSchemaRequestPb pb) { + GetSchemaRequest model = new GetSchemaRequest(); + model.setFullName(pb.getFullName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + + return model; + } + + public static class GetSchemaRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetSchemaRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSchemaRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSchemaRequestDeserializer extends JsonDeserializer { + @Override + public GetSchemaRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSchemaRequestPb pb = mapper.readValue(p, GetSchemaRequestPb.class); + return GetSchemaRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequestPb.java new file mode 100755 index 000000000..7904a79d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a schema */ +@Generated +class GetSchemaRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + public GetSchemaRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetSchemaRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSchemaRequestPb that = (GetSchemaRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeBrowse); + } + + @Override + public String toString() { + return new ToStringer(GetSchemaRequestPb.class) + .add("fullName", fullName) + .add("includeBrowse", 
includeBrowse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java index 6cf81d031..a8114752a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a credential */ @Generated +@JsonSerialize(using = GetStorageCredentialRequest.GetStorageCredentialRequestSerializer.class) +@JsonDeserialize(using = GetStorageCredentialRequest.GetStorageCredentialRequestDeserializer.class) public class GetStorageCredentialRequest { /** Name of the storage credential. 
*/ - @JsonIgnore private String name; + private String name; public GetStorageCredentialRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetStorageCredentialRequest.class).add("name", name).toString(); } + + GetStorageCredentialRequestPb toPb() { + GetStorageCredentialRequestPb pb = new GetStorageCredentialRequestPb(); + pb.setName(name); + + return pb; + } + + static GetStorageCredentialRequest fromPb(GetStorageCredentialRequestPb pb) { + GetStorageCredentialRequest model = new GetStorageCredentialRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetStorageCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetStorageCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStorageCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStorageCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GetStorageCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStorageCredentialRequestPb pb = mapper.readValue(p, GetStorageCredentialRequestPb.class); + return GetStorageCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequestPb.java similarity index 72% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequestPb.java index 4c54608d7..119c3aefa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequestPb.java @@ -7,13 +7,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get a Synced Database Table */ +/** Get a credential */ @Generated -public class GetSyncedDatabaseTableRequest { - /** */ +class GetStorageCredentialRequestPb { @JsonIgnore private String name; - public GetSyncedDatabaseTableRequest setName(String name) { + public GetStorageCredentialRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +25,7 @@ public String getName() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetSyncedDatabaseTableRequest that = (GetSyncedDatabaseTableRequest) o; + GetStorageCredentialRequestPb that = (GetStorageCredentialRequestPb) o; return Objects.equals(name, that.name); } @@ -37,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetSyncedDatabaseTableRequest.class).add("name", name).toString(); + return new ToStringer(GetStorageCredentialRequestPb.class).add("name", 
name).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java index 19d8e5f0e..7394f3619 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java @@ -3,33 +3,37 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a table */ @Generated +@JsonSerialize(using = GetTableRequest.GetTableRequestSerializer.class) +@JsonDeserialize(using = GetTableRequest.GetTableRequestDeserializer.class) public class GetTableRequest { /** Full name of the table. */ - @JsonIgnore private String fullName; + private String fullName; /** * Whether to include tables in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** Whether delta metadata should be included in the response. */ - @JsonIgnore - @QueryParam("include_delta_metadata") private Boolean includeDeltaMetadata; /** Whether to include a manifest containing capabilities the table has. 
*/ - @JsonIgnore - @QueryParam("include_manifest_capabilities") private Boolean includeManifestCapabilities; public GetTableRequest setFullName(String fullName) { @@ -93,4 +97,44 @@ public String toString() { .add("includeManifestCapabilities", includeManifestCapabilities) .toString(); } + + GetTableRequestPb toPb() { + GetTableRequestPb pb = new GetTableRequestPb(); + pb.setFullName(fullName); + pb.setIncludeBrowse(includeBrowse); + pb.setIncludeDeltaMetadata(includeDeltaMetadata); + pb.setIncludeManifestCapabilities(includeManifestCapabilities); + + return pb; + } + + static GetTableRequest fromPb(GetTableRequestPb pb) { + GetTableRequest model = new GetTableRequest(); + model.setFullName(pb.getFullName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setIncludeDeltaMetadata(pb.getIncludeDeltaMetadata()); + model.setIncludeManifestCapabilities(pb.getIncludeManifestCapabilities()); + + return model; + } + + public static class GetTableRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetTableRequestDeserializer extends JsonDeserializer { + @Override + public GetTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetTableRequestPb pb = mapper.readValue(p, GetTableRequestPb.class); + return GetTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequestPb.java new file mode 100755 index 000000000..f60acef85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequestPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a table */ +@Generated +class GetTableRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("include_delta_metadata") + private Boolean includeDeltaMetadata; + + @JsonIgnore + @QueryParam("include_manifest_capabilities") + private Boolean includeManifestCapabilities; + + public GetTableRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetTableRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public GetTableRequestPb setIncludeDeltaMetadata(Boolean includeDeltaMetadata) { + this.includeDeltaMetadata = includeDeltaMetadata; + return this; + } + + public Boolean getIncludeDeltaMetadata() { + return includeDeltaMetadata; + } + + public GetTableRequestPb setIncludeManifestCapabilities(Boolean includeManifestCapabilities) { + 
this.includeManifestCapabilities = includeManifestCapabilities; + return this; + } + + public Boolean getIncludeManifestCapabilities() { + return includeManifestCapabilities; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTableRequestPb that = (GetTableRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata) + && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeBrowse, includeDeltaMetadata, includeManifestCapabilities); + } + + @Override + public String toString() { + return new ToStringer(GetTableRequestPb.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .add("includeDeltaMetadata", includeDeltaMetadata) + .add("includeManifestCapabilities", includeManifestCapabilities) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java index f5e09dfe9..3321f33d5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get catalog workspace bindings */ @Generated +@JsonSerialize(using = GetWorkspaceBindingRequest.GetWorkspaceBindingRequestSerializer.class) +@JsonDeserialize(using = GetWorkspaceBindingRequest.GetWorkspaceBindingRequestDeserializer.class) public class GetWorkspaceBindingRequest { /** The name of the catalog. */ - @JsonIgnore private String name; + private String name; public GetWorkspaceBindingRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetWorkspaceBindingRequest.class).add("name", name).toString(); } + + GetWorkspaceBindingRequestPb toPb() { + GetWorkspaceBindingRequestPb pb = new GetWorkspaceBindingRequestPb(); + pb.setName(name); + + return pb; + } + + static GetWorkspaceBindingRequest fromPb(GetWorkspaceBindingRequestPb pb) { + GetWorkspaceBindingRequest model = new GetWorkspaceBindingRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetWorkspaceBindingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceBindingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceBindingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceBindingRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceBindingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceBindingRequestPb pb = mapper.readValue(p, GetWorkspaceBindingRequestPb.class); + return GetWorkspaceBindingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequestPb.java similarity index 73% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequestPb.java index 3a455fea8..ba15f9013 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequestPb.java @@ -7,13 +7,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a Database Catalog */ +/** Get catalog workspace bindings */ @Generated -public class DeleteDatabaseCatalogRequest { - /** */ +class GetWorkspaceBindingRequestPb { @JsonIgnore private String name; - public DeleteDatabaseCatalogRequest setName(String name) { + public GetWorkspaceBindingRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +25,7 @@ public String getName() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - DeleteDatabaseCatalogRequest that = (DeleteDatabaseCatalogRequest) o; + GetWorkspaceBindingRequestPb that = (GetWorkspaceBindingRequestPb) o; return Objects.equals(name, that.name); } @@ -37,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteDatabaseCatalogRequest.class).add("name", name).toString(); + return new ToStringer(GetWorkspaceBindingRequestPb.class).add("name", name).toString(); } } 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java index 7052a938b..1175b6ed4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java @@ -4,21 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetWorkspaceBindingsResponse.GetWorkspaceBindingsResponseSerializer.class) +@JsonDeserialize( + using = GetWorkspaceBindingsResponse.GetWorkspaceBindingsResponseDeserializer.class) public class GetWorkspaceBindingsResponse { /** List of workspace bindings */ - @JsonProperty("bindings") private Collection bindings; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public GetWorkspaceBindingsResponse setBindings(Collection bindings) { @@ -60,4 +70,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetWorkspaceBindingsResponsePb toPb() { + GetWorkspaceBindingsResponsePb pb = new GetWorkspaceBindingsResponsePb(); + pb.setBindings(bindings); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetWorkspaceBindingsResponse fromPb(GetWorkspaceBindingsResponsePb pb) { + GetWorkspaceBindingsResponse model = new GetWorkspaceBindingsResponse(); + model.setBindings(pb.getBindings()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetWorkspaceBindingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceBindingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceBindingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceBindingsResponseDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceBindingsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceBindingsResponsePb pb = mapper.readValue(p, GetWorkspaceBindingsResponsePb.class); + return GetWorkspaceBindingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponsePb.java new file mode 100755 index 000000000..b0849a44f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetWorkspaceBindingsResponsePb { + @JsonProperty("bindings") + private Collection bindings; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetWorkspaceBindingsResponsePb setBindings(Collection bindings) { + this.bindings = bindings; + return this; + } + + public Collection getBindings() { + return bindings; + } + + public GetWorkspaceBindingsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceBindingsResponsePb that = (GetWorkspaceBindingsResponsePb) o; + return Objects.equals(bindings, that.bindings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(bindings, nextPageToken); + } + + @Override + public String toString() { + 
return new ToStringer(GetWorkspaceBindingsResponsePb.class) + .add("bindings", bindings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java index 1a8219ede..f4cb09a75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java @@ -24,7 +24,7 @@ public PermissionsList get(GetGrantRequest request) { request.getSecurableType(), request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PermissionsList.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public EffectivePermissionsList getEffective(GetEffectiveRequest request) { request.getSecurableType(), request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, EffectivePermissionsList.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public PermissionsList update(UpdatePermissions request) { request.getSecurableType(), request.getFullName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PermissionsList.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java index fad85ca91..7181125fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get all workspaces assigned to a metastore */ @Generated +@JsonSerialize( + using = + ListAccountMetastoreAssignmentsRequest.ListAccountMetastoreAssignmentsRequestSerializer + .class) +@JsonDeserialize( + using = + ListAccountMetastoreAssignmentsRequest.ListAccountMetastoreAssignmentsRequestDeserializer + .class) public class ListAccountMetastoreAssignmentsRequest { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; public ListAccountMetastoreAssignmentsRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -41,4 +58,45 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + ListAccountMetastoreAssignmentsRequestPb toPb() { + ListAccountMetastoreAssignmentsRequestPb pb = new ListAccountMetastoreAssignmentsRequestPb(); + pb.setMetastoreId(metastoreId); + + 
return pb; + } + + static ListAccountMetastoreAssignmentsRequest fromPb( + ListAccountMetastoreAssignmentsRequestPb pb) { + ListAccountMetastoreAssignmentsRequest model = new ListAccountMetastoreAssignmentsRequest(); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class ListAccountMetastoreAssignmentsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountMetastoreAssignmentsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListAccountMetastoreAssignmentsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountMetastoreAssignmentsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAccountMetastoreAssignmentsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountMetastoreAssignmentsRequestPb pb = + mapper.readValue(p, ListAccountMetastoreAssignmentsRequestPb.class); + return ListAccountMetastoreAssignmentsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequestPb.java new file mode 100755 index 000000000..4849d7d07 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get all workspaces assigned to a metastore */ +@Generated +class ListAccountMetastoreAssignmentsRequestPb { + @JsonIgnore private String metastoreId; + + public ListAccountMetastoreAssignmentsRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountMetastoreAssignmentsRequestPb that = (ListAccountMetastoreAssignmentsRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId); + } + + @Override + public String toString() { + return new ToStringer(ListAccountMetastoreAssignmentsRequestPb.class) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java index 5ca1d4263..738153b74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java @@ -4,15 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The list of workspaces to which the given metastore is assigned. */ @Generated +@JsonSerialize( + using = + ListAccountMetastoreAssignmentsResponse.ListAccountMetastoreAssignmentsResponseSerializer + .class) +@JsonDeserialize( + using = + ListAccountMetastoreAssignmentsResponse.ListAccountMetastoreAssignmentsResponseDeserializer + .class) public class ListAccountMetastoreAssignmentsResponse { /** */ - @JsonProperty("workspace_ids") private Collection workspaceIds; public ListAccountMetastoreAssignmentsResponse setWorkspaceIds(Collection workspaceIds) { @@ -43,4 +59,45 @@ public String toString() { .add("workspaceIds", workspaceIds) .toString(); } + + ListAccountMetastoreAssignmentsResponsePb toPb() { + ListAccountMetastoreAssignmentsResponsePb pb = new ListAccountMetastoreAssignmentsResponsePb(); + pb.setWorkspaceIds(workspaceIds); + + return pb; + } + + static ListAccountMetastoreAssignmentsResponse fromPb( + ListAccountMetastoreAssignmentsResponsePb pb) { + ListAccountMetastoreAssignmentsResponse model = new ListAccountMetastoreAssignmentsResponse(); + model.setWorkspaceIds(pb.getWorkspaceIds()); + + return model; + } + + public static class ListAccountMetastoreAssignmentsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountMetastoreAssignmentsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListAccountMetastoreAssignmentsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
ListAccountMetastoreAssignmentsResponseDeserializer + extends JsonDeserializer { + @Override + public ListAccountMetastoreAssignmentsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountMetastoreAssignmentsResponsePb pb = + mapper.readValue(p, ListAccountMetastoreAssignmentsResponsePb.class); + return ListAccountMetastoreAssignmentsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponsePb.java new file mode 100755 index 000000000..77e9414ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The list of workspaces to which the given metastore is assigned. 
*/ +@Generated +class ListAccountMetastoreAssignmentsResponsePb { + @JsonProperty("workspace_ids") + private Collection workspaceIds; + + public ListAccountMetastoreAssignmentsResponsePb setWorkspaceIds(Collection workspaceIds) { + this.workspaceIds = workspaceIds; + return this; + } + + public Collection getWorkspaceIds() { + return workspaceIds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountMetastoreAssignmentsResponsePb that = (ListAccountMetastoreAssignmentsResponsePb) o; + return Objects.equals(workspaceIds, that.workspaceIds); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceIds); + } + + @Override + public String toString() { + return new ToStringer(ListAccountMetastoreAssignmentsResponsePb.class) + .add("workspaceIds", workspaceIds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java index b38c46a54..4a06849df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get all storage credentials assigned to a metastore */ @Generated +@JsonSerialize( + using = + ListAccountStorageCredentialsRequest.ListAccountStorageCredentialsRequestSerializer.class) +@JsonDeserialize( + using = + ListAccountStorageCredentialsRequest.ListAccountStorageCredentialsRequestDeserializer.class) public class ListAccountStorageCredentialsRequest { /** Unity Catalog metastore ID */ - @JsonIgnore private String metastoreId; + private String metastoreId; public ListAccountStorageCredentialsRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -41,4 +56,42 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + ListAccountStorageCredentialsRequestPb toPb() { + ListAccountStorageCredentialsRequestPb pb = new ListAccountStorageCredentialsRequestPb(); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static ListAccountStorageCredentialsRequest fromPb(ListAccountStorageCredentialsRequestPb pb) { + ListAccountStorageCredentialsRequest model = new ListAccountStorageCredentialsRequest(); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class ListAccountStorageCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountStorageCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountStorageCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountStorageCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAccountStorageCredentialsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountStorageCredentialsRequestPb pb = + mapper.readValue(p, ListAccountStorageCredentialsRequestPb.class); + return ListAccountStorageCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequestPb.java new file mode 100755 index 000000000..cc3d17c4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get all storage credentials assigned to a metastore */ +@Generated +class ListAccountStorageCredentialsRequestPb { + @JsonIgnore private String metastoreId; + + public ListAccountStorageCredentialsRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountStorageCredentialsRequestPb that = (ListAccountStorageCredentialsRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId); + } + + @Override + public String toString() { + return new ToStringer(ListAccountStorageCredentialsRequestPb.class) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java index a5da186e4..51917452d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListAccountStorageCredentialsResponse.ListAccountStorageCredentialsResponseSerializer.class) +@JsonDeserialize( + using = + ListAccountStorageCredentialsResponse.ListAccountStorageCredentialsResponseDeserializer + .class) public class ListAccountStorageCredentialsResponse { /** An array of metastore storage credentials. 
*/ - @JsonProperty("storage_credentials") private Collection storageCredentials; public ListAccountStorageCredentialsResponse setStorageCredentials( @@ -43,4 +58,42 @@ public String toString() { .add("storageCredentials", storageCredentials) .toString(); } + + ListAccountStorageCredentialsResponsePb toPb() { + ListAccountStorageCredentialsResponsePb pb = new ListAccountStorageCredentialsResponsePb(); + pb.setStorageCredentials(storageCredentials); + + return pb; + } + + static ListAccountStorageCredentialsResponse fromPb(ListAccountStorageCredentialsResponsePb pb) { + ListAccountStorageCredentialsResponse model = new ListAccountStorageCredentialsResponse(); + model.setStorageCredentials(pb.getStorageCredentials()); + + return model; + } + + public static class ListAccountStorageCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountStorageCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountStorageCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountStorageCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public ListAccountStorageCredentialsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountStorageCredentialsResponsePb pb = + mapper.readValue(p, ListAccountStorageCredentialsResponsePb.class); + return ListAccountStorageCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponsePb.java new file mode 100755 index 000000000..6da954515 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAccountStorageCredentialsResponsePb { + @JsonProperty("storage_credentials") + private Collection storageCredentials; + + public ListAccountStorageCredentialsResponsePb setStorageCredentials( + Collection storageCredentials) { + this.storageCredentials = storageCredentials; + return this; + } + + public Collection getStorageCredentials() { + return storageCredentials; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountStorageCredentialsResponsePb that = (ListAccountStorageCredentialsResponsePb) o; + return Objects.equals(storageCredentials, that.storageCredentials); + } + + @Override + public int hashCode() { + return Objects.hash(storageCredentials); + } + + @Override + public String toString() { + return new ToStringer(ListAccountStorageCredentialsResponsePb.class) + .add("storageCredentials", storageCredentials) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java index cb6be8a63..0d7e08804 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List catalogs */ @Generated +@JsonSerialize(using = ListCatalogsRequest.ListCatalogsRequestSerializer.class) +@JsonDeserialize(using = ListCatalogsRequest.ListCatalogsRequestDeserializer.class) public class ListCatalogsRequest { /** * Whether to include catalogs in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -28,13 +36,9 @@ public class ListCatalogsRequest { * max_results size, even zero. The only definitive indication that no further catalogs can be * fetched is when the next_page_token is unset from the response. 
*/ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListCatalogsRequest setIncludeBrowse(Boolean includeBrowse) { @@ -87,4 +91,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListCatalogsRequestPb toPb() { + ListCatalogsRequestPb pb = new ListCatalogsRequestPb(); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListCatalogsRequest fromPb(ListCatalogsRequestPb pb) { + ListCatalogsRequest model = new ListCatalogsRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListCatalogsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListCatalogsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCatalogsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCatalogsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCatalogsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCatalogsRequestPb pb = mapper.readValue(p, ListCatalogsRequestPb.class); + return ListCatalogsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequestPb.java new file mode 100755 index 000000000..67a124115 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List catalogs */ +@Generated +class ListCatalogsRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListCatalogsRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListCatalogsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListCatalogsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCatalogsRequestPb that = (ListCatalogsRequestPb) o; + return Objects.equals(includeBrowse, 
that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCatalogsRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java index 314b99cbd..cc0d5f9f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListCatalogsResponse.ListCatalogsResponseSerializer.class) +@JsonDeserialize(using = ListCatalogsResponse.ListCatalogsResponseDeserializer.class) public class ListCatalogsResponse { /** An array of catalog information objects. 
*/ - @JsonProperty("catalogs") private Collection catalogs; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; public ListCatalogsResponse setCatalogs(Collection catalogs) { @@ -60,4 +69,42 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListCatalogsResponsePb toPb() { + ListCatalogsResponsePb pb = new ListCatalogsResponsePb(); + pb.setCatalogs(catalogs); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListCatalogsResponse fromPb(ListCatalogsResponsePb pb) { + ListCatalogsResponse model = new ListCatalogsResponse(); + model.setCatalogs(pb.getCatalogs()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListCatalogsResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListCatalogsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCatalogsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCatalogsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCatalogsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCatalogsResponsePb pb = mapper.readValue(p, ListCatalogsResponsePb.class); + return ListCatalogsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponsePb.java new file mode 100755 index 000000000..9a3a695ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCatalogsResponsePb { + @JsonProperty("catalogs") + private Collection catalogs; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListCatalogsResponsePb setCatalogs(Collection catalogs) { + this.catalogs = catalogs; + return this; + } + + public Collection getCatalogs() { + return catalogs; + } + + public ListCatalogsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCatalogsResponsePb that = (ListCatalogsResponsePb) o; + return Objects.equals(catalogs, that.catalogs) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(catalogs, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCatalogsResponsePb.class) + .add("catalogs", catalogs) + 
.add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java index 2627b4aef..25df00035 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List connections */ @Generated +@JsonSerialize(using = ListConnectionsRequest.ListConnectionsRequestSerializer.class) +@JsonDeserialize(using = ListConnectionsRequest.ListConnectionsRequestDeserializer.class) public class ListConnectionsRequest { /** * Maximum number of connections to return. - If not set, all connections are returned (not @@ -18,13 +28,9 @@ public class ListConnectionsRequest { * configured value (recommended); - when set to a value less than 0, an invalid parameter error * is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListConnectionsRequest setMaxResults(Long maxResults) { @@ -65,4 +71,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListConnectionsRequestPb toPb() { + ListConnectionsRequestPb pb = new ListConnectionsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListConnectionsRequest fromPb(ListConnectionsRequestPb pb) { + ListConnectionsRequest model = new ListConnectionsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListConnectionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListConnectionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListConnectionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListConnectionsRequestDeserializer + extends JsonDeserializer { + @Override + public ListConnectionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListConnectionsRequestPb pb = mapper.readValue(p, ListConnectionsRequestPb.class); + return ListConnectionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequestPb.java new file mode 100755 index 000000000..dbafda352 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List connections */ +@Generated +class ListConnectionsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListConnectionsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListConnectionsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListConnectionsRequestPb that = (ListConnectionsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListConnectionsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java index 51088a406..0c54df090 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListConnectionsResponse.ListConnectionsResponseSerializer.class) +@JsonDeserialize(using = ListConnectionsResponse.ListConnectionsResponseDeserializer.class) public class ListConnectionsResponse { /** An array of connection information objects. */ - @JsonProperty("connections") private Collection connections; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListConnectionsResponse setConnections(Collection connections) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListConnectionsResponsePb toPb() { + ListConnectionsResponsePb pb = new ListConnectionsResponsePb(); + pb.setConnections(connections); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListConnectionsResponse fromPb(ListConnectionsResponsePb pb) { + ListConnectionsResponse model = new ListConnectionsResponse(); + model.setConnections(pb.getConnections()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListConnectionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListConnectionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListConnectionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListConnectionsResponseDeserializer + extends JsonDeserializer { + @Override + public ListConnectionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListConnectionsResponsePb pb = mapper.readValue(p, ListConnectionsResponsePb.class); + return ListConnectionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponsePb.java new file mode 100755 index 000000000..da583a8d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListConnectionsResponsePb { + @JsonProperty("connections") + private Collection connections; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListConnectionsResponsePb setConnections(Collection connections) { + this.connections = connections; + return this; + } + + public Collection getConnections() { + return connections; + } + + public ListConnectionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListConnectionsResponsePb that = (ListConnectionsResponsePb) o; + return Objects.equals(connections, that.connections) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(connections, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListConnectionsResponsePb.class) + .add("connections", connections) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java index 775a697e7..3f5388318 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.catalog; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List credentials */ @Generated +@JsonSerialize(using = ListCredentialsRequest.ListCredentialsRequestSerializer.class) +@JsonDeserialize(using = ListCredentialsRequest.ListCredentialsRequestDeserializer.class) public class ListCredentialsRequest { /** * Maximum number of credentials to return. - If not set, the default max page size is used. - @@ -17,18 +27,12 @@ public class ListCredentialsRequest { * server-configured value. - When set to 0, the page length is set to a server-configured value * (recommended). - When set to a value less than 0, an invalid parameter error is returned. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque token to retrieve the next page of results. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Return only credentials for the specified purpose. 
*/ - @JsonIgnore - @QueryParam("purpose") private CredentialPurpose purpose; public ListCredentialsRequest setMaxResults(Long maxResults) { @@ -81,4 +85,45 @@ public String toString() { .add("purpose", purpose) .toString(); } + + ListCredentialsRequestPb toPb() { + ListCredentialsRequestPb pb = new ListCredentialsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setPurpose(purpose); + + return pb; + } + + static ListCredentialsRequest fromPb(ListCredentialsRequestPb pb) { + ListCredentialsRequest model = new ListCredentialsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setPurpose(pb.getPurpose()); + + return model; + } + + public static class ListCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCredentialsRequestPb pb = mapper.readValue(p, ListCredentialsRequestPb.class); + return ListCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequestPb.java new file mode 100755 index 000000000..cb9d5e87d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List credentials */ +@Generated +class ListCredentialsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("purpose") + private CredentialPurpose purpose; + + public ListCredentialsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListCredentialsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListCredentialsRequestPb setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsRequestPb that = (ListCredentialsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(purpose, that.purpose); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, purpose); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("purpose", purpose) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java index 
79ebd1904..ddef2bf18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListCredentialsResponse.ListCredentialsResponseSerializer.class) +@JsonDeserialize(using = ListCredentialsResponse.ListCredentialsResponseDeserializer.class) public class ListCredentialsResponse { /** */ - @JsonProperty("credentials") private Collection credentials; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListCredentialsResponse setCredentials(Collection credentials) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListCredentialsResponsePb toPb() { + ListCredentialsResponsePb pb = new ListCredentialsResponsePb(); + pb.setCredentials(credentials); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListCredentialsResponse fromPb(ListCredentialsResponsePb pb) { + ListCredentialsResponse model = new ListCredentialsResponse(); + model.setCredentials(pb.getCredentials()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCredentialsResponsePb pb = mapper.readValue(p, ListCredentialsResponsePb.class); + return ListCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponsePb.java new file mode 100755 index 000000000..1425f5e1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCredentialsResponsePb { + @JsonProperty("credentials") + private Collection credentials; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListCredentialsResponsePb setCredentials(Collection credentials) { + this.credentials = credentials; + return this; + } + + public Collection getCredentials() { + return credentials; + } + + public ListCredentialsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsResponsePb that = (ListCredentialsResponsePb) o; + return Objects.equals(credentials, that.credentials) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(credentials, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsResponsePb.class) + .add("credentials", credentials) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java index 35c71f7a1..55f605500 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java @@ -3,20 +3,29 @@ package com.databricks.sdk.service.catalog; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List external locations */ @Generated +@JsonSerialize(using = ListExternalLocationsRequest.ListExternalLocationsRequestSerializer.class) +@JsonDeserialize( + using = ListExternalLocationsRequest.ListExternalLocationsRequestDeserializer.class) public class ListExternalLocationsRequest { /** * Whether to include external locations in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -26,13 +35,9 @@ public class ListExternalLocationsRequest { * a server configured value (recommended); - when set to a value less than 0, an invalid * parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListExternalLocationsRequest setIncludeBrowse(Boolean includeBrowse) { @@ -85,4 +90,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListExternalLocationsRequestPb toPb() { + ListExternalLocationsRequestPb pb = new ListExternalLocationsRequestPb(); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListExternalLocationsRequest fromPb(ListExternalLocationsRequestPb pb) { + ListExternalLocationsRequest model = new ListExternalLocationsRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListExternalLocationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExternalLocationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExternalLocationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExternalLocationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListExternalLocationsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExternalLocationsRequestPb pb = mapper.readValue(p, ListExternalLocationsRequestPb.class); + return ListExternalLocationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequestPb.java new file mode 100755 index 000000000..eef0658dc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List external locations */ +@Generated +class ListExternalLocationsRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExternalLocationsRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListExternalLocationsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListExternalLocationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + ListExternalLocationsRequestPb that = (ListExternalLocationsRequestPb) o; + return Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalLocationsRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java index f43f138f4..354e14042 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java @@ -4,21 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListExternalLocationsResponse.ListExternalLocationsResponseSerializer.class) +@JsonDeserialize( + using = 
ListExternalLocationsResponse.ListExternalLocationsResponseDeserializer.class) public class ListExternalLocationsResponse { /** An array of external locations. */ - @JsonProperty("external_locations") private Collection externalLocations; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; public ListExternalLocationsResponse setExternalLocations( @@ -61,4 +71,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListExternalLocationsResponsePb toPb() { + ListExternalLocationsResponsePb pb = new ListExternalLocationsResponsePb(); + pb.setExternalLocations(externalLocations); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListExternalLocationsResponse fromPb(ListExternalLocationsResponsePb pb) { + ListExternalLocationsResponse model = new ListExternalLocationsResponse(); + model.setExternalLocations(pb.getExternalLocations()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListExternalLocationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExternalLocationsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExternalLocationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExternalLocationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListExternalLocationsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExternalLocationsResponsePb pb = + mapper.readValue(p, ListExternalLocationsResponsePb.class); + return ListExternalLocationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponsePb.java new file mode 100755 index 000000000..fb73b1707 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListExternalLocationsResponsePb { + @JsonProperty("external_locations") + private Collection externalLocations; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExternalLocationsResponsePb setExternalLocations( + Collection externalLocations) { + this.externalLocations = externalLocations; + return this; + } + + public Collection getExternalLocations() { + return externalLocations; + } + + public ListExternalLocationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExternalLocationsResponsePb that = (ListExternalLocationsResponsePb) o; + return Objects.equals(externalLocations, that.externalLocations) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int 
hashCode() { + return Objects.hash(externalLocations, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalLocationsResponsePb.class) + .add("externalLocations", externalLocations) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java index 039752d25..a29b47e0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List functions */ @Generated +@JsonSerialize(using = ListFunctionsRequest.ListFunctionsRequestSerializer.class) +@JsonDeserialize(using = ListFunctionsRequest.ListFunctionsRequestDeserializer.class) public class ListFunctionsRequest { /** Name of parent catalog for functions of interest. 
*/ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** * Whether to include functions in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -31,18 +37,12 @@ public class ListFunctionsRequest { * configured value (recommended); - when set to a value less than 0, an invalid parameter error * is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Parent schema of functions. */ - @JsonIgnore - @QueryParam("schema_name") private String schemaName; public ListFunctionsRequest setCatalogName(String catalogName) { @@ -117,4 +117,48 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + ListFunctionsRequestPb toPb() { + ListFunctionsRequestPb pb = new ListFunctionsRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setSchemaName(schemaName); + + return pb; + } + + static ListFunctionsRequest fromPb(ListFunctionsRequestPb pb) { + ListFunctionsRequest model = new ListFunctionsRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class ListFunctionsRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListFunctionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFunctionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFunctionsRequestDeserializer + extends JsonDeserializer { + @Override + public 
ListFunctionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFunctionsRequestPb pb = mapper.readValue(p, ListFunctionsRequestPb.class); + return ListFunctionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequestPb.java new file mode 100755 index 000000000..57f9ef810 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List functions */ +@Generated +class ListFunctionsRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("schema_name") + private String schemaName; + + public ListFunctionsRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListFunctionsRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListFunctionsRequestPb setMaxResults(Long maxResults) 
{ + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListFunctionsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListFunctionsRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFunctionsRequestPb that = (ListFunctionsRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName); + } + + @Override + public String toString() { + return new ToStringer(ListFunctionsRequestPb.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java index b88b18e14..6ef902c9a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListFunctionsResponse.ListFunctionsResponseSerializer.class) +@JsonDeserialize(using = ListFunctionsResponse.ListFunctionsResponseDeserializer.class) public class ListFunctionsResponse { /** An array of function information objects. */ - @JsonProperty("functions") private Collection functions; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListFunctionsResponse setFunctions(Collection functions) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListFunctionsResponsePb toPb() { + ListFunctionsResponsePb pb = new ListFunctionsResponsePb(); + pb.setFunctions(functions); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListFunctionsResponse fromPb(ListFunctionsResponsePb pb) { + ListFunctionsResponse model = new ListFunctionsResponse(); + model.setFunctions(pb.getFunctions()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListFunctionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFunctionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFunctionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFunctionsResponseDeserializer + extends JsonDeserializer { + @Override + public ListFunctionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFunctionsResponsePb pb = mapper.readValue(p, ListFunctionsResponsePb.class); + return ListFunctionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponsePb.java new file mode 100755 index 000000000..e3542f5e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListFunctionsResponsePb { + @JsonProperty("functions") + private Collection functions; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListFunctionsResponsePb setFunctions(Collection functions) { + this.functions = functions; + return this; + } + + public Collection getFunctions() { + return functions; + } + + public ListFunctionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFunctionsResponsePb that = (ListFunctionsResponsePb) o; + return Objects.equals(functions, that.functions) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(functions, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFunctionsResponsePb.class) + .add("functions", functions) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java new file mode 100755 index 000000000..d1fb8b6fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** List metastores */ +@Generated +@JsonSerialize(using = ListMetastoresRequest.ListMetastoresRequestSerializer.class) +@JsonDeserialize(using = ListMetastoresRequest.ListMetastoresRequestDeserializer.class) +public class ListMetastoresRequest { + /** + * Maximum number of metastores to return. - when set to a value greater than 0, the page length + * is the minimum of this value and a server configured value; - when set to 0, the page length is + * set to a server configured value (recommended); - when set to a value less than 0, an invalid + * parameter error is returned; - If not set, all the metastores are returned (not recommended). - + * Note: The number of returned metastores might be less than the specified max_results size, even + * zero. The only definitive indication that no further metastores can be fetched is when the + * next_page_token is unset from the response. + */ + private Long maxResults; + + /** Opaque pagination token to go to next page based on previous query. 
*/ + private String pageToken; + + public ListMetastoresRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListMetastoresRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMetastoresRequest that = (ListMetastoresRequest) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMetastoresRequest.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } + + ListMetastoresRequestPb toPb() { + ListMetastoresRequestPb pb = new ListMetastoresRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListMetastoresRequest fromPb(ListMetastoresRequestPb pb) { + ListMetastoresRequest model = new ListMetastoresRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListMetastoresRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListMetastoresRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListMetastoresRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListMetastoresRequestDeserializer + extends JsonDeserializer { + @Override + public ListMetastoresRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListMetastoresRequestPb pb = mapper.readValue(p, ListMetastoresRequestPb.class); + return ListMetastoresRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequestPb.java new file mode 100755 index 000000000..47fd8e3a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List metastores */ +@Generated +class ListMetastoresRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListMetastoresRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListMetastoresRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMetastoresRequestPb that = (ListMetastoresRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMetastoresRequestPb.class) + 
.add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java index 257aa2443..251417957 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java @@ -4,16 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListMetastoresResponse.ListMetastoresResponseSerializer.class) +@JsonDeserialize(using = ListMetastoresResponse.ListMetastoresResponseDeserializer.class) public class ListMetastoresResponse { /** An array of metastore information objects. */ - @JsonProperty("metastores") private Collection metastores; + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * __page_token__ should be set to this value for the next request (for the next page of results). 
+ */ + private String nextPageToken; + public ListMetastoresResponse setMetastores(Collection metastores) { this.metastores = metastores; return this; @@ -23,21 +39,73 @@ public Collection getMetastores() { return metastores; } + public ListMetastoresResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListMetastoresResponse that = (ListMetastoresResponse) o; - return Objects.equals(metastores, that.metastores); + return Objects.equals(metastores, that.metastores) + && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(metastores); + return Objects.hash(metastores, nextPageToken); } @Override public String toString() { - return new ToStringer(ListMetastoresResponse.class).add("metastores", metastores).toString(); + return new ToStringer(ListMetastoresResponse.class) + .add("metastores", metastores) + .add("nextPageToken", nextPageToken) + .toString(); + } + + ListMetastoresResponsePb toPb() { + ListMetastoresResponsePb pb = new ListMetastoresResponsePb(); + pb.setMetastores(metastores); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListMetastoresResponse fromPb(ListMetastoresResponsePb pb) { + ListMetastoresResponse model = new ListMetastoresResponse(); + model.setMetastores(pb.getMetastores()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListMetastoresResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListMetastoresResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListMetastoresResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListMetastoresResponseDeserializer 
+ extends JsonDeserializer { + @Override + public ListMetastoresResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListMetastoresResponsePb pb = mapper.readValue(p, ListMetastoresResponsePb.class); + return ListMetastoresResponse.fromPb(pb); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponsePb.java new file mode 100755 index 000000000..65d053774 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListMetastoresResponsePb { + @JsonProperty("metastores") + private Collection metastores; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListMetastoresResponsePb setMetastores(Collection metastores) { + this.metastores = metastores; + return this; + } + + public Collection getMetastores() { + return metastores; + } + + public ListMetastoresResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMetastoresResponsePb that = (ListMetastoresResponsePb) o; + return Objects.equals(metastores, that.metastores) + && 
Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(metastores, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMetastoresResponsePb.class) + .add("metastores", metastores) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java index 6364b1653..006c2bdb0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java @@ -3,23 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List Model Versions */ @Generated +@JsonSerialize(using = ListModelVersionsRequest.ListModelVersionsRequestSerializer.class) +@JsonDeserialize(using = ListModelVersionsRequest.ListModelVersionsRequestDeserializer.class) public class ListModelVersionsRequest { /** The full three-level name of the registered model under which to list model versions */ - @JsonIgnore 
private String fullName; + private String fullName; /** * Whether to include model versions in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -29,13 +37,9 @@ public class ListModelVersionsRequest { * 0, the page length is set to a server configured value (100, as of 1/3/2024) (recommended); - * when set to a value less than 0, an invalid parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListModelVersionsRequest setFullName(String fullName) { @@ -99,4 +103,47 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListModelVersionsRequestPb toPb() { + ListModelVersionsRequestPb pb = new ListModelVersionsRequestPb(); + pb.setFullName(fullName); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListModelVersionsRequest fromPb(ListModelVersionsRequestPb pb) { + ListModelVersionsRequest model = new ListModelVersionsRequest(); + model.setFullName(pb.getFullName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListModelVersionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListModelVersionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListModelVersionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListModelVersionsRequestDeserializer + extends JsonDeserializer { + @Override + public ListModelVersionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListModelVersionsRequestPb pb = mapper.readValue(p, ListModelVersionsRequestPb.class); + return ListModelVersionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequestPb.java new file mode 100755 index 000000000..9637023fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequestPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Model Versions */ +@Generated +class ListModelVersionsRequestPb { + @JsonIgnore private String fullName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListModelVersionsRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public ListModelVersionsRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListModelVersionsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListModelVersionsRequestPb setPageToken(String pageToken) { + 
this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListModelVersionsRequestPb that = (ListModelVersionsRequestPb) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeBrowse, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListModelVersionsRequestPb.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java index 33959df88..131f2ee27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListModelVersionsResponse.ListModelVersionsResponseSerializer.class) +@JsonDeserialize(using = ListModelVersionsResponse.ListModelVersionsResponseDeserializer.class) public class ListModelVersionsResponse { /** */ - @JsonProperty("model_versions") private Collection modelVersions; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; public ListModelVersionsResponse setModelVersions(Collection modelVersions) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListModelVersionsResponsePb toPb() { + ListModelVersionsResponsePb pb = new ListModelVersionsResponsePb(); + pb.setModelVersions(modelVersions); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListModelVersionsResponse fromPb(ListModelVersionsResponsePb pb) { + ListModelVersionsResponse model = new ListModelVersionsResponse(); + model.setModelVersions(pb.getModelVersions()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListModelVersionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListModelVersionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListModelVersionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListModelVersionsResponseDeserializer + extends JsonDeserializer { + @Override + public ListModelVersionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListModelVersionsResponsePb pb = mapper.readValue(p, ListModelVersionsResponsePb.class); + return ListModelVersionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponsePb.java new file mode 100755 index 000000000..5fb22ef72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListModelVersionsResponsePb { + @JsonProperty("model_versions") + private Collection modelVersions; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListModelVersionsResponsePb setModelVersions(Collection modelVersions) { + this.modelVersions = modelVersions; + return this; + } + + public Collection getModelVersions() { + return modelVersions; + } + + public ListModelVersionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListModelVersionsResponsePb that = (ListModelVersionsResponsePb) o; + return Objects.equals(modelVersions, that.modelVersions) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersions, nextPageToken); + } + + @Override + public 
String toString() { + return new ToStringer(ListModelVersionsResponsePb.class) + .add("modelVersions", modelVersions) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java index 0fa5831ae..cfd420016 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all resource quotas under a metastore. */ @Generated +@JsonSerialize(using = ListQuotasRequest.ListQuotasRequestSerializer.class) +@JsonDeserialize(using = ListQuotasRequest.ListQuotasRequestDeserializer.class) public class ListQuotasRequest { /** The number of quotas to return. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque token for the next page of results. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListQuotasRequest setMaxResults(Long maxResults) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListQuotasRequestPb toPb() { + ListQuotasRequestPb pb = new ListQuotasRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListQuotasRequest fromPb(ListQuotasRequestPb pb) { + ListQuotasRequest model = new ListQuotasRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListQuotasRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListQuotasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQuotasRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQuotasRequestDeserializer extends JsonDeserializer { + @Override + public ListQuotasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQuotasRequestPb pb = mapper.readValue(p, ListQuotasRequestPb.class); + return ListQuotasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequestPb.java new file mode 100755 index 000000000..b330ee80c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all resource quotas under a metastore. */ +@Generated +class ListQuotasRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListQuotasRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListQuotasRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQuotasRequestPb that = (ListQuotasRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListQuotasRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponse.java index 42a68e3a9..d75337913 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListQuotasResponse.ListQuotasResponseSerializer.class) +@JsonDeserialize(using = ListQuotasResponse.ListQuotasResponseDeserializer.class) public class ListQuotasResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request. */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of returned QuotaInfos. 
*/ - @JsonProperty("quotas") private Collection quotas; public ListQuotasResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,40 @@ public String toString() { .add("quotas", quotas) .toString(); } + + ListQuotasResponsePb toPb() { + ListQuotasResponsePb pb = new ListQuotasResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setQuotas(quotas); + + return pb; + } + + static ListQuotasResponse fromPb(ListQuotasResponsePb pb) { + ListQuotasResponse model = new ListQuotasResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setQuotas(pb.getQuotas()); + + return model; + } + + public static class ListQuotasResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListQuotasResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQuotasResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQuotasResponseDeserializer extends JsonDeserializer { + @Override + public ListQuotasResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQuotasResponsePb pb = mapper.readValue(p, ListQuotasResponsePb.class); + return ListQuotasResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponsePb.java new file mode 100755 index 000000000..4f61c76e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListQuotasResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("quotas") + private Collection quotas; + + public ListQuotasResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQuotasResponsePb setQuotas(Collection quotas) { + this.quotas = quotas; + return this; + } + + public Collection getQuotas() { + return quotas; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQuotasResponsePb that = (ListQuotasResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(quotas, that.quotas); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, quotas); + } + + @Override + public String toString() { + return new ToStringer(ListQuotasResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("quotas", quotas) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java index b45a0442c..9ed9e068a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List refreshes */ @Generated +@JsonSerialize(using = ListRefreshesRequest.ListRefreshesRequestSerializer.class) +@JsonDeserialize(using = ListRefreshesRequest.ListRefreshesRequestDeserializer.class) public class ListRefreshesRequest { /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public ListRefreshesRequest setTableName(String tableName) { this.tableName = tableName; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(ListRefreshesRequest.class).add("tableName", tableName).toString(); } + + ListRefreshesRequestPb toPb() { + ListRefreshesRequestPb pb = new ListRefreshesRequestPb(); + pb.setTableName(tableName); + + return pb; + } + + static ListRefreshesRequest fromPb(ListRefreshesRequestPb pb) { + ListRefreshesRequest model = new ListRefreshesRequest(); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class ListRefreshesRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListRefreshesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRefreshesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRefreshesRequestDeserializer + extends JsonDeserializer { + @Override + public ListRefreshesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRefreshesRequestPb pb = mapper.readValue(p, ListRefreshesRequestPb.class); + return ListRefreshesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequestPb.java new file mode 100755 index 000000000..0a983641b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List refreshes */ +@Generated +class ListRefreshesRequestPb { + @JsonIgnore private String tableName; + + public ListRefreshesRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRefreshesRequestPb that = (ListRefreshesRequestPb) o; + return Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(tableName); + } + + @Override + public String toString() { + return new ToStringer(ListRefreshesRequestPb.class).add("tableName", tableName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java index f319d1ed7..9827886e6 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java @@ -3,28 +3,34 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List Registered Models */ @Generated +@JsonSerialize(using = ListRegisteredModelsRequest.ListRegisteredModelsRequestSerializer.class) +@JsonDeserialize(using = ListRegisteredModelsRequest.ListRegisteredModelsRequestDeserializer.class) public class ListRegisteredModelsRequest { /** * The identifier of the catalog under which to list registered models. If specified, schema_name * must be specified. 
*/ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** * Whether to include registered models in the response for which the principal can only access * selective metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -43,21 +49,15 @@ public class ListRegisteredModelsRequest { * 4/2/2024); - when set to 0, the page length is set to a server configured value (100, as of * 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque token to send for the next page of results (pagination). */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * The identifier of the schema under which to list registered models. If specified, catalog_name * must be specified. */ - @JsonIgnore - @QueryParam("schema_name") private String schemaName; public ListRegisteredModelsRequest setCatalogName(String catalogName) { @@ -132,4 +132,49 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + ListRegisteredModelsRequestPb toPb() { + ListRegisteredModelsRequestPb pb = new ListRegisteredModelsRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setSchemaName(schemaName); + + return pb; + } + + static ListRegisteredModelsRequest fromPb(ListRegisteredModelsRequestPb pb) { + ListRegisteredModelsRequest model = new ListRegisteredModelsRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class ListRegisteredModelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListRegisteredModelsRequest value, JsonGenerator gen, SerializerProvider 
provider) + throws IOException { + ListRegisteredModelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRegisteredModelsRequestDeserializer + extends JsonDeserializer { + @Override + public ListRegisteredModelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRegisteredModelsRequestPb pb = mapper.readValue(p, ListRegisteredModelsRequestPb.class); + return ListRegisteredModelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequestPb.java new file mode 100755 index 000000000..1636b6b1f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Registered Models */ +@Generated +class ListRegisteredModelsRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("schema_name") + private String schemaName; + + public ListRegisteredModelsRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListRegisteredModelsRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListRegisteredModelsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListRegisteredModelsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListRegisteredModelsRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRegisteredModelsRequestPb that = (ListRegisteredModelsRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, 
that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName); + } + + @Override + public String toString() { + return new ToStringer(ListRegisteredModelsRequestPb.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java index 0f2d052df..9fc78a2d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java @@ -4,21 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListRegisteredModelsResponse.ListRegisteredModelsResponseSerializer.class) +@JsonDeserialize( + using = 
ListRegisteredModelsResponse.ListRegisteredModelsResponseDeserializer.class) public class ListRegisteredModelsResponse { /** * Opaque token for pagination. Omitted if there are no more results. page_token should be set to * this value for fetching the next page. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("registered_models") private Collection registeredModels; public ListRegisteredModelsResponse setNextPageToken(String nextPageToken) { @@ -61,4 +71,43 @@ public String toString() { .add("registeredModels", registeredModels) .toString(); } + + ListRegisteredModelsResponsePb toPb() { + ListRegisteredModelsResponsePb pb = new ListRegisteredModelsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRegisteredModels(registeredModels); + + return pb; + } + + static ListRegisteredModelsResponse fromPb(ListRegisteredModelsResponsePb pb) { + ListRegisteredModelsResponse model = new ListRegisteredModelsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRegisteredModels(pb.getRegisteredModels()); + + return model; + } + + public static class ListRegisteredModelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListRegisteredModelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRegisteredModelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRegisteredModelsResponseDeserializer + extends JsonDeserializer { + @Override + public ListRegisteredModelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRegisteredModelsResponsePb pb = mapper.readValue(p, ListRegisteredModelsResponsePb.class); + return ListRegisteredModelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponsePb.java new file mode 100755 index 000000000..0eb7d1c9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListRegisteredModelsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("registered_models") + private Collection registeredModels; + + public ListRegisteredModelsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListRegisteredModelsResponsePb setRegisteredModels( + Collection registeredModels) { + this.registeredModels = registeredModels; + return this; + } + + public Collection getRegisteredModels() { + return registeredModels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRegisteredModelsResponsePb that = (ListRegisteredModelsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(registeredModels, that.registeredModels); + } + + @Override + public int hashCode() { + return 
Objects.hash(nextPageToken, registeredModels); + } + + @Override + public String toString() { + return new ToStringer(ListRegisteredModelsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("registeredModels", registeredModels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java index 965bcf41d..ca5cb74ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List schemas */ @Generated +@JsonSerialize(using = ListSchemasRequest.ListSchemasRequestSerializer.class) +@JsonDeserialize(using = ListSchemasRequest.ListSchemasRequestDeserializer.class) public class ListSchemasRequest { /** Parent catalog for schemas of interest. 
*/ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** * Whether to include schemas in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -31,13 +37,9 @@ public class ListSchemasRequest { * configured value (recommended); - when set to a value less than 0, an invalid parameter error * is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListSchemasRequest setCatalogName(String catalogName) { @@ -101,4 +103,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListSchemasRequestPb toPb() { + ListSchemasRequestPb pb = new ListSchemasRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListSchemasRequest fromPb(ListSchemasRequestPb pb) { + ListSchemasRequest model = new ListSchemasRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListSchemasRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListSchemasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSchemasRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSchemasRequestDeserializer extends JsonDeserializer { + @Override + public ListSchemasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSchemasRequestPb pb = mapper.readValue(p, ListSchemasRequestPb.class); + return ListSchemasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequestPb.java new file mode 100755 index 000000000..6bf37a9c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List schemas */ +@Generated +class ListSchemasRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListSchemasRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListSchemasRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListSchemasRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListSchemasRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return 
pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSchemasRequestPb that = (ListSchemasRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListSchemasRequestPb.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java index 23021dcba..a1aa61399 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; 
@Generated +@JsonSerialize(using = ListSchemasResponse.ListSchemasResponseSerializer.class) +@JsonDeserialize(using = ListSchemasResponse.ListSchemasResponseDeserializer.class) public class ListSchemasResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of schema information objects. */ - @JsonProperty("schemas") private Collection schemas; public ListSchemasResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,41 @@ public String toString() { .add("schemas", schemas) .toString(); } + + ListSchemasResponsePb toPb() { + ListSchemasResponsePb pb = new ListSchemasResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSchemas(schemas); + + return pb; + } + + static ListSchemasResponse fromPb(ListSchemasResponsePb pb) { + ListSchemasResponse model = new ListSchemasResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSchemas(pb.getSchemas()); + + return model; + } + + public static class ListSchemasResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListSchemasResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSchemasResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSchemasResponseDeserializer + extends JsonDeserializer { + @Override + public ListSchemasResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSchemasResponsePb pb = mapper.readValue(p, ListSchemasResponsePb.class); + return ListSchemasResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponsePb.java new file mode 100755 index 000000000..0ec6e555b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSchemasResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("schemas") + private Collection schemas; + + public ListSchemasResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSchemasResponsePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSchemasResponsePb that = (ListSchemasResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(schemas, that.schemas); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, schemas); + } + + @Override + public String toString() { + return new ToStringer(ListSchemasResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("schemas", 
schemas) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java index 75755ef16..c529e8663 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java @@ -3,13 +3,24 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List credentials */ @Generated +@JsonSerialize(using = ListStorageCredentialsRequest.ListStorageCredentialsRequestSerializer.class) +@JsonDeserialize( + using = ListStorageCredentialsRequest.ListStorageCredentialsRequestDeserializer.class) public class ListStorageCredentialsRequest { /** * Maximum number of storage credentials to return. 
If not set, all the storage credentials are @@ -18,13 +29,9 @@ public class ListStorageCredentialsRequest { * a server configured value (recommended); - when set to a value less than 0, an invalid * parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListStorageCredentialsRequest setMaxResults(Long maxResults) { @@ -65,4 +72,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListStorageCredentialsRequestPb toPb() { + ListStorageCredentialsRequestPb pb = new ListStorageCredentialsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListStorageCredentialsRequest fromPb(ListStorageCredentialsRequestPb pb) { + ListStorageCredentialsRequest model = new ListStorageCredentialsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListStorageCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListStorageCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListStorageCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListStorageCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public ListStorageCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListStorageCredentialsRequestPb pb = + mapper.readValue(p, ListStorageCredentialsRequestPb.class); + return ListStorageCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequestPb.java new file mode 100755 index 000000000..49c96d63d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List credentials */ +@Generated +class ListStorageCredentialsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListStorageCredentialsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListStorageCredentialsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListStorageCredentialsRequestPb that = (ListStorageCredentialsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override 
+ public String toString() { + return new ToStringer(ListStorageCredentialsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java index 25ef95015..2ba115a52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListStorageCredentialsResponse.ListStorageCredentialsResponseSerializer.class) +@JsonDeserialize( + using = ListStorageCredentialsResponse.ListStorageCredentialsResponseDeserializer.class) public class ListStorageCredentialsResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("storage_credentials") private Collection storageCredentials; public ListStorageCredentialsResponse setNextPageToken(String nextPageToken) { @@ -61,4 +72,44 @@ public String toString() { .add("storageCredentials", storageCredentials) .toString(); } + + ListStorageCredentialsResponsePb toPb() { + ListStorageCredentialsResponsePb pb = new ListStorageCredentialsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setStorageCredentials(storageCredentials); + + return pb; + } + + static ListStorageCredentialsResponse fromPb(ListStorageCredentialsResponsePb pb) { + ListStorageCredentialsResponse model = new ListStorageCredentialsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setStorageCredentials(pb.getStorageCredentials()); + + return model; + } + + public static class ListStorageCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListStorageCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListStorageCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListStorageCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public ListStorageCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListStorageCredentialsResponsePb pb = + mapper.readValue(p, ListStorageCredentialsResponsePb.class); + return ListStorageCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponsePb.java new file mode 100755 index 000000000..cb3def1c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListStorageCredentialsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("storage_credentials") + private Collection storageCredentials; + + public ListStorageCredentialsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListStorageCredentialsResponsePb setStorageCredentials( + Collection storageCredentials) { + this.storageCredentials = storageCredentials; + return this; + } + + public Collection getStorageCredentials() { + return storageCredentials; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListStorageCredentialsResponsePb that = (ListStorageCredentialsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(storageCredentials, that.storageCredentials); + } + + 
@Override + public int hashCode() { + return Objects.hash(nextPageToken, storageCredentials); + } + + @Override + public String toString() { + return new ToStringer(ListStorageCredentialsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("storageCredentials", storageCredentials) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java index 0aa5904fb..b3c4fc074 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List table summaries */ @Generated +@JsonSerialize(using = ListSummariesRequest.ListSummariesRequestSerializer.class) +@JsonDeserialize(using = ListSummariesRequest.ListSummariesRequestDeserializer.class) public class ListSummariesRequest { /** Name of parent catalog for tables of interest. 
*/ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** Whether to include a manifest containing capabilities the table has. */ - @JsonIgnore - @QueryParam("include_manifest_capabilities") private Boolean includeManifestCapabilities; /** @@ -28,28 +34,20 @@ public class ListSummariesRequest { * when set to 0, the page length is set to a server configured value (10000, as of 1/5/2024) * (recommended); - when set to a value less than 0, an invalid parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or * empty. */ - @JsonIgnore - @QueryParam("schema_name_pattern") private String schemaNamePattern; /** * A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty. 
*/ - @JsonIgnore - @QueryParam("table_name_pattern") private String tableNamePattern; public ListSummariesRequest setCatalogName(String catalogName) { @@ -141,4 +139,50 @@ public String toString() { .add("tableNamePattern", tableNamePattern) .toString(); } + + ListSummariesRequestPb toPb() { + ListSummariesRequestPb pb = new ListSummariesRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeManifestCapabilities(includeManifestCapabilities); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setSchemaNamePattern(schemaNamePattern); + pb.setTableNamePattern(tableNamePattern); + + return pb; + } + + static ListSummariesRequest fromPb(ListSummariesRequestPb pb) { + ListSummariesRequest model = new ListSummariesRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeManifestCapabilities(pb.getIncludeManifestCapabilities()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setSchemaNamePattern(pb.getSchemaNamePattern()); + model.setTableNamePattern(pb.getTableNamePattern()); + + return model; + } + + public static class ListSummariesRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListSummariesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSummariesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSummariesRequestDeserializer + extends JsonDeserializer { + @Override + public ListSummariesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSummariesRequestPb pb = mapper.readValue(p, ListSummariesRequestPb.class); + return ListSummariesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequestPb.java new file mode 100755 index 000000000..b07248c57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequestPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List table summaries */ +@Generated +class ListSummariesRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_manifest_capabilities") + private Boolean includeManifestCapabilities; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("schema_name_pattern") + private String schemaNamePattern; + + @JsonIgnore + @QueryParam("table_name_pattern") + private String tableNamePattern; + + public ListSummariesRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListSummariesRequestPb setIncludeManifestCapabilities( + Boolean includeManifestCapabilities) { + this.includeManifestCapabilities = includeManifestCapabilities; + return this; + } + + public Boolean getIncludeManifestCapabilities() { + return includeManifestCapabilities; + } + + public 
ListSummariesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListSummariesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListSummariesRequestPb setSchemaNamePattern(String schemaNamePattern) { + this.schemaNamePattern = schemaNamePattern; + return this; + } + + public String getSchemaNamePattern() { + return schemaNamePattern; + } + + public ListSummariesRequestPb setTableNamePattern(String tableNamePattern) { + this.tableNamePattern = tableNamePattern; + return this; + } + + public String getTableNamePattern() { + return tableNamePattern; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSummariesRequestPb that = (ListSummariesRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaNamePattern, that.schemaNamePattern) + && Objects.equals(tableNamePattern, that.tableNamePattern); + } + + @Override + public int hashCode() { + return Objects.hash( + catalogName, + includeManifestCapabilities, + maxResults, + pageToken, + schemaNamePattern, + tableNamePattern); + } + + @Override + public String toString() { + return new ToStringer(ListSummariesRequestPb.class) + .add("catalogName", catalogName) + .add("includeManifestCapabilities", includeManifestCapabilities) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("schemaNamePattern", schemaNamePattern) + .add("tableNamePattern", tableNamePattern) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java index b48792337..7ef37b418 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List system schemas */ @Generated +@JsonSerialize(using = ListSystemSchemasRequest.ListSystemSchemasRequestSerializer.class) +@JsonDeserialize(using = ListSystemSchemasRequest.ListSystemSchemasRequestDeserializer.class) public class ListSystemSchemasRequest { /** * Maximum number of schemas to return. - When set to 0, the page length is set to a server @@ -18,16 +28,12 @@ public class ListSystemSchemasRequest { * invalid parameter error is returned; - If not set, all the schemas are returned (not * recommended). */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** The ID for the metastore in which the system schema resides. 
*/ - @JsonIgnore private String metastoreId; + private String metastoreId; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListSystemSchemasRequest setMaxResults(Long maxResults) { @@ -80,4 +86,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListSystemSchemasRequestPb toPb() { + ListSystemSchemasRequestPb pb = new ListSystemSchemasRequestPb(); + pb.setMaxResults(maxResults); + pb.setMetastoreId(metastoreId); + pb.setPageToken(pageToken); + + return pb; + } + + static ListSystemSchemasRequest fromPb(ListSystemSchemasRequestPb pb) { + ListSystemSchemasRequest model = new ListSystemSchemasRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setMetastoreId(pb.getMetastoreId()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListSystemSchemasRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListSystemSchemasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSystemSchemasRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSystemSchemasRequestDeserializer + extends JsonDeserializer { + @Override + public ListSystemSchemasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSystemSchemasRequestPb pb = mapper.readValue(p, ListSystemSchemasRequestPb.class); + return ListSystemSchemasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequestPb.java new file mode 100755 index 000000000..3fa6e1bfd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List system schemas */ +@Generated +class ListSystemSchemasRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore private String metastoreId; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListSystemSchemasRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListSystemSchemasRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ListSystemSchemasRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSystemSchemasRequestPb that = (ListSystemSchemasRequestPb) o; + return 
Objects.equals(maxResults, that.maxResults) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, metastoreId, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListSystemSchemasRequestPb.class) + .add("maxResults", maxResults) + .add("metastoreId", metastoreId) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponse.java index ac970407e..c253f0da6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListSystemSchemasResponse.ListSystemSchemasResponseSerializer.class) +@JsonDeserialize(using = ListSystemSchemasResponse.ListSystemSchemasResponseDeserializer.class) public class ListSystemSchemasResponse { /** * Opaque token to retrieve the next page of results. 
Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of system schema information objects. */ - @JsonProperty("schemas") private Collection schemas; public ListSystemSchemasResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("schemas", schemas) .toString(); } + + ListSystemSchemasResponsePb toPb() { + ListSystemSchemasResponsePb pb = new ListSystemSchemasResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSchemas(schemas); + + return pb; + } + + static ListSystemSchemasResponse fromPb(ListSystemSchemasResponsePb pb) { + ListSystemSchemasResponse model = new ListSystemSchemasResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSchemas(pb.getSchemas()); + + return model; + } + + public static class ListSystemSchemasResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListSystemSchemasResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSystemSchemasResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSystemSchemasResponseDeserializer + extends JsonDeserializer { + @Override + public ListSystemSchemasResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSystemSchemasResponsePb pb = mapper.readValue(p, ListSystemSchemasResponsePb.class); + return ListSystemSchemasResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponsePb.java new file mode 100755 index 000000000..3f11ebb38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSystemSchemasResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("schemas") + private Collection schemas; + + public ListSystemSchemasResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSystemSchemasResponsePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSystemSchemasResponsePb that = (ListSystemSchemasResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(schemas, that.schemas); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, schemas); + } + + @Override + public String toString() { + return new 
ToStringer(ListSystemSchemasResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("schemas", schemas) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java index bfe514e7b..4d99ab826 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListTableSummariesResponse.ListTableSummariesResponseSerializer.class) +@JsonDeserialize(using = ListTableSummariesResponse.ListTableSummariesResponseDeserializer.class) public class ListTableSummariesResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** List of table summaries. 
*/ - @JsonProperty("tables") private Collection tables; public ListTableSummariesResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,43 @@ public String toString() { .add("tables", tables) .toString(); } + + ListTableSummariesResponsePb toPb() { + ListTableSummariesResponsePb pb = new ListTableSummariesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setTables(tables); + + return pb; + } + + static ListTableSummariesResponse fromPb(ListTableSummariesResponsePb pb) { + ListTableSummariesResponse model = new ListTableSummariesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setTables(pb.getTables()); + + return model; + } + + public static class ListTableSummariesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListTableSummariesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTableSummariesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTableSummariesResponseDeserializer + extends JsonDeserializer { + @Override + public ListTableSummariesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTableSummariesResponsePb pb = mapper.readValue(p, ListTableSummariesResponsePb.class); + return ListTableSummariesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponsePb.java new file mode 100755 index 000000000..80c5e258f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListTableSummariesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("tables") + private Collection tables; + + public ListTableSummariesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListTableSummariesResponsePb setTables(Collection tables) { + this.tables = tables; + return this; + } + + public Collection getTables() { + return tables; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTableSummariesResponsePb that = (ListTableSummariesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(tables, that.tables); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, tables); + } + + @Override + public String toString() { + return new ToStringer(ListTableSummariesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("tables", tables) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java index f5d9fd2f8..827869679 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java @@ -3,35 +3,37 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List tables */ @Generated +@JsonSerialize(using = ListTablesRequest.ListTablesRequestSerializer.class) +@JsonDeserialize(using = ListTablesRequest.ListTablesRequestDeserializer.class) public class ListTablesRequest { /** Name of parent catalog for tables of interest. */ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** * Whether to include tables in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** Whether delta metadata should be included in the response. */ - @JsonIgnore - @QueryParam("include_delta_metadata") private Boolean includeDeltaMetadata; /** Whether to include a manifest containing capabilities the table has. */ - @JsonIgnore - @QueryParam("include_manifest_capabilities") private Boolean includeManifestCapabilities; /** @@ -40,36 +42,24 @@ public class ListTablesRequest { * configured value; - when set to 0, the page length is set to a server configured value * (recommended); - when set to a value less than 0, an invalid parameter error is returned; */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Whether to omit the columns of the table from the response or not. 
*/ - @JsonIgnore - @QueryParam("omit_columns") private Boolean omitColumns; /** Whether to omit the properties of the table from the response or not. */ - @JsonIgnore - @QueryParam("omit_properties") private Boolean omitProperties; /** * Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the * response or not. */ - @JsonIgnore - @QueryParam("omit_username") private Boolean omitUsername; /** Opaque token to send for the next page of results (pagination). */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Parent schema of tables. */ - @JsonIgnore - @QueryParam("schema_name") private String schemaName; public ListTablesRequest setCatalogName(String catalogName) { @@ -209,4 +199,56 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + ListTablesRequestPb toPb() { + ListTablesRequestPb pb = new ListTablesRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeBrowse(includeBrowse); + pb.setIncludeDeltaMetadata(includeDeltaMetadata); + pb.setIncludeManifestCapabilities(includeManifestCapabilities); + pb.setMaxResults(maxResults); + pb.setOmitColumns(omitColumns); + pb.setOmitProperties(omitProperties); + pb.setOmitUsername(omitUsername); + pb.setPageToken(pageToken); + pb.setSchemaName(schemaName); + + return pb; + } + + static ListTablesRequest fromPb(ListTablesRequestPb pb) { + ListTablesRequest model = new ListTablesRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setIncludeDeltaMetadata(pb.getIncludeDeltaMetadata()); + model.setIncludeManifestCapabilities(pb.getIncludeManifestCapabilities()); + model.setMaxResults(pb.getMaxResults()); + model.setOmitColumns(pb.getOmitColumns()); + model.setOmitProperties(pb.getOmitProperties()); + model.setOmitUsername(pb.getOmitUsername()); + model.setPageToken(pb.getPageToken()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class 
ListTablesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListTablesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTablesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTablesRequestDeserializer extends JsonDeserializer { + @Override + public ListTablesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTablesRequestPb pb = mapper.readValue(p, ListTablesRequestPb.class); + return ListTablesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequestPb.java new file mode 100755 index 000000000..ec6b50f60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequestPb.java @@ -0,0 +1,191 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List tables */ +@Generated +class ListTablesRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("include_delta_metadata") + private Boolean includeDeltaMetadata; + + @JsonIgnore + @QueryParam("include_manifest_capabilities") + private Boolean includeManifestCapabilities; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("omit_columns") + private Boolean omitColumns; + + @JsonIgnore + @QueryParam("omit_properties") + private Boolean omitProperties; + + @JsonIgnore + @QueryParam("omit_username") + private Boolean omitUsername; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("schema_name") + private String schemaName; + + public ListTablesRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListTablesRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListTablesRequestPb setIncludeDeltaMetadata(Boolean includeDeltaMetadata) { + this.includeDeltaMetadata = includeDeltaMetadata; + return this; + } + + public Boolean getIncludeDeltaMetadata() { + return includeDeltaMetadata; + } + + public ListTablesRequestPb setIncludeManifestCapabilities(Boolean includeManifestCapabilities) { + this.includeManifestCapabilities = includeManifestCapabilities; + return this; + } + + public Boolean 
getIncludeManifestCapabilities() { + return includeManifestCapabilities; + } + + public ListTablesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListTablesRequestPb setOmitColumns(Boolean omitColumns) { + this.omitColumns = omitColumns; + return this; + } + + public Boolean getOmitColumns() { + return omitColumns; + } + + public ListTablesRequestPb setOmitProperties(Boolean omitProperties) { + this.omitProperties = omitProperties; + return this; + } + + public Boolean getOmitProperties() { + return omitProperties; + } + + public ListTablesRequestPb setOmitUsername(Boolean omitUsername) { + this.omitUsername = omitUsername; + return this; + } + + public Boolean getOmitUsername() { + return omitUsername; + } + + public ListTablesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListTablesRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTablesRequestPb that = (ListTablesRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata) + && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(omitColumns, that.omitColumns) + && Objects.equals(omitProperties, that.omitProperties) + && Objects.equals(omitUsername, that.omitUsername) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return 
Objects.hash( + catalogName, + includeBrowse, + includeDeltaMetadata, + includeManifestCapabilities, + maxResults, + omitColumns, + omitProperties, + omitUsername, + pageToken, + schemaName); + } + + @Override + public String toString() { + return new ToStringer(ListTablesRequestPb.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("includeDeltaMetadata", includeDeltaMetadata) + .add("includeManifestCapabilities", includeManifestCapabilities) + .add("maxResults", maxResults) + .add("omitColumns", omitColumns) + .add("omitProperties", omitProperties) + .add("omitUsername", omitUsername) + .add("pageToken", pageToken) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java index 429103c82..7377d5146 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ListTablesResponse.ListTablesResponseSerializer.class) +@JsonDeserialize(using = ListTablesResponse.ListTablesResponseDeserializer.class) public class ListTablesResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of table information objects. */ - @JsonProperty("tables") private Collection tables; public ListTablesResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,40 @@ public String toString() { .add("tables", tables) .toString(); } + + ListTablesResponsePb toPb() { + ListTablesResponsePb pb = new ListTablesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setTables(tables); + + return pb; + } + + static ListTablesResponse fromPb(ListTablesResponsePb pb) { + ListTablesResponse model = new ListTablesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setTables(pb.getTables()); + + return model; + } + + public static class ListTablesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListTablesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTablesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTablesResponseDeserializer extends JsonDeserializer { + @Override + public ListTablesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTablesResponsePb pb = mapper.readValue(p, ListTablesResponsePb.class); + return ListTablesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponsePb.java new file mode 100755 index 000000000..6a55d4955 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListTablesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("tables") + private Collection tables; + + public ListTablesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListTablesResponsePb setTables(Collection tables) { + this.tables = tables; + return this; + } + + public Collection getTables() { + return tables; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTablesResponsePb that = (ListTablesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(tables, that.tables); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, tables); + } + + @Override + public String toString() { + return new ToStringer(ListTablesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("tables", tables) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java index f7126ee0e..f636b4299 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List Volumes */ @Generated +@JsonSerialize(using = ListVolumesRequest.ListVolumesRequestSerializer.class) +@JsonDeserialize(using = ListVolumesRequest.ListVolumesRequestDeserializer.class) public class ListVolumesRequest { /** The identifier of the catalog */ - @JsonIgnore - @QueryParam("catalog_name") private String catalogName; /** * Whether to include volumes in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** @@ -37,21 +43,15 @@ public class ListVolumesRequest { * number of volumes returned in a page may be smaller than this value, including 0, even if there * are more pages. 
*/ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** * Opaque token returned by a previous request. It must be included in the request to retrieve the * next page of results (pagination). */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The identifier of the schema */ - @JsonIgnore - @QueryParam("schema_name") private String schemaName; public ListVolumesRequest setCatalogName(String catalogName) { @@ -126,4 +126,46 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + ListVolumesRequestPb toPb() { + ListVolumesRequestPb pb = new ListVolumesRequestPb(); + pb.setCatalogName(catalogName); + pb.setIncludeBrowse(includeBrowse); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setSchemaName(schemaName); + + return pb; + } + + static ListVolumesRequest fromPb(ListVolumesRequestPb pb) { + ListVolumesRequest model = new ListVolumesRequest(); + model.setCatalogName(pb.getCatalogName()); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class ListVolumesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListVolumesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListVolumesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListVolumesRequestDeserializer extends JsonDeserializer { + @Override + public ListVolumesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListVolumesRequestPb pb = mapper.readValue(p, ListVolumesRequestPb.class); + return ListVolumesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequestPb.java new file mode 100755 index 000000000..b81f2ca11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Volumes */ +@Generated +class ListVolumesRequestPb { + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("schema_name") + private String schemaName; + + public ListVolumesRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListVolumesRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListVolumesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListVolumesRequestPb setPageToken(String pageToken) { + this.pageToken = 
pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListVolumesRequestPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListVolumesRequestPb that = (ListVolumesRequestPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName); + } + + @Override + public String toString() { + return new ToStringer(ListVolumesRequestPb.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java index 2df7aebe6..222e273d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListVolumesResponseContent.ListVolumesResponseContentSerializer.class) +@JsonDeserialize(using = ListVolumesResponseContent.ListVolumesResponseContentDeserializer.class) public class ListVolumesResponseContent { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request to retrieve the next page of * results. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("volumes") private Collection volumes; public ListVolumesResponseContent setNextPageToken(String nextPageToken) { @@ -61,4 +70,43 @@ public String toString() { .add("volumes", volumes) .toString(); } + + ListVolumesResponseContentPb toPb() { + ListVolumesResponseContentPb pb = new ListVolumesResponseContentPb(); + pb.setNextPageToken(nextPageToken); + pb.setVolumes(volumes); + + return pb; + } + + static ListVolumesResponseContent fromPb(ListVolumesResponseContentPb pb) { + ListVolumesResponseContent model = new ListVolumesResponseContent(); + model.setNextPageToken(pb.getNextPageToken()); + model.setVolumes(pb.getVolumes()); + + return model; + } + + public static class ListVolumesResponseContentSerializer + extends JsonSerializer { + @Override + public void serialize( + ListVolumesResponseContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListVolumesResponseContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListVolumesResponseContentDeserializer + extends JsonDeserializer { + 
@Override + public ListVolumesResponseContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListVolumesResponseContentPb pb = mapper.readValue(p, ListVolumesResponseContentPb.class); + return ListVolumesResponseContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContentPb.java new file mode 100755 index 000000000..ad093f6f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContentPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListVolumesResponseContentPb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("volumes") + private Collection volumes; + + public ListVolumesResponseContentPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListVolumesResponseContentPb setVolumes(Collection volumes) { + this.volumes = volumes; + return this; + } + + public Collection getVolumes() { + return volumes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListVolumesResponseContentPb that = (ListVolumesResponseContentPb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && 
Objects.equals(volumes, that.volumes); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, volumes); + } + + @Override + public String toString() { + return new ToStringer(ListVolumesResponseContentPb.class) + .add("nextPageToken", nextPageToken) + .add("volumes", volumes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java index 0d9640d5f..10a224fca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MetastoreAssignment.MetastoreAssignmentSerializer.class) +@JsonDeserialize(using = MetastoreAssignment.MetastoreAssignmentDeserializer.class) public class MetastoreAssignment { /** The name of the default catalog in the metastore. */ - @JsonProperty("default_catalog_name") private String defaultCatalogName; /** The unique ID of the metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** The unique ID of the Databricks workspace. 
*/ - @JsonProperty("workspace_id") private Long workspaceId; public MetastoreAssignment setDefaultCatalogName(String defaultCatalogName) { @@ -71,4 +79,43 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + MetastoreAssignmentPb toPb() { + MetastoreAssignmentPb pb = new MetastoreAssignmentPb(); + pb.setDefaultCatalogName(defaultCatalogName); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static MetastoreAssignment fromPb(MetastoreAssignmentPb pb) { + MetastoreAssignment model = new MetastoreAssignment(); + model.setDefaultCatalogName(pb.getDefaultCatalogName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class MetastoreAssignmentSerializer extends JsonSerializer { + @Override + public void serialize(MetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public MetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MetastoreAssignmentPb pb = mapper.readValue(p, MetastoreAssignmentPb.class); + return MetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignmentPb.java new file mode 100755 index 000000000..b9b565fa6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignmentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MetastoreAssignmentPb { + @JsonProperty("default_catalog_name") + private String defaultCatalogName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public MetastoreAssignmentPb setDefaultCatalogName(String defaultCatalogName) { + this.defaultCatalogName = defaultCatalogName; + return this; + } + + public String getDefaultCatalogName() { + return defaultCatalogName; + } + + public MetastoreAssignmentPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public MetastoreAssignmentPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MetastoreAssignmentPb that = (MetastoreAssignmentPb) o; + return Objects.equals(defaultCatalogName, that.defaultCatalogName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(defaultCatalogName, metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(MetastoreAssignmentPb.class) + .add("defaultCatalogName", defaultCatalogName) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java index 
2eef53dfc..f76137701 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java @@ -4,91 +4,83 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MetastoreInfo.MetastoreInfoSerializer.class) +@JsonDeserialize(using = MetastoreInfo.MetastoreInfoDeserializer.class) public class MetastoreInfo { /** Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`). */ - @JsonProperty("cloud") private String cloud; /** Time at which this metastore was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of metastore creator. */ - @JsonProperty("created_by") private String createdBy; /** Unique identifier of the metastore's (Default) Data Access Configuration. */ - @JsonProperty("default_data_access_config_id") private String defaultDataAccessConfigId; /** * The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta * Sharing as the official name. */ - @JsonProperty("delta_sharing_organization_name") private String deltaSharingOrganizationName; /** The lifetime of delta sharing recipient token in seconds. 
*/ - @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") private Long deltaSharingRecipientTokenLifetimeInSeconds; /** The scope of Delta Sharing enabled for the metastore. */ - @JsonProperty("delta_sharing_scope") - private MetastoreInfoDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Whether to allow non-DBR clients to directly access entities under the metastore. */ - @JsonProperty("external_access_enabled") private Boolean externalAccessEnabled; /** * Globally unique metastore ID across clouds and regions, of the form * `cloud:region:metastore_id`. */ - @JsonProperty("global_metastore_id") private String globalMetastoreId; /** Unique identifier of metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** The user-specified name of the metastore. */ - @JsonProperty("name") private String name; /** The owner of the metastore. */ - @JsonProperty("owner") private String owner; /** Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). */ - @JsonProperty("privilege_model_version") private String privilegeModelVersion; /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). */ - @JsonProperty("region") private String region; /** The storage root URL for metastore */ - @JsonProperty("storage_root") private String storageRoot; /** UUID of storage credential to access the metastore storage_root. */ - @JsonProperty("storage_root_credential_id") private String storageRootCredentialId; /** Name of the storage credential to access the metastore storage_root. */ - @JsonProperty("storage_root_credential_name") private String storageRootCredentialName; /** Time at which the metastore was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the metastore. 
*/ - @JsonProperty("updated_by") private String updatedBy; public MetastoreInfo setCloud(String cloud) { @@ -146,12 +138,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public MetastoreInfo setDeltaSharingScope(MetastoreInfoDeltaSharingScope deltaSharingScope) { + public MetastoreInfo setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public MetastoreInfoDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } @@ -341,4 +333,74 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + MetastoreInfoPb toPb() { + MetastoreInfoPb pb = new MetastoreInfoPb(); + pb.setCloud(cloud); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDefaultDataAccessConfigId(defaultDataAccessConfigId); + pb.setDeltaSharingOrganizationName(deltaSharingOrganizationName); + pb.setDeltaSharingRecipientTokenLifetimeInSeconds(deltaSharingRecipientTokenLifetimeInSeconds); + pb.setDeltaSharingScope(deltaSharingScope); + pb.setExternalAccessEnabled(externalAccessEnabled); + pb.setGlobalMetastoreId(globalMetastoreId); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setPrivilegeModelVersion(privilegeModelVersion); + pb.setRegion(region); + pb.setStorageRoot(storageRoot); + pb.setStorageRootCredentialId(storageRootCredentialId); + pb.setStorageRootCredentialName(storageRootCredentialName); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static MetastoreInfo fromPb(MetastoreInfoPb pb) { + MetastoreInfo model = new MetastoreInfo(); + model.setCloud(pb.getCloud()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDefaultDataAccessConfigId(pb.getDefaultDataAccessConfigId()); + 
model.setDeltaSharingOrganizationName(pb.getDeltaSharingOrganizationName()); + model.setDeltaSharingRecipientTokenLifetimeInSeconds( + pb.getDeltaSharingRecipientTokenLifetimeInSeconds()); + model.setDeltaSharingScope(pb.getDeltaSharingScope()); + model.setExternalAccessEnabled(pb.getExternalAccessEnabled()); + model.setGlobalMetastoreId(pb.getGlobalMetastoreId()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPrivilegeModelVersion(pb.getPrivilegeModelVersion()); + model.setRegion(pb.getRegion()); + model.setStorageRoot(pb.getStorageRoot()); + model.setStorageRootCredentialId(pb.getStorageRootCredentialId()); + model.setStorageRootCredentialName(pb.getStorageRootCredentialName()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class MetastoreInfoSerializer extends JsonSerializer { + @Override + public void serialize(MetastoreInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MetastoreInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MetastoreInfoDeserializer extends JsonDeserializer { + @Override + public MetastoreInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MetastoreInfoPb pb = mapper.readValue(p, MetastoreInfoPb.class); + return MetastoreInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoPb.java new file mode 100755 index 000000000..5c70d5acf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoPb.java @@ -0,0 +1,319 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MetastoreInfoPb { + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("default_data_access_config_id") + private String defaultDataAccessConfigId; + + @JsonProperty("delta_sharing_organization_name") + private String deltaSharingOrganizationName; + + @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") + private Long deltaSharingRecipientTokenLifetimeInSeconds; + + @JsonProperty("delta_sharing_scope") + private DeltaSharingScopeEnum deltaSharingScope; + + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + + @JsonProperty("global_metastore_id") + private String globalMetastoreId; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("privilege_model_version") + private String privilegeModelVersion; + + @JsonProperty("region") + private String region; + + @JsonProperty("storage_root") + private String storageRoot; + + 
@JsonProperty("storage_root_credential_id") + private String storageRootCredentialId; + + @JsonProperty("storage_root_credential_name") + private String storageRootCredentialName; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public MetastoreInfoPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public MetastoreInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public MetastoreInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public MetastoreInfoPb setDefaultDataAccessConfigId(String defaultDataAccessConfigId) { + this.defaultDataAccessConfigId = defaultDataAccessConfigId; + return this; + } + + public String getDefaultDataAccessConfigId() { + return defaultDataAccessConfigId; + } + + public MetastoreInfoPb setDeltaSharingOrganizationName(String deltaSharingOrganizationName) { + this.deltaSharingOrganizationName = deltaSharingOrganizationName; + return this; + } + + public String getDeltaSharingOrganizationName() { + return deltaSharingOrganizationName; + } + + public MetastoreInfoPb setDeltaSharingRecipientTokenLifetimeInSeconds( + Long deltaSharingRecipientTokenLifetimeInSeconds) { + this.deltaSharingRecipientTokenLifetimeInSeconds = deltaSharingRecipientTokenLifetimeInSeconds; + return this; + } + + public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { + return deltaSharingRecipientTokenLifetimeInSeconds; + } + + public MetastoreInfoPb setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { + this.deltaSharingScope = deltaSharingScope; + return this; + } + + public DeltaSharingScopeEnum getDeltaSharingScope() { + return deltaSharingScope; + } + + public MetastoreInfoPb setExternalAccessEnabled(Boolean 
externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + + public MetastoreInfoPb setGlobalMetastoreId(String globalMetastoreId) { + this.globalMetastoreId = globalMetastoreId; + return this; + } + + public String getGlobalMetastoreId() { + return globalMetastoreId; + } + + public MetastoreInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public MetastoreInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public MetastoreInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public MetastoreInfoPb setPrivilegeModelVersion(String privilegeModelVersion) { + this.privilegeModelVersion = privilegeModelVersion; + return this; + } + + public String getPrivilegeModelVersion() { + return privilegeModelVersion; + } + + public MetastoreInfoPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public MetastoreInfoPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public MetastoreInfoPb setStorageRootCredentialId(String storageRootCredentialId) { + this.storageRootCredentialId = storageRootCredentialId; + return this; + } + + public String getStorageRootCredentialId() { + return storageRootCredentialId; + } + + public MetastoreInfoPb setStorageRootCredentialName(String storageRootCredentialName) { + this.storageRootCredentialName = storageRootCredentialName; + return this; + } + + public String getStorageRootCredentialName() { + return storageRootCredentialName; + } + + public MetastoreInfoPb setUpdatedAt(Long 
updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public MetastoreInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MetastoreInfoPb that = (MetastoreInfoPb) o; + return Objects.equals(cloud, that.cloud) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(defaultDataAccessConfigId, that.defaultDataAccessConfigId) + && Objects.equals(deltaSharingOrganizationName, that.deltaSharingOrganizationName) + && Objects.equals( + deltaSharingRecipientTokenLifetimeInSeconds, + that.deltaSharingRecipientTokenLifetimeInSeconds) + && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(externalAccessEnabled, that.externalAccessEnabled) + && Objects.equals(globalMetastoreId, that.globalMetastoreId) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(privilegeModelVersion, that.privilegeModelVersion) + && Objects.equals(region, that.region) + && Objects.equals(storageRoot, that.storageRoot) + && Objects.equals(storageRootCredentialId, that.storageRootCredentialId) + && Objects.equals(storageRootCredentialName, that.storageRootCredentialName) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + cloud, + createdAt, + createdBy, + defaultDataAccessConfigId, + deltaSharingOrganizationName, + deltaSharingRecipientTokenLifetimeInSeconds, + deltaSharingScope, + externalAccessEnabled, + globalMetastoreId, + metastoreId, + name, + owner, + privilegeModelVersion, + region, + storageRoot, + 
storageRootCredentialId, + storageRootCredentialName, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(MetastoreInfoPb.class) + .add("cloud", cloud) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("defaultDataAccessConfigId", defaultDataAccessConfigId) + .add("deltaSharingOrganizationName", deltaSharingOrganizationName) + .add( + "deltaSharingRecipientTokenLifetimeInSeconds", + deltaSharingRecipientTokenLifetimeInSeconds) + .add("deltaSharingScope", deltaSharingScope) + .add("externalAccessEnabled", externalAccessEnabled) + .add("globalMetastoreId", globalMetastoreId) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("privilegeModelVersion", privilegeModelVersion) + .add("region", region) + .add("storageRoot", storageRoot) + .add("storageRootCredentialId", storageRootCredentialId) + .add("storageRootCredentialName", storageRootCredentialName) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index c25f726bd..f468b89cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -114,9 +114,18 @@ public MetastoreInfo get(GetMetastoreRequest request) { * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. 
*/ - public Iterable list() { + public Iterable list(ListMetastoresRequest request) { return new Paginator<>( - null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null); + request, + impl::list, + ListMetastoresResponse::getMetastores, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java index 87e65c5ac..2271a060b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java @@ -22,7 +22,7 @@ public void assign(CreateMetastoreAssignment request) { String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, AssignResponse.class); @@ -36,7 +36,7 @@ public MetastoreInfo create(CreateMetastore request) { String path = "/api/2.1/unity-catalog/metastores"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, MetastoreInfo.class); @@ -62,7 +62,7 @@ public void delete(DeleteMetastoreRequest request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -75,7 +75,7 @@ public MetastoreInfo get(GetMetastoreRequest request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, MetastoreInfo.class); } catch (IOException e) { @@ -84,10 +84,11 @@ public MetastoreInfo get(GetMetastoreRequest request) { } @Override - public ListMetastoresResponse list() { + public ListMetastoresResponse list(ListMetastoresRequest request) { String path = "/api/2.1/unity-catalog/metastores"; try { Request req = new Request("GET", path); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListMetastoresResponse.class); } catch (IOException e) { @@ -113,7 +114,7 @@ public void unassign(UnassignRequest request) { String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, UnassignResponse.class); } catch (IOException e) { @@ -126,7 +127,7 @@ public MetastoreInfo update(UpdateMetastore request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, MetastoreInfo.class); @@ -141,7 +142,7 @@ public void updateAssignment(UpdateMetastoreAssignment request) 
{ String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateAssignmentResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java index ab8ed1a27..bfad43db3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java @@ -70,7 +70,7 @@ public interface MetastoresService { * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. */ - ListMetastoresResponse list(); + ListMetastoresResponse list(ListMetastoresRequest listMetastoresRequest); /** * Get a metastore summary. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java index 8dbd67ae1..667a7c092 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java @@ -4,75 +4,72 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ModelVersionInfo.ModelVersionInfoSerializer.class) +@JsonDeserialize(using = ModelVersionInfo.ModelVersionInfoDeserializer.class) public class ModelVersionInfo { /** List of aliases associated with the model version */ - @JsonProperty("aliases") private Collection aliases; /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. 
*/ - @JsonProperty("browse_only") private Boolean browseOnly; /** The name of the catalog containing the model version */ - @JsonProperty("catalog_name") private String catalogName; /** The comment attached to the model version */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("created_at") private Long createdAt; /** The identifier of the user who created the model version */ - @JsonProperty("created_by") private String createdBy; /** The unique identifier of the model version */ - @JsonProperty("id") private String id; /** The unique identifier of the metastore containing the model version */ - @JsonProperty("metastore_id") private String metastoreId; /** The name of the parent registered model of the model version, relative to parent schema */ - @JsonProperty("model_name") private String modelName; /** Model version dependencies, for feature-store packaged models */ - @JsonProperty("model_version_dependencies") private DependencyList modelVersionDependencies; /** * MLflow run ID used when creating the model version, if ``source`` was generated by an * experiment run stored in an MLflow tracking server */ - @JsonProperty("run_id") private String runId; /** * ID of the Databricks workspace containing the MLflow run that generated this model version, if * applicable */ - @JsonProperty("run_workspace_id") private Long runWorkspaceId; /** The name of the schema containing the model version, relative to parent catalog */ - @JsonProperty("schema_name") private String schemaName; /** URI indicating the location of the source artifacts (files) for the model version */ - @JsonProperty("source") private String source; /** @@ -81,23 +78,18 @@ public class ModelVersionInfo { * version is finalized. Only model versions in READY status can be loaded for inference or * served. 
*/ - @JsonProperty("status") private ModelVersionInfoStatus status; /** The storage location on the cloud under which model version data files are stored */ - @JsonProperty("storage_location") private String storageLocation; /** */ - @JsonProperty("updated_at") private Long updatedAt; /** The identifier of the user who updated the model version last time */ - @JsonProperty("updated_by") private String updatedBy; /** Integer model version number, used to reference the model version in API requests. */ - @JsonProperty("version") private Long version; public ModelVersionInfo setAliases(Collection aliases) { @@ -345,4 +337,74 @@ public String toString() { .add("version", version) .toString(); } + + ModelVersionInfoPb toPb() { + ModelVersionInfoPb pb = new ModelVersionInfoPb(); + pb.setAliases(aliases); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setId(id); + pb.setMetastoreId(metastoreId); + pb.setModelName(modelName); + pb.setModelVersionDependencies(modelVersionDependencies); + pb.setRunId(runId); + pb.setRunWorkspaceId(runWorkspaceId); + pb.setSchemaName(schemaName); + pb.setSource(source); + pb.setStatus(status); + pb.setStorageLocation(storageLocation); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setVersion(version); + + return pb; + } + + static ModelVersionInfo fromPb(ModelVersionInfoPb pb) { + ModelVersionInfo model = new ModelVersionInfo(); + model.setAliases(pb.getAliases()); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setId(pb.getId()); + model.setMetastoreId(pb.getMetastoreId()); + model.setModelName(pb.getModelName()); + model.setModelVersionDependencies(pb.getModelVersionDependencies()); + model.setRunId(pb.getRunId()); + 
model.setRunWorkspaceId(pb.getRunWorkspaceId()); + model.setSchemaName(pb.getSchemaName()); + model.setSource(pb.getSource()); + model.setStatus(pb.getStatus()); + model.setStorageLocation(pb.getStorageLocation()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ModelVersionInfoSerializer extends JsonSerializer { + @Override + public void serialize(ModelVersionInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelVersionInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelVersionInfoDeserializer extends JsonDeserializer { + @Override + public ModelVersionInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelVersionInfoPb pb = mapper.readValue(p, ModelVersionInfoPb.class); + return ModelVersionInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoPb.java new file mode 100755 index 000000000..b962f6be9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoPb.java @@ -0,0 +1,315 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ModelVersionInfoPb { + @JsonProperty("aliases") + private Collection aliases; + + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("id") + private String id; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("model_version_dependencies") + private DependencyList modelVersionDependencies; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_workspace_id") + private Long runWorkspaceId; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("source") + private String source; + + @JsonProperty("status") + private ModelVersionInfoStatus status; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("version") + private Long version; + + public ModelVersionInfoPb setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public ModelVersionInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public ModelVersionInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + 
+ public ModelVersionInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ModelVersionInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ModelVersionInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ModelVersionInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ModelVersionInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ModelVersionInfoPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ModelVersionInfoPb setModelVersionDependencies(DependencyList modelVersionDependencies) { + this.modelVersionDependencies = modelVersionDependencies; + return this; + } + + public DependencyList getModelVersionDependencies() { + return modelVersionDependencies; + } + + public ModelVersionInfoPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ModelVersionInfoPb setRunWorkspaceId(Long runWorkspaceId) { + this.runWorkspaceId = runWorkspaceId; + return this; + } + + public Long getRunWorkspaceId() { + return runWorkspaceId; + } + + public ModelVersionInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public ModelVersionInfoPb setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public ModelVersionInfoPb setStatus(ModelVersionInfoStatus status) 
{ + this.status = status; + return this; + } + + public ModelVersionInfoStatus getStatus() { + return status; + } + + public ModelVersionInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public ModelVersionInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ModelVersionInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public ModelVersionInfoPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelVersionInfoPb that = (ModelVersionInfoPb) o; + return Objects.equals(aliases, that.aliases) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(id, that.id) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(modelName, that.modelName) + && Objects.equals(modelVersionDependencies, that.modelVersionDependencies) + && Objects.equals(runId, that.runId) + && Objects.equals(runWorkspaceId, that.runWorkspaceId) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(source, that.source) + && Objects.equals(status, that.status) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash( + 
aliases, + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + id, + metastoreId, + modelName, + modelVersionDependencies, + runId, + runWorkspaceId, + schemaName, + source, + status, + storageLocation, + updatedAt, + updatedBy, + version); + } + + @Override + public String toString() { + return new ToStringer(ModelVersionInfoPb.class) + .add("aliases", aliases) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("id", id) + .add("metastoreId", metastoreId) + .add("modelName", modelName) + .add("modelVersionDependencies", modelVersionDependencies) + .add("runId", runId) + .add("runWorkspaceId", runWorkspaceId) + .add("schemaName", schemaName) + .add("source", source) + .add("status", status) + .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java index b847105e0..fbee9f415 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java @@ -24,7 +24,7 @@ public void delete(DeleteModelVersionRequest request) { request.getFullName(), request.getVersion()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -39,7 +39,7 @@ public ModelVersionInfo get(GetModelVersionRequest request) { request.getFullName(), request.getVersion()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, 
request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ModelVersionInfo.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public ModelVersionInfo getByAlias(GetByAliasRequest request) { request.getFullName(), request.getAlias()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ModelVersionInfo.class); } catch (IOException e) { @@ -68,7 +68,7 @@ public ListModelVersionsResponse list(ListModelVersionsRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s/versions", request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListModelVersionsResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public ModelVersionInfo update(UpdateModelVersionRequest request) { request.getFullName(), request.getVersion()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ModelVersionInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java index c8135aa11..7cf74639d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorCronSchedule.MonitorCronScheduleSerializer.class) +@JsonDeserialize(using = MonitorCronSchedule.MonitorCronScheduleDeserializer.class) public class MonitorCronSchedule { /** Read only field that indicates whether a schedule is paused or not. */ - @JsonProperty("pause_status") private MonitorCronSchedulePauseStatus pauseStatus; /** @@ -19,11 +29,9 @@ public class MonitorCronSchedule { *

[examples]: * https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html */ - @JsonProperty("quartz_cron_expression") private String quartzCronExpression; /** The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression. */ - @JsonProperty("timezone_id") private String timezoneId; public MonitorCronSchedule setPauseStatus(MonitorCronSchedulePauseStatus pauseStatus) { @@ -76,4 +84,43 @@ public String toString() { .add("timezoneId", timezoneId) .toString(); } + + MonitorCronSchedulePb toPb() { + MonitorCronSchedulePb pb = new MonitorCronSchedulePb(); + pb.setPauseStatus(pauseStatus); + pb.setQuartzCronExpression(quartzCronExpression); + pb.setTimezoneId(timezoneId); + + return pb; + } + + static MonitorCronSchedule fromPb(MonitorCronSchedulePb pb) { + MonitorCronSchedule model = new MonitorCronSchedule(); + model.setPauseStatus(pb.getPauseStatus()); + model.setQuartzCronExpression(pb.getQuartzCronExpression()); + model.setTimezoneId(pb.getTimezoneId()); + + return model; + } + + public static class MonitorCronScheduleSerializer extends JsonSerializer { + @Override + public void serialize(MonitorCronSchedule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorCronSchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorCronScheduleDeserializer + extends JsonDeserializer { + @Override + public MonitorCronSchedule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorCronSchedulePb pb = mapper.readValue(p, MonitorCronSchedulePb.class); + return MonitorCronSchedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePb.java new file mode 100755 index 000000000..bd1c97a65 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MonitorCronSchedulePb { + @JsonProperty("pause_status") + private MonitorCronSchedulePauseStatus pauseStatus; + + @JsonProperty("quartz_cron_expression") + private String quartzCronExpression; + + @JsonProperty("timezone_id") + private String timezoneId; + + public MonitorCronSchedulePb setPauseStatus(MonitorCronSchedulePauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public MonitorCronSchedulePauseStatus getPauseStatus() { + return pauseStatus; + } + + public MonitorCronSchedulePb setQuartzCronExpression(String quartzCronExpression) { + this.quartzCronExpression = quartzCronExpression; + return this; + } + + public String getQuartzCronExpression() { + return quartzCronExpression; + } + + public MonitorCronSchedulePb setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorCronSchedulePb that = 
(MonitorCronSchedulePb) o; + return Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(quartzCronExpression, that.quartzCronExpression) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus, quartzCronExpression, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(MonitorCronSchedulePb.class) + .add("pauseStatus", pauseStatus) + .add("quartzCronExpression", quartzCronExpression) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java index 814e2b9ce..4ba4bcaa7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = MonitorDataClassificationConfig.MonitorDataClassificationConfigSerializer.class) +@JsonDeserialize( + using = 
MonitorDataClassificationConfig.MonitorDataClassificationConfigDeserializer.class) public class MonitorDataClassificationConfig { /** Whether data classification is enabled. */ - @JsonProperty("enabled") private Boolean enabled; public MonitorDataClassificationConfig setEnabled(Boolean enabled) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(MonitorDataClassificationConfig.class).add("enabled", enabled).toString(); } + + MonitorDataClassificationConfigPb toPb() { + MonitorDataClassificationConfigPb pb = new MonitorDataClassificationConfigPb(); + pb.setEnabled(enabled); + + return pb; + } + + static MonitorDataClassificationConfig fromPb(MonitorDataClassificationConfigPb pb) { + MonitorDataClassificationConfig model = new MonitorDataClassificationConfig(); + model.setEnabled(pb.getEnabled()); + + return model; + } + + public static class MonitorDataClassificationConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + MonitorDataClassificationConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorDataClassificationConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorDataClassificationConfigDeserializer + extends JsonDeserializer { + @Override + public MonitorDataClassificationConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorDataClassificationConfigPb pb = + mapper.readValue(p, MonitorDataClassificationConfigPb.class); + return MonitorDataClassificationConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfigPb.java new file mode 100755 index 000000000..0dd0bad3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfigPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MonitorDataClassificationConfigPb { + @JsonProperty("enabled") + private Boolean enabled; + + public MonitorDataClassificationConfigPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorDataClassificationConfigPb that = (MonitorDataClassificationConfigPb) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(MonitorDataClassificationConfigPb.class) + .add("enabled", enabled) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java index d34b42ae2..059f48ec2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorDestination.MonitorDestinationSerializer.class) +@JsonDeserialize(using = MonitorDestination.MonitorDestinationDeserializer.class) public class MonitorDestination { /** * The list of email addresses to send the notification to. A maximum of 5 email addresses is * supported. 
*/ - @JsonProperty("email_addresses") private Collection emailAddresses; public MonitorDestination setEmailAddresses(Collection emailAddresses) { @@ -45,4 +55,38 @@ public String toString() { .add("emailAddresses", emailAddresses) .toString(); } + + MonitorDestinationPb toPb() { + MonitorDestinationPb pb = new MonitorDestinationPb(); + pb.setEmailAddresses(emailAddresses); + + return pb; + } + + static MonitorDestination fromPb(MonitorDestinationPb pb) { + MonitorDestination model = new MonitorDestination(); + model.setEmailAddresses(pb.getEmailAddresses()); + + return model; + } + + public static class MonitorDestinationSerializer extends JsonSerializer { + @Override + public void serialize(MonitorDestination value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorDestinationDeserializer extends JsonDeserializer { + @Override + public MonitorDestination deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorDestinationPb pb = mapper.readValue(p, MonitorDestinationPb.class); + return MonitorDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinationPb.java new file mode 100755 index 000000000..981abcfb0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinationPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class MonitorDestinationPb { + @JsonProperty("email_addresses") + private Collection emailAddresses; + + public MonitorDestinationPb setEmailAddresses(Collection emailAddresses) { + this.emailAddresses = emailAddresses; + return this; + } + + public Collection getEmailAddresses() { + return emailAddresses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorDestinationPb that = (MonitorDestinationPb) o; + return Objects.equals(emailAddresses, that.emailAddresses); + } + + @Override + public int hashCode() { + return Objects.hash(emailAddresses); + } + + @Override + public String toString() { + return new ToStringer(MonitorDestinationPb.class) + .add("emailAddresses", emailAddresses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java index 5ccc716a2..ec900d4bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java @@ -4,33 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorInferenceLog.MonitorInferenceLogSerializer.class) +@JsonDeserialize(using = MonitorInferenceLog.MonitorInferenceLogDeserializer.class) public class MonitorInferenceLog { /** * Granularities for aggregating data into time windows based on their timestamp. Currently the * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, * ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}. */ - @JsonProperty("granularities") private Collection granularities; /** Optional column that contains the ground truth for the prediction. */ - @JsonProperty("label_col") private String labelCol; /** * Column that contains the id of the model generating the predictions. Metrics will be computed * per model id by default, and also across all model ids. */ - @JsonProperty("model_id_col") private String modelIdCol; /** Column that contains the output/prediction from the model. */ - @JsonProperty("prediction_col") private String predictionCol; /** @@ -38,14 +45,12 @@ public class MonitorInferenceLog { * problem type. The values in this column should be a map, mapping each class label to the * prediction probability for a given sample. The map should be of PySpark MapType(). */ - @JsonProperty("prediction_proba_col") private String predictionProbaCol; /** * Problem type the model aims to solve. Determines the type of model-quality metrics that will be * computed. */ - @JsonProperty("problem_type") private MonitorInferenceLogProblemType problemType; /** @@ -56,7 +61,6 @@ public class MonitorInferenceLog { *

[function]: * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html */ - @JsonProperty("timestamp_col") private String timestampCol; public MonitorInferenceLog setGranularities(Collection granularities) { @@ -160,4 +164,51 @@ public String toString() { .add("timestampCol", timestampCol) .toString(); } + + MonitorInferenceLogPb toPb() { + MonitorInferenceLogPb pb = new MonitorInferenceLogPb(); + pb.setGranularities(granularities); + pb.setLabelCol(labelCol); + pb.setModelIdCol(modelIdCol); + pb.setPredictionCol(predictionCol); + pb.setPredictionProbaCol(predictionProbaCol); + pb.setProblemType(problemType); + pb.setTimestampCol(timestampCol); + + return pb; + } + + static MonitorInferenceLog fromPb(MonitorInferenceLogPb pb) { + MonitorInferenceLog model = new MonitorInferenceLog(); + model.setGranularities(pb.getGranularities()); + model.setLabelCol(pb.getLabelCol()); + model.setModelIdCol(pb.getModelIdCol()); + model.setPredictionCol(pb.getPredictionCol()); + model.setPredictionProbaCol(pb.getPredictionProbaCol()); + model.setProblemType(pb.getProblemType()); + model.setTimestampCol(pb.getTimestampCol()); + + return model; + } + + public static class MonitorInferenceLogSerializer extends JsonSerializer { + @Override + public void serialize(MonitorInferenceLog value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorInferenceLogPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorInferenceLogDeserializer + extends JsonDeserializer { + @Override + public MonitorInferenceLog deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorInferenceLogPb pb = mapper.readValue(p, MonitorInferenceLogPb.class); + return MonitorInferenceLog.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogPb.java new file mode 100755 index 000000000..e4bb202be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogPb.java @@ -0,0 +1,135 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class MonitorInferenceLogPb { + @JsonProperty("granularities") + private Collection granularities; + + @JsonProperty("label_col") + private String labelCol; + + @JsonProperty("model_id_col") + private String modelIdCol; + + @JsonProperty("prediction_col") + private String predictionCol; + + @JsonProperty("prediction_proba_col") + private String predictionProbaCol; + + @JsonProperty("problem_type") + private MonitorInferenceLogProblemType problemType; + + @JsonProperty("timestamp_col") + private String timestampCol; + + public MonitorInferenceLogPb setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public MonitorInferenceLogPb setLabelCol(String labelCol) { + this.labelCol = labelCol; + return this; + } + + public String getLabelCol() { + return labelCol; + } + + public MonitorInferenceLogPb setModelIdCol(String modelIdCol) { + this.modelIdCol = modelIdCol; + return this; + } + + public String getModelIdCol() { + return modelIdCol; + } + + 
public MonitorInferenceLogPb setPredictionCol(String predictionCol) { + this.predictionCol = predictionCol; + return this; + } + + public String getPredictionCol() { + return predictionCol; + } + + public MonitorInferenceLogPb setPredictionProbaCol(String predictionProbaCol) { + this.predictionProbaCol = predictionProbaCol; + return this; + } + + public String getPredictionProbaCol() { + return predictionProbaCol; + } + + public MonitorInferenceLogPb setProblemType(MonitorInferenceLogProblemType problemType) { + this.problemType = problemType; + return this; + } + + public MonitorInferenceLogProblemType getProblemType() { + return problemType; + } + + public MonitorInferenceLogPb setTimestampCol(String timestampCol) { + this.timestampCol = timestampCol; + return this; + } + + public String getTimestampCol() { + return timestampCol; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorInferenceLogPb that = (MonitorInferenceLogPb) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(labelCol, that.labelCol) + && Objects.equals(modelIdCol, that.modelIdCol) + && Objects.equals(predictionCol, that.predictionCol) + && Objects.equals(predictionProbaCol, that.predictionProbaCol) + && Objects.equals(problemType, that.problemType) + && Objects.equals(timestampCol, that.timestampCol); + } + + @Override + public int hashCode() { + return Objects.hash( + granularities, + labelCol, + modelIdCol, + predictionCol, + predictionProbaCol, + problemType, + timestampCol); + } + + @Override + public String toString() { + return new ToStringer(MonitorInferenceLogPb.class) + .add("granularities", granularities) + .add("labelCol", labelCol) + .add("modelIdCol", modelIdCol) + .add("predictionCol", predictionCol) + .add("predictionProbaCol", predictionProbaCol) + .add("problemType", problemType) + .add("timestampCol", timestampCol) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java index aac4fa412..e70921a2e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorInfo.MonitorInfoSerializer.class) +@JsonDeserialize(using = MonitorInfo.MonitorInfoDeserializer.class) public class MonitorInfo { /** The directory to store monitoring assets (e.g. dashboard, metric tables). */ - @JsonProperty("assets_dir") private String assetsDir; /** * Name of the baseline table from which drift metrics are computed from. Columns in the monitored * table should also be present in the baseline table. */ - @JsonProperty("baseline_table_name") private String baselineTableName; /** @@ -26,56 +35,45 @@ public class MonitorInfo { * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across * time windows). */ - @JsonProperty("custom_metrics") private Collection customMetrics; /** * Id of dashboard that visualizes the computed metrics. 
This can be empty if the monitor is in * PENDING state. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The data classification config for the monitor. */ - @JsonProperty("data_classification_config") private MonitorDataClassificationConfig dataClassificationConfig; /** * The full name of the drift metrics table. Format: * __catalog_name__.__schema_name__.__table_name__. */ - @JsonProperty("drift_metrics_table_name") private String driftMetricsTableName; /** Configuration for monitoring inference logs. */ - @JsonProperty("inference_log") private MonitorInferenceLog inferenceLog; /** The latest failure message of the monitor (if any). */ - @JsonProperty("latest_monitor_failure_msg") private String latestMonitorFailureMsg; /** The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted. */ - @JsonProperty("monitor_version") private String monitorVersion; /** The notification settings for the monitor. */ - @JsonProperty("notifications") private MonitorNotifications notifications; /** Schema where output metric tables are created. */ - @JsonProperty("output_schema_name") private String outputSchemaName; /** * The full name of the profile metrics table. Format: * __catalog_name__.__schema_name__.__table_name__. */ - @JsonProperty("profile_metrics_table_name") private String profileMetricsTableName; /** The schedule for automatically updating and refreshing metric tables. */ - @JsonProperty("schedule") private MonitorCronSchedule schedule; /** @@ -84,25 +82,20 @@ public class MonitorInfo { * complements. For high-cardinality columns, only the top 100 unique values by frequency will * generate slices. */ - @JsonProperty("slicing_exprs") private Collection slicingExprs; /** Configuration for monitoring snapshot tables. */ - @JsonProperty("snapshot") private MonitorSnapshot snapshot; /** The status of the monitor. */ - @JsonProperty("status") private MonitorInfoStatus status; /** * The full name of the table to monitor. 
Format: __catalog_name__.__schema_name__.__table_name__. */ - @JsonProperty("table_name") private String tableName; /** Configuration for monitoring time series tables. */ - @JsonProperty("time_series") private MonitorTimeSeries timeSeries; public MonitorInfo setAssetsDir(String assetsDir) { @@ -339,4 +332,71 @@ public String toString() { .add("timeSeries", timeSeries) .toString(); } + + MonitorInfoPb toPb() { + MonitorInfoPb pb = new MonitorInfoPb(); + pb.setAssetsDir(assetsDir); + pb.setBaselineTableName(baselineTableName); + pb.setCustomMetrics(customMetrics); + pb.setDashboardId(dashboardId); + pb.setDataClassificationConfig(dataClassificationConfig); + pb.setDriftMetricsTableName(driftMetricsTableName); + pb.setInferenceLog(inferenceLog); + pb.setLatestMonitorFailureMsg(latestMonitorFailureMsg); + pb.setMonitorVersion(monitorVersion); + pb.setNotifications(notifications); + pb.setOutputSchemaName(outputSchemaName); + pb.setProfileMetricsTableName(profileMetricsTableName); + pb.setSchedule(schedule); + pb.setSlicingExprs(slicingExprs); + pb.setSnapshot(snapshot); + pb.setStatus(status); + pb.setTableName(tableName); + pb.setTimeSeries(timeSeries); + + return pb; + } + + static MonitorInfo fromPb(MonitorInfoPb pb) { + MonitorInfo model = new MonitorInfo(); + model.setAssetsDir(pb.getAssetsDir()); + model.setBaselineTableName(pb.getBaselineTableName()); + model.setCustomMetrics(pb.getCustomMetrics()); + model.setDashboardId(pb.getDashboardId()); + model.setDataClassificationConfig(pb.getDataClassificationConfig()); + model.setDriftMetricsTableName(pb.getDriftMetricsTableName()); + model.setInferenceLog(pb.getInferenceLog()); + model.setLatestMonitorFailureMsg(pb.getLatestMonitorFailureMsg()); + model.setMonitorVersion(pb.getMonitorVersion()); + model.setNotifications(pb.getNotifications()); + model.setOutputSchemaName(pb.getOutputSchemaName()); + model.setProfileMetricsTableName(pb.getProfileMetricsTableName()); + model.setSchedule(pb.getSchedule()); + 
model.setSlicingExprs(pb.getSlicingExprs()); + model.setSnapshot(pb.getSnapshot()); + model.setStatus(pb.getStatus()); + model.setTableName(pb.getTableName()); + model.setTimeSeries(pb.getTimeSeries()); + + return model; + } + + public static class MonitorInfoSerializer extends JsonSerializer { + @Override + public void serialize(MonitorInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorInfoDeserializer extends JsonDeserializer { + @Override + public MonitorInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorInfoPb pb = mapper.readValue(p, MonitorInfoPb.class); + return MonitorInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoPb.java new file mode 100755 index 000000000..51bcb89b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoPb.java @@ -0,0 +1,301 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class MonitorInfoPb { + @JsonProperty("assets_dir") + private String assetsDir; + + @JsonProperty("baseline_table_name") + private String baselineTableName; + + @JsonProperty("custom_metrics") + private Collection customMetrics; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + @JsonProperty("drift_metrics_table_name") + private String driftMetricsTableName; + + @JsonProperty("inference_log") + private MonitorInferenceLog inferenceLog; + + @JsonProperty("latest_monitor_failure_msg") + private String latestMonitorFailureMsg; + + @JsonProperty("monitor_version") + private String monitorVersion; + + @JsonProperty("notifications") + private MonitorNotifications notifications; + + @JsonProperty("output_schema_name") + private String outputSchemaName; + + @JsonProperty("profile_metrics_table_name") + private String profileMetricsTableName; + + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + @JsonProperty("snapshot") + private MonitorSnapshot snapshot; + + @JsonProperty("status") + private MonitorInfoStatus status; + + @JsonProperty("table_name") + private String tableName; + + @JsonProperty("time_series") + private MonitorTimeSeries timeSeries; + + public MonitorInfoPb setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public MonitorInfoPb setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { 
+ return baselineTableName; + } + + public MonitorInfoPb setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public MonitorInfoPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public MonitorInfoPb setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public MonitorInfoPb setDriftMetricsTableName(String driftMetricsTableName) { + this.driftMetricsTableName = driftMetricsTableName; + return this; + } + + public String getDriftMetricsTableName() { + return driftMetricsTableName; + } + + public MonitorInfoPb setInferenceLog(MonitorInferenceLog inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLog getInferenceLog() { + return inferenceLog; + } + + public MonitorInfoPb setLatestMonitorFailureMsg(String latestMonitorFailureMsg) { + this.latestMonitorFailureMsg = latestMonitorFailureMsg; + return this; + } + + public String getLatestMonitorFailureMsg() { + return latestMonitorFailureMsg; + } + + public MonitorInfoPb setMonitorVersion(String monitorVersion) { + this.monitorVersion = monitorVersion; + return this; + } + + public String getMonitorVersion() { + return monitorVersion; + } + + public MonitorInfoPb setNotifications(MonitorNotifications notifications) { + this.notifications = notifications; + return this; + } + + public MonitorNotifications getNotifications() { + return notifications; + } + + public MonitorInfoPb setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String 
getOutputSchemaName() { + return outputSchemaName; + } + + public MonitorInfoPb setProfileMetricsTableName(String profileMetricsTableName) { + this.profileMetricsTableName = profileMetricsTableName; + return this; + } + + public String getProfileMetricsTableName() { + return profileMetricsTableName; + } + + public MonitorInfoPb setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public MonitorInfoPb setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public MonitorInfoPb setSnapshot(MonitorSnapshot snapshot) { + this.snapshot = snapshot; + return this; + } + + public MonitorSnapshot getSnapshot() { + return snapshot; + } + + public MonitorInfoPb setStatus(MonitorInfoStatus status) { + this.status = status; + return this; + } + + public MonitorInfoStatus getStatus() { + return status; + } + + public MonitorInfoPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public MonitorInfoPb setTimeSeries(MonitorTimeSeries timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeries getTimeSeries() { + return timeSeries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorInfoPb that = (MonitorInfoPb) o; + return Objects.equals(assetsDir, that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(driftMetricsTableName, that.driftMetricsTableName) + && Objects.equals(inferenceLog, that.inferenceLog) + && 
Objects.equals(latestMonitorFailureMsg, that.latestMonitorFailureMsg) + && Objects.equals(monitorVersion, that.monitorVersion) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(profileMetricsTableName, that.profileMetricsTableName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(status, that.status) + && Objects.equals(tableName, that.tableName) + && Objects.equals(timeSeries, that.timeSeries); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + dashboardId, + dataClassificationConfig, + driftMetricsTableName, + inferenceLog, + latestMonitorFailureMsg, + monitorVersion, + notifications, + outputSchemaName, + profileMetricsTableName, + schedule, + slicingExprs, + snapshot, + status, + tableName, + timeSeries); + } + + @Override + public String toString() { + return new ToStringer(MonitorInfoPb.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dashboardId", dashboardId) + .add("dataClassificationConfig", dataClassificationConfig) + .add("driftMetricsTableName", driftMetricsTableName) + .add("inferenceLog", inferenceLog) + .add("latestMonitorFailureMsg", latestMonitorFailureMsg) + .add("monitorVersion", monitorVersion) + .add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("profileMetricsTableName", profileMetricsTableName) + .add("schedule", schedule) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("status", status) + .add("tableName", tableName) + .add("timeSeries", timeSeries) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java index 1f6cdd963..89b55bb8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorMetric.MonitorMetricSerializer.class) +@JsonDeserialize(using = MonitorMetric.MonitorMetricDeserializer.class) public class MonitorMetric { /** * Jinja template for a SQL expression that specifies how to compute the metric. See [create @@ -17,22 +28,18 @@ public class MonitorMetric { *

[create metric definition]: * https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition */ - @JsonProperty("definition") private String definition; /** * A list of column names in the input table the metric should be computed for. Can use * ``":table"`` to indicate that the metric needs information from multiple columns. */ - @JsonProperty("input_columns") private Collection inputColumns; /** Name of the metric in the output tables. */ - @JsonProperty("name") private String name; /** The output type of the custom metric. */ - @JsonProperty("output_data_type") private String outputDataType; /** @@ -44,7 +51,6 @@ public class MonitorMetric { * columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate * metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics */ - @JsonProperty("type") private MonitorMetricType typeValue; public MonitorMetric setDefinition(String definition) { @@ -119,4 +125,45 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + MonitorMetricPb toPb() { + MonitorMetricPb pb = new MonitorMetricPb(); + pb.setDefinition(definition); + pb.setInputColumns(inputColumns); + pb.setName(name); + pb.setOutputDataType(outputDataType); + pb.setType(typeValue); + + return pb; + } + + static MonitorMetric fromPb(MonitorMetricPb pb) { + MonitorMetric model = new MonitorMetric(); + model.setDefinition(pb.getDefinition()); + model.setInputColumns(pb.getInputColumns()); + model.setName(pb.getName()); + model.setOutputDataType(pb.getOutputDataType()); + model.setType(pb.getType()); + + return model; + } + + public static class MonitorMetricSerializer extends JsonSerializer { + @Override + public void serialize(MonitorMetric value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorMetricPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorMetricDeserializer 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.catalog;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format ("Pb") counterpart of {@code MonitorMetric}: carries the JSON property-name
 * mappings so the public model class can delegate (de)serialization to this type.
 */
@Generated
class MonitorMetricPb {
  /** Jinja template for the SQL expression that computes the metric. */
  @JsonProperty("definition")
  private String definition;

  /**
   * Column names in the input table the metric is computed for; {@code ":table"} indicates the
   * metric needs information from multiple columns.
   */
  @JsonProperty("input_columns")
  private Collection<String> inputColumns;

  /** Name of the metric in the output tables. */
  @JsonProperty("name")
  private String name;

  /** The output type of the custom metric. */
  @JsonProperty("output_data_type")
  private String outputDataType;

  /** Kind of custom metric (aggregate / derived / drift). */
  @JsonProperty("type")
  private MonitorMetricType typeValue;

  public MonitorMetricPb setDefinition(String definition) {
    this.definition = definition;
    return this;
  }

  public String getDefinition() {
    return definition;
  }

  public MonitorMetricPb setInputColumns(Collection<String> inputColumns) {
    this.inputColumns = inputColumns;
    return this;
  }

  public Collection<String> getInputColumns() {
    return inputColumns;
  }

  public MonitorMetricPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public MonitorMetricPb setOutputDataType(String outputDataType) {
    this.outputDataType = outputDataType;
    return this;
  }

  public String getOutputDataType() {
    return outputDataType;
  }

  public MonitorMetricPb setType(MonitorMetricType typeValue) {
    this.typeValue = typeValue;
    return this;
  }

  public MonitorMetricType getType() {
    return typeValue;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    MonitorMetricPb that = (MonitorMetricPb) o;
    return Objects.equals(definition, that.definition)
        && Objects.equals(inputColumns, that.inputColumns)
        && Objects.equals(name, that.name)
        && Objects.equals(outputDataType, that.outputDataType)
        && Objects.equals(typeValue, that.typeValue);
  }

  @Override
  public int hashCode() {
    return Objects.hash(definition, inputColumns, name, outputDataType, typeValue);
  }

  @Override
  public String toString() {
    return new ToStringer(MonitorMetricPb.class)
        .add("definition", definition)
        .add("inputColumns", inputColumns)
        .add("name", name)
        .add("outputDataType", outputDataType)
        .add("typeValue", typeValue)
        .toString();
  }
}
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorNotifications.MonitorNotificationsSerializer.class) +@JsonDeserialize(using = MonitorNotifications.MonitorNotificationsDeserializer.class) public class MonitorNotifications { /** Who to send notifications to on monitor failure. */ - @JsonProperty("on_failure") private MonitorDestination onFailure; /** Who to send notifications to when new data classification tags are detected. */ - @JsonProperty("on_new_classification_tag_detected") private MonitorDestination onNewClassificationTagDetected; public MonitorNotifications setOnFailure(MonitorDestination onFailure) { @@ -57,4 +66,42 @@ public String toString() { .add("onNewClassificationTagDetected", onNewClassificationTagDetected) .toString(); } + + MonitorNotificationsPb toPb() { + MonitorNotificationsPb pb = new MonitorNotificationsPb(); + pb.setOnFailure(onFailure); + pb.setOnNewClassificationTagDetected(onNewClassificationTagDetected); + + return pb; + } + + static MonitorNotifications fromPb(MonitorNotificationsPb pb) { + MonitorNotifications model = new MonitorNotifications(); + model.setOnFailure(pb.getOnFailure()); + model.setOnNewClassificationTagDetected(pb.getOnNewClassificationTagDetected()); + + return model; + } + + public static class MonitorNotificationsSerializer extends JsonSerializer { + @Override + public void serialize( + MonitorNotifications value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorNotificationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorNotificationsDeserializer + extends JsonDeserializer { + @Override + public 
MonitorNotifications deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorNotificationsPb pb = mapper.readValue(p, MonitorNotificationsPb.class); + return MonitorNotifications.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsPb.java new file mode 100755 index 000000000..c3cb9a349 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MonitorNotificationsPb { + @JsonProperty("on_failure") + private MonitorDestination onFailure; + + @JsonProperty("on_new_classification_tag_detected") + private MonitorDestination onNewClassificationTagDetected; + + public MonitorNotificationsPb setOnFailure(MonitorDestination onFailure) { + this.onFailure = onFailure; + return this; + } + + public MonitorDestination getOnFailure() { + return onFailure; + } + + public MonitorNotificationsPb setOnNewClassificationTagDetected( + MonitorDestination onNewClassificationTagDetected) { + this.onNewClassificationTagDetected = onNewClassificationTagDetected; + return this; + } + + public MonitorDestination getOnNewClassificationTagDetected() { + return onNewClassificationTagDetected; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorNotificationsPb that = 
(MonitorNotificationsPb) o; + return Objects.equals(onFailure, that.onFailure) + && Objects.equals(onNewClassificationTagDetected, that.onNewClassificationTagDetected); + } + + @Override + public int hashCode() { + return Objects.hash(onFailure, onNewClassificationTagDetected); + } + + @Override + public String toString() { + return new ToStringer(MonitorNotificationsPb.class) + .add("onFailure", onFailure) + .add("onNewClassificationTagDetected", onNewClassificationTagDetected) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java index 15094c0fb..c8a83f83b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java @@ -4,35 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorRefreshInfo.MonitorRefreshInfoSerializer.class) +@JsonDeserialize(using = MonitorRefreshInfo.MonitorRefreshInfoDeserializer.class) public class MonitorRefreshInfo { /** Time at which refresh operation completed (milliseconds since 1/1/1970 UTC). 
*/ - @JsonProperty("end_time_ms") private Long endTimeMs; /** * An optional message to give insight into the current state of the job (e.g. FAILURE messages). */ - @JsonProperty("message") private String message; /** Unique id of the refresh operation. */ - @JsonProperty("refresh_id") private Long refreshId; /** Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC). */ - @JsonProperty("start_time_ms") private Long startTimeMs; /** The current state of the refresh. */ - @JsonProperty("state") private MonitorRefreshInfoState state; /** The method by which the refresh was triggered. */ - @JsonProperty("trigger") private MonitorRefreshInfoTrigger trigger; public MonitorRefreshInfo setEndTimeMs(Long endTimeMs) { @@ -118,4 +123,48 @@ public String toString() { .add("trigger", trigger) .toString(); } + + MonitorRefreshInfoPb toPb() { + MonitorRefreshInfoPb pb = new MonitorRefreshInfoPb(); + pb.setEndTimeMs(endTimeMs); + pb.setMessage(message); + pb.setRefreshId(refreshId); + pb.setStartTimeMs(startTimeMs); + pb.setState(state); + pb.setTrigger(trigger); + + return pb; + } + + static MonitorRefreshInfo fromPb(MonitorRefreshInfoPb pb) { + MonitorRefreshInfo model = new MonitorRefreshInfo(); + model.setEndTimeMs(pb.getEndTimeMs()); + model.setMessage(pb.getMessage()); + model.setRefreshId(pb.getRefreshId()); + model.setStartTimeMs(pb.getStartTimeMs()); + model.setState(pb.getState()); + model.setTrigger(pb.getTrigger()); + + return model; + } + + public static class MonitorRefreshInfoSerializer extends JsonSerializer { + @Override + public void serialize(MonitorRefreshInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorRefreshInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorRefreshInfoDeserializer extends JsonDeserializer { + @Override + public MonitorRefreshInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.catalog;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format ("Pb") twin of {@code MonitorRefreshInfo}; owns the JSON property-name mappings
 * used during (de)serialization.
 */
@Generated
class MonitorRefreshInfoPb {
  /** Time at which the refresh operation completed (milliseconds since 1/1/1970 UTC). */
  @JsonProperty("end_time_ms")
  private Long endTimeMs;

  /** Optional message giving insight into the current state of the job (e.g. failure messages). */
  @JsonProperty("message")
  private String message;

  /** Unique id of the refresh operation. */
  @JsonProperty("refresh_id")
  private Long refreshId;

  /** Time at which the refresh operation was initiated (milliseconds since 1/1/1970 UTC). */
  @JsonProperty("start_time_ms")
  private Long startTimeMs;

  /** The current state of the refresh. */
  @JsonProperty("state")
  private MonitorRefreshInfoState state;

  /** The method by which the refresh was triggered. */
  @JsonProperty("trigger")
  private MonitorRefreshInfoTrigger trigger;

  public MonitorRefreshInfoPb setEndTimeMs(Long endTimeMs) {
    this.endTimeMs = endTimeMs;
    return this;
  }

  public Long getEndTimeMs() {
    return endTimeMs;
  }

  public MonitorRefreshInfoPb setMessage(String message) {
    this.message = message;
    return this;
  }

  public String getMessage() {
    return message;
  }

  public MonitorRefreshInfoPb setRefreshId(Long refreshId) {
    this.refreshId = refreshId;
    return this;
  }

  public Long getRefreshId() {
    return refreshId;
  }

  public MonitorRefreshInfoPb setStartTimeMs(Long startTimeMs) {
    this.startTimeMs = startTimeMs;
    return this;
  }

  public Long getStartTimeMs() {
    return startTimeMs;
  }

  public MonitorRefreshInfoPb setState(MonitorRefreshInfoState state) {
    this.state = state;
    return this;
  }

  public MonitorRefreshInfoState getState() {
    return state;
  }

  public MonitorRefreshInfoPb setTrigger(MonitorRefreshInfoTrigger trigger) {
    this.trigger = trigger;
    return this;
  }

  public MonitorRefreshInfoTrigger getTrigger() {
    return trigger;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    MonitorRefreshInfoPb that = (MonitorRefreshInfoPb) o;
    return Objects.equals(endTimeMs, that.endTimeMs)
        && Objects.equals(message, that.message)
        && Objects.equals(refreshId, that.refreshId)
        && Objects.equals(startTimeMs, that.startTimeMs)
        && Objects.equals(state, that.state)
        && Objects.equals(trigger, that.trigger);
  }

  @Override
  public int hashCode() {
    return Objects.hash(endTimeMs, message, refreshId, startTimeMs, state, trigger);
  }

  @Override
  public String toString() {
    return new ToStringer(MonitorRefreshInfoPb.class)
        .add("endTimeMs", endTimeMs)
        .add("message", message)
        .add("refreshId", refreshId)
        .add("startTimeMs", startTimeMs)
        .add("state", state)
        .add("trigger", trigger)
        .toString();
  }
}
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorRefreshListResponse.MonitorRefreshListResponseSerializer.class) +@JsonDeserialize(using = MonitorRefreshListResponse.MonitorRefreshListResponseDeserializer.class) public class MonitorRefreshListResponse { /** List of refreshes. */ - @JsonProperty("refreshes") private Collection refreshes; public MonitorRefreshListResponse setRefreshes(Collection refreshes) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(MonitorRefreshListResponse.class).add("refreshes", refreshes).toString(); } + + MonitorRefreshListResponsePb toPb() { + MonitorRefreshListResponsePb pb = new MonitorRefreshListResponsePb(); + pb.setRefreshes(refreshes); + + return pb; + } + + static MonitorRefreshListResponse fromPb(MonitorRefreshListResponsePb pb) { + MonitorRefreshListResponse model = new MonitorRefreshListResponse(); + model.setRefreshes(pb.getRefreshes()); + + return model; + } + + public static class MonitorRefreshListResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + MonitorRefreshListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorRefreshListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorRefreshListResponseDeserializer + extends JsonDeserializer { + @Override + public MonitorRefreshListResponse deserialize(JsonParser p, 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.catalog;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format ("Pb") counterpart of {@code MonitorRefreshListResponse}; owns the JSON
 * property-name mapping used during (de)serialization.
 */
@Generated
class MonitorRefreshListResponsePb {
  /** List of refreshes. */
  @JsonProperty("refreshes")
  private Collection<MonitorRefreshInfo> refreshes;

  public MonitorRefreshListResponsePb setRefreshes(Collection<MonitorRefreshInfo> refreshes) {
    this.refreshes = refreshes;
    return this;
  }

  public Collection<MonitorRefreshInfo> getRefreshes() {
    return refreshes;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    MonitorRefreshListResponsePb that = (MonitorRefreshListResponsePb) o;
    return Objects.equals(refreshes, that.refreshes);
  }

  @Override
  public int hashCode() {
    return Objects.hash(refreshes);
  }

  @Override
  public String toString() {
    return new ToStringer(MonitorRefreshListResponsePb.class)
        .add("refreshes", refreshes)
        .toString();
  }
}
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java index c2c63dd78..840177ba6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorSnapshot.MonitorSnapshotSerializer.class) +@JsonDeserialize(using = MonitorSnapshot.MonitorSnapshotDeserializer.class) public class MonitorSnapshot { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(MonitorSnapshot.class).toString(); } + + MonitorSnapshotPb toPb() { + MonitorSnapshotPb pb = new MonitorSnapshotPb(); + + return pb; + } + + static MonitorSnapshot fromPb(MonitorSnapshotPb pb) { + MonitorSnapshot model = new MonitorSnapshot(); + + return model; + } + + public static class MonitorSnapshotSerializer extends JsonSerializer { + @Override + public void serialize(MonitorSnapshot value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorSnapshotPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorSnapshotDeserializer extends JsonDeserializer { + @Override + public MonitorSnapshot deserialize(JsonParser p, 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.catalog;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;

/**
 * Wire-format ("Pb") counterpart of {@code MonitorSnapshot}. The snapshot configuration carries
 * no fields; the type itself is the signal, so equality is purely class-based.
 */
@Generated
class MonitorSnapshotPb {

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    // No state to compare: any two instances of exactly this class are equal.
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    // Zero-argument hash keeps the value consistent with the field-less equals above.
    return Objects.hash();
  }

  @Override
  public String toString() {
    return new ToStringer(MonitorSnapshotPb.class).toString();
  }
}
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MonitorTimeSeries.MonitorTimeSeriesSerializer.class) +@JsonDeserialize(using = MonitorTimeSeries.MonitorTimeSeriesDeserializer.class) public class MonitorTimeSeries { /** * Granularities for aggregating data into time windows based on their timestamp. Currently the * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, * ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}. */ - @JsonProperty("granularities") private Collection granularities; /** @@ -26,7 +36,6 @@ public class MonitorTimeSeries { *

[function]: * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html */ - @JsonProperty("timestamp_col") private String timestampCol; public MonitorTimeSeries setGranularities(Collection granularities) { @@ -68,4 +77,40 @@ public String toString() { .add("timestampCol", timestampCol) .toString(); } + + MonitorTimeSeriesPb toPb() { + MonitorTimeSeriesPb pb = new MonitorTimeSeriesPb(); + pb.setGranularities(granularities); + pb.setTimestampCol(timestampCol); + + return pb; + } + + static MonitorTimeSeries fromPb(MonitorTimeSeriesPb pb) { + MonitorTimeSeries model = new MonitorTimeSeries(); + model.setGranularities(pb.getGranularities()); + model.setTimestampCol(pb.getTimestampCol()); + + return model; + } + + public static class MonitorTimeSeriesSerializer extends JsonSerializer { + @Override + public void serialize(MonitorTimeSeries value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MonitorTimeSeriesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MonitorTimeSeriesDeserializer extends JsonDeserializer { + @Override + public MonitorTimeSeries deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MonitorTimeSeriesPb pb = mapper.readValue(p, MonitorTimeSeriesPb.class); + return MonitorTimeSeries.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesPb.java new file mode 100755 index 000000000..f1b618551 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class MonitorTimeSeriesPb { + @JsonProperty("granularities") + private Collection granularities; + + @JsonProperty("timestamp_col") + private String timestampCol; + + public MonitorTimeSeriesPb setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public MonitorTimeSeriesPb setTimestampCol(String timestampCol) { + this.timestampCol = timestampCol; + return this; + } + + public String getTimestampCol() { + return timestampCol; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorTimeSeriesPb that = (MonitorTimeSeriesPb) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(timestampCol, that.timestampCol); + } + + @Override + public int hashCode() { + return Objects.hash(granularities, timestampCol); + } + + @Override + public String toString() { + return new ToStringer(MonitorTimeSeriesPb.class) + .add("granularities", granularities) + .add("timestampCol", timestampCol) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java index 3ef4eb978..d862bff20 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NamedTableConstraint.NamedTableConstraintSerializer.class) +@JsonDeserialize(using = NamedTableConstraint.NamedTableConstraintDeserializer.class) public class NamedTableConstraint { /** The name of the constraint. */ - @JsonProperty("name") private String name; public NamedTableConstraint setName(String name) { @@ -39,4 +49,40 @@ public int hashCode() { public String toString() { return new ToStringer(NamedTableConstraint.class).add("name", name).toString(); } + + NamedTableConstraintPb toPb() { + NamedTableConstraintPb pb = new NamedTableConstraintPb(); + pb.setName(name); + + return pb; + } + + static NamedTableConstraint fromPb(NamedTableConstraintPb pb) { + NamedTableConstraint model = new NamedTableConstraint(); + model.setName(pb.getName()); + + return model; + } + + public static class NamedTableConstraintSerializer extends JsonSerializer { + @Override + public void serialize( + NamedTableConstraint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NamedTableConstraintPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NamedTableConstraintDeserializer + extends JsonDeserializer { + @Override + public NamedTableConstraint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NamedTableConstraintPb pb = mapper.readValue(p, NamedTableConstraintPb.class); + return NamedTableConstraint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraintPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraintPb.java new file mode 100755 index 000000000..097b76444 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraintPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NamedTableConstraintPb { + @JsonProperty("name") + private String name; + + public NamedTableConstraintPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NamedTableConstraintPb that = (NamedTableConstraintPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(NamedTableConstraintPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java index d31041dde..b2d7f0664 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Online Table information. */ @Generated +@JsonSerialize(using = OnlineTable.OnlineTableSerializer.class) +@JsonDeserialize(using = OnlineTable.OnlineTableDeserializer.class) public class OnlineTable { /** Full three-part (catalog, schema, table) name of the table. */ - @JsonProperty("name") private String name; /** Specification of the online table. */ - @JsonProperty("spec") private OnlineTableSpec spec; /** Online Table data synchronization status */ - @JsonProperty("status") private OnlineTableStatus status; /** Data serving REST API URL for this table */ - @JsonProperty("table_serving_url") private String tableServingUrl; /** @@ -31,7 +38,6 @@ public class OnlineTable { * state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline * may be in "PROVISIONING" as it runs asynchronously). 
*/ - @JsonProperty("unity_catalog_provisioning_state") private ProvisioningInfoState unityCatalogProvisioningState; public OnlineTable setName(String name) { @@ -107,4 +113,45 @@ public String toString() { .add("unityCatalogProvisioningState", unityCatalogProvisioningState) .toString(); } + + OnlineTablePb toPb() { + OnlineTablePb pb = new OnlineTablePb(); + pb.setName(name); + pb.setSpec(spec); + pb.setStatus(status); + pb.setTableServingUrl(tableServingUrl); + pb.setUnityCatalogProvisioningState(unityCatalogProvisioningState); + + return pb; + } + + static OnlineTable fromPb(OnlineTablePb pb) { + OnlineTable model = new OnlineTable(); + model.setName(pb.getName()); + model.setSpec(pb.getSpec()); + model.setStatus(pb.getStatus()); + model.setTableServingUrl(pb.getTableServingUrl()); + model.setUnityCatalogProvisioningState(pb.getUnityCatalogProvisioningState()); + + return model; + } + + public static class OnlineTableSerializer extends JsonSerializer { + @Override + public void serialize(OnlineTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OnlineTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OnlineTableDeserializer extends JsonDeserializer { + @Override + public OnlineTable deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OnlineTablePb pb = mapper.readValue(p, OnlineTablePb.class); + return OnlineTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablePb.java new file mode 100755 index 000000000..79e769633 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Online Table information. */ +@Generated +class OnlineTablePb { + @JsonProperty("name") + private String name; + + @JsonProperty("spec") + private OnlineTableSpec spec; + + @JsonProperty("status") + private OnlineTableStatus status; + + @JsonProperty("table_serving_url") + private String tableServingUrl; + + @JsonProperty("unity_catalog_provisioning_state") + private ProvisioningInfoState unityCatalogProvisioningState; + + public OnlineTablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public OnlineTablePb setSpec(OnlineTableSpec spec) { + this.spec = spec; + return this; + } + + public OnlineTableSpec getSpec() { + return spec; + } + + public OnlineTablePb setStatus(OnlineTableStatus status) { + this.status = status; + return this; + } + + public OnlineTableStatus getStatus() { + return status; + } + + public OnlineTablePb setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + + public OnlineTablePb setUnityCatalogProvisioningState( + 
ProvisioningInfoState unityCatalogProvisioningState) { + this.unityCatalogProvisioningState = unityCatalogProvisioningState; + return this; + } + + public ProvisioningInfoState getUnityCatalogProvisioningState() { + return unityCatalogProvisioningState; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OnlineTablePb that = (OnlineTablePb) o; + return Objects.equals(name, that.name) + && Objects.equals(spec, that.spec) + && Objects.equals(status, that.status) + && Objects.equals(tableServingUrl, that.tableServingUrl) + && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState); + } + + @Override + public int hashCode() { + return Objects.hash(name, spec, status, tableServingUrl, unityCatalogProvisioningState); + } + + @Override + public String toString() { + return new ToStringer(OnlineTablePb.class) + .add("name", name) + .add("spec", spec) + .add("status", status) + .add("tableServingUrl", tableServingUrl) + .add("unityCatalogProvisioningState", unityCatalogProvisioningState) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java index 0cb21c63a..61e688aa5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Specification of an online table. */ @Generated +@JsonSerialize(using = OnlineTableSpec.OnlineTableSpecSerializer.class) +@JsonDeserialize(using = OnlineTableSpec.OnlineTableSpecDeserializer.class) public class OnlineTableSpec { /** * Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of @@ -19,34 +30,27 @@ public class OnlineTableSpec { * syncing views or tables without CDFs to online tables. Note that the full-copy pipeline only * supports "triggered" scheduling policy. */ - @JsonProperty("perform_full_copy") private Boolean performFullCopy; /** ID of the associated pipeline. Generated by the server - cannot be set by the caller. */ - @JsonProperty("pipeline_id") private String pipelineId; /** Primary Key columns to be used for data insert/update in the destination. */ - @JsonProperty("primary_key_columns") private Collection primaryKeyColumns; /** Pipeline runs continuously after generating the initial data. */ - @JsonProperty("run_continuously") private OnlineTableSpecContinuousSchedulingPolicy runContinuously; /** * Pipeline stops after generating the initial data and can be triggered later (manually, through * a cron job or through data triggers) */ - @JsonProperty("run_triggered") private OnlineTableSpecTriggeredSchedulingPolicy runTriggered; /** Three-part (catalog, schema, table) name of the source Delta table. */ - @JsonProperty("source_table_full_name") private String sourceTableFullName; /** Time series key to deduplicate (tie-break) rows with the same primary key. 
*/ - @JsonProperty("timeseries_key") private String timeseriesKey; public OnlineTableSpec setPerformFullCopy(Boolean performFullCopy) { @@ -151,4 +155,50 @@ public String toString() { .add("timeseriesKey", timeseriesKey) .toString(); } + + OnlineTableSpecPb toPb() { + OnlineTableSpecPb pb = new OnlineTableSpecPb(); + pb.setPerformFullCopy(performFullCopy); + pb.setPipelineId(pipelineId); + pb.setPrimaryKeyColumns(primaryKeyColumns); + pb.setRunContinuously(runContinuously); + pb.setRunTriggered(runTriggered); + pb.setSourceTableFullName(sourceTableFullName); + pb.setTimeseriesKey(timeseriesKey); + + return pb; + } + + static OnlineTableSpec fromPb(OnlineTableSpecPb pb) { + OnlineTableSpec model = new OnlineTableSpec(); + model.setPerformFullCopy(pb.getPerformFullCopy()); + model.setPipelineId(pb.getPipelineId()); + model.setPrimaryKeyColumns(pb.getPrimaryKeyColumns()); + model.setRunContinuously(pb.getRunContinuously()); + model.setRunTriggered(pb.getRunTriggered()); + model.setSourceTableFullName(pb.getSourceTableFullName()); + model.setTimeseriesKey(pb.getTimeseriesKey()); + + return model; + } + + public static class OnlineTableSpecSerializer extends JsonSerializer { + @Override + public void serialize(OnlineTableSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OnlineTableSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OnlineTableSpecDeserializer extends JsonDeserializer { + @Override + public OnlineTableSpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OnlineTableSpecPb pb = mapper.readValue(p, OnlineTableSpecPb.class); + return OnlineTableSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java index bf10eb94d..b6831e4c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java @@ -4,9 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + OnlineTableSpecContinuousSchedulingPolicy + .OnlineTableSpecContinuousSchedulingPolicySerializer.class) +@JsonDeserialize( + using = + OnlineTableSpecContinuousSchedulingPolicy + .OnlineTableSpecContinuousSchedulingPolicyDeserializer.class) public class OnlineTableSpecContinuousSchedulingPolicy { @Override @@ -25,4 +43,45 @@ public int hashCode() { public String toString() { return new ToStringer(OnlineTableSpecContinuousSchedulingPolicy.class).toString(); } + + OnlineTableSpecContinuousSchedulingPolicyPb toPb() { + OnlineTableSpecContinuousSchedulingPolicyPb pb = + new 
OnlineTableSpecContinuousSchedulingPolicyPb(); + + return pb; + } + + static OnlineTableSpecContinuousSchedulingPolicy fromPb( + OnlineTableSpecContinuousSchedulingPolicyPb pb) { + OnlineTableSpecContinuousSchedulingPolicy model = + new OnlineTableSpecContinuousSchedulingPolicy(); + + return model; + } + + public static class OnlineTableSpecContinuousSchedulingPolicySerializer + extends JsonSerializer { + @Override + public void serialize( + OnlineTableSpecContinuousSchedulingPolicy value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + OnlineTableSpecContinuousSchedulingPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OnlineTableSpecContinuousSchedulingPolicyDeserializer + extends JsonDeserializer { + @Override + public OnlineTableSpecContinuousSchedulingPolicy deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OnlineTableSpecContinuousSchedulingPolicyPb pb = + mapper.readValue(p, OnlineTableSpecContinuousSchedulingPolicyPb.class); + return OnlineTableSpecContinuousSchedulingPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicyPb.java new file mode 100755 index 000000000..98c5d13da --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicyPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class OnlineTableSpecContinuousSchedulingPolicyPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(OnlineTableSpecContinuousSchedulingPolicyPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecPb.java new file mode 100755 index 000000000..f68240333 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Specification of an online table. 
*/ +@Generated +class OnlineTableSpecPb { + @JsonProperty("perform_full_copy") + private Boolean performFullCopy; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("primary_key_columns") + private Collection primaryKeyColumns; + + @JsonProperty("run_continuously") + private OnlineTableSpecContinuousSchedulingPolicy runContinuously; + + @JsonProperty("run_triggered") + private OnlineTableSpecTriggeredSchedulingPolicy runTriggered; + + @JsonProperty("source_table_full_name") + private String sourceTableFullName; + + @JsonProperty("timeseries_key") + private String timeseriesKey; + + public OnlineTableSpecPb setPerformFullCopy(Boolean performFullCopy) { + this.performFullCopy = performFullCopy; + return this; + } + + public Boolean getPerformFullCopy() { + return performFullCopy; + } + + public OnlineTableSpecPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public OnlineTableSpecPb setPrimaryKeyColumns(Collection primaryKeyColumns) { + this.primaryKeyColumns = primaryKeyColumns; + return this; + } + + public Collection getPrimaryKeyColumns() { + return primaryKeyColumns; + } + + public OnlineTableSpecPb setRunContinuously( + OnlineTableSpecContinuousSchedulingPolicy runContinuously) { + this.runContinuously = runContinuously; + return this; + } + + public OnlineTableSpecContinuousSchedulingPolicy getRunContinuously() { + return runContinuously; + } + + public OnlineTableSpecPb setRunTriggered(OnlineTableSpecTriggeredSchedulingPolicy runTriggered) { + this.runTriggered = runTriggered; + return this; + } + + public OnlineTableSpecTriggeredSchedulingPolicy getRunTriggered() { + return runTriggered; + } + + public OnlineTableSpecPb setSourceTableFullName(String sourceTableFullName) { + this.sourceTableFullName = sourceTableFullName; + return this; + } + + public String getSourceTableFullName() { + return sourceTableFullName; + } + + public 
OnlineTableSpecPb setTimeseriesKey(String timeseriesKey) { + this.timeseriesKey = timeseriesKey; + return this; + } + + public String getTimeseriesKey() { + return timeseriesKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OnlineTableSpecPb that = (OnlineTableSpecPb) o; + return Objects.equals(performFullCopy, that.performFullCopy) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(primaryKeyColumns, that.primaryKeyColumns) + && Objects.equals(runContinuously, that.runContinuously) + && Objects.equals(runTriggered, that.runTriggered) + && Objects.equals(sourceTableFullName, that.sourceTableFullName) + && Objects.equals(timeseriesKey, that.timeseriesKey); + } + + @Override + public int hashCode() { + return Objects.hash( + performFullCopy, + pipelineId, + primaryKeyColumns, + runContinuously, + runTriggered, + sourceTableFullName, + timeseriesKey); + } + + @Override + public String toString() { + return new ToStringer(OnlineTableSpecPb.class) + .add("performFullCopy", performFullCopy) + .add("pipelineId", pipelineId) + .add("primaryKeyColumns", primaryKeyColumns) + .add("runContinuously", runContinuously) + .add("runTriggered", runTriggered) + .add("sourceTableFullName", sourceTableFullName) + .add("timeseriesKey", timeseriesKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicy.java index f9e731684..8446c14d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicy.java @@ -4,9 +4,27 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + OnlineTableSpecTriggeredSchedulingPolicy.OnlineTableSpecTriggeredSchedulingPolicySerializer + .class) +@JsonDeserialize( + using = + OnlineTableSpecTriggeredSchedulingPolicy + .OnlineTableSpecTriggeredSchedulingPolicyDeserializer.class) public class OnlineTableSpecTriggeredSchedulingPolicy { @Override @@ -25,4 +43,44 @@ public int hashCode() { public String toString() { return new ToStringer(OnlineTableSpecTriggeredSchedulingPolicy.class).toString(); } + + OnlineTableSpecTriggeredSchedulingPolicyPb toPb() { + OnlineTableSpecTriggeredSchedulingPolicyPb pb = + new OnlineTableSpecTriggeredSchedulingPolicyPb(); + + return pb; + } + + static OnlineTableSpecTriggeredSchedulingPolicy fromPb( + OnlineTableSpecTriggeredSchedulingPolicyPb pb) { + OnlineTableSpecTriggeredSchedulingPolicy model = new OnlineTableSpecTriggeredSchedulingPolicy(); + + return model; + } + + public static class OnlineTableSpecTriggeredSchedulingPolicySerializer + extends JsonSerializer { + @Override + public void serialize( + OnlineTableSpecTriggeredSchedulingPolicy value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + OnlineTableSpecTriggeredSchedulingPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OnlineTableSpecTriggeredSchedulingPolicyDeserializer + extends JsonDeserializer 
{ + @Override + public OnlineTableSpecTriggeredSchedulingPolicy deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OnlineTableSpecTriggeredSchedulingPolicyPb pb = + mapper.readValue(p, OnlineTableSpecTriggeredSchedulingPolicyPb.class); + return OnlineTableSpecTriggeredSchedulingPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicyPb.java new file mode 100755 index 000000000..9f6f106a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecTriggeredSchedulingPolicyPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class OnlineTableSpecTriggeredSchedulingPolicyPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(OnlineTableSpecTriggeredSchedulingPolicyPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java index 11f64ecf9..37ec8d631 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java @@ -4,46 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Status of an online table. */ @Generated +@JsonSerialize(using = OnlineTableStatus.OnlineTableStatusSerializer.class) +@JsonDeserialize(using = OnlineTableStatus.OnlineTableStatusDeserializer.class) public class OnlineTableStatus { /** * Detailed status of an online table. Shown if the online table is in the * ONLINE_CONTINUOUS_UPDATE or the ONLINE_UPDATING_PIPELINE_RESOURCES state. */ - @JsonProperty("continuous_update_status") private ContinuousUpdateStatus continuousUpdateStatus; /** The state of the online table. */ - @JsonProperty("detailed_state") private OnlineTableState detailedState; /** * Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the * ONLINE_PIPELINE_FAILED state. */ - @JsonProperty("failed_status") private FailedStatus failedStatus; /** A text description of the current state of the online table. */ - @JsonProperty("message") private String message; /** * Detailed status of an online table. Shown if the online table is in the * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. 
*/ - @JsonProperty("provisioning_status") private ProvisioningStatus provisioningStatus; /** * Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE * or the ONLINE_NO_PENDING_UPDATE state. */ - @JsonProperty("triggered_update_status") private TriggeredUpdateStatus triggeredUpdateStatus; public OnlineTableStatus setContinuousUpdateStatus( @@ -136,4 +141,48 @@ public String toString() { .add("triggeredUpdateStatus", triggeredUpdateStatus) .toString(); } + + OnlineTableStatusPb toPb() { + OnlineTableStatusPb pb = new OnlineTableStatusPb(); + pb.setContinuousUpdateStatus(continuousUpdateStatus); + pb.setDetailedState(detailedState); + pb.setFailedStatus(failedStatus); + pb.setMessage(message); + pb.setProvisioningStatus(provisioningStatus); + pb.setTriggeredUpdateStatus(triggeredUpdateStatus); + + return pb; + } + + static OnlineTableStatus fromPb(OnlineTableStatusPb pb) { + OnlineTableStatus model = new OnlineTableStatus(); + model.setContinuousUpdateStatus(pb.getContinuousUpdateStatus()); + model.setDetailedState(pb.getDetailedState()); + model.setFailedStatus(pb.getFailedStatus()); + model.setMessage(pb.getMessage()); + model.setProvisioningStatus(pb.getProvisioningStatus()); + model.setTriggeredUpdateStatus(pb.getTriggeredUpdateStatus()); + + return model; + } + + public static class OnlineTableStatusSerializer extends JsonSerializer { + @Override + public void serialize(OnlineTableStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OnlineTableStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OnlineTableStatusDeserializer extends JsonDeserializer { + @Override + public OnlineTableStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OnlineTableStatusPb pb = mapper.readValue(p, OnlineTableStatusPb.class); + return OnlineTableStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatusPb.java new file mode 100755 index 000000000..b8c8e6725 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatusPb.java @@ -0,0 +1,121 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status of an online table. */ +@Generated +class OnlineTableStatusPb { + @JsonProperty("continuous_update_status") + private ContinuousUpdateStatus continuousUpdateStatus; + + @JsonProperty("detailed_state") + private OnlineTableState detailedState; + + @JsonProperty("failed_status") + private FailedStatus failedStatus; + + @JsonProperty("message") + private String message; + + @JsonProperty("provisioning_status") + private ProvisioningStatus provisioningStatus; + + @JsonProperty("triggered_update_status") + private TriggeredUpdateStatus triggeredUpdateStatus; + + public OnlineTableStatusPb setContinuousUpdateStatus( + ContinuousUpdateStatus continuousUpdateStatus) { + this.continuousUpdateStatus = continuousUpdateStatus; + return this; + } + + public ContinuousUpdateStatus getContinuousUpdateStatus() { + return continuousUpdateStatus; + } + + public OnlineTableStatusPb setDetailedState(OnlineTableState detailedState) { + this.detailedState = detailedState; + return this; + } + + public OnlineTableState getDetailedState() { + return detailedState; + } + + public OnlineTableStatusPb setFailedStatus(FailedStatus 
failedStatus) { + this.failedStatus = failedStatus; + return this; + } + + public FailedStatus getFailedStatus() { + return failedStatus; + } + + public OnlineTableStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public OnlineTableStatusPb setProvisioningStatus(ProvisioningStatus provisioningStatus) { + this.provisioningStatus = provisioningStatus; + return this; + } + + public ProvisioningStatus getProvisioningStatus() { + return provisioningStatus; + } + + public OnlineTableStatusPb setTriggeredUpdateStatus(TriggeredUpdateStatus triggeredUpdateStatus) { + this.triggeredUpdateStatus = triggeredUpdateStatus; + return this; + } + + public TriggeredUpdateStatus getTriggeredUpdateStatus() { + return triggeredUpdateStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OnlineTableStatusPb that = (OnlineTableStatusPb) o; + return Objects.equals(continuousUpdateStatus, that.continuousUpdateStatus) + && Objects.equals(detailedState, that.detailedState) + && Objects.equals(failedStatus, that.failedStatus) + && Objects.equals(message, that.message) + && Objects.equals(provisioningStatus, that.provisioningStatus) + && Objects.equals(triggeredUpdateStatus, that.triggeredUpdateStatus); + } + + @Override + public int hashCode() { + return Objects.hash( + continuousUpdateStatus, + detailedState, + failedStatus, + message, + provisioningStatus, + triggeredUpdateStatus); + } + + @Override + public String toString() { + return new ToStringer(OnlineTableStatusPb.class) + .add("continuousUpdateStatus", continuousUpdateStatus) + .add("detailedState", detailedState) + .add("failedStatus", failedStatus) + .add("message", message) + .add("provisioningStatus", provisioningStatus) + .add("triggeredUpdateStatus", triggeredUpdateStatus) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index 2121a24ce..d387e94ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -21,7 +21,7 @@ public OnlineTable create(CreateOnlineTableRequest request) { String path = "/api/2.0/online-tables"; try { Request req = new Request("POST", path, apiClient.serialize(request.getTable())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, OnlineTable.class); @@ -35,7 +35,7 @@ public void delete(DeleteOnlineTableRequest request) { String path = String.format("/api/2.0/online-tables/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public OnlineTable get(GetOnlineTableRequest request) { String path = String.format("/api/2.0/online-tables/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, OnlineTable.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java index 548721fde..ee95bd39c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionsChange.PermissionsChangeSerializer.class) +@JsonDeserialize(using = PermissionsChange.PermissionsChangeDeserializer.class) public class PermissionsChange { /** The set of privileges to add. */ - @JsonProperty("add") private Collection add; /** The principal whose privileges we are changing. */ - @JsonProperty("principal") private String principal; /** The set of privileges to remove. 
*/ - @JsonProperty("remove") private Collection remove; public PermissionsChange setAdd(Collection add) { @@ -72,4 +80,42 @@ public String toString() { .add("remove", remove) .toString(); } + + PermissionsChangePb toPb() { + PermissionsChangePb pb = new PermissionsChangePb(); + pb.setAdd(add); + pb.setPrincipal(principal); + pb.setRemove(remove); + + return pb; + } + + static PermissionsChange fromPb(PermissionsChangePb pb) { + PermissionsChange model = new PermissionsChange(); + model.setAdd(pb.getAdd()); + model.setPrincipal(pb.getPrincipal()); + model.setRemove(pb.getRemove()); + + return model; + } + + public static class PermissionsChangeSerializer extends JsonSerializer { + @Override + public void serialize(PermissionsChange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionsChangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionsChangeDeserializer extends JsonDeserializer { + @Override + public PermissionsChange deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionsChangePb pb = mapper.readValue(p, PermissionsChangePb.class); + return PermissionsChange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChangePb.java new file mode 100755 index 000000000..ac2aac235 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChangePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PermissionsChangePb { + @JsonProperty("add") + private Collection add; + + @JsonProperty("principal") + private String principal; + + @JsonProperty("remove") + private Collection remove; + + public PermissionsChangePb setAdd(Collection add) { + this.add = add; + return this; + } + + public Collection getAdd() { + return add; + } + + public PermissionsChangePb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PermissionsChangePb setRemove(Collection remove) { + this.remove = remove; + return this; + } + + public Collection getRemove() { + return remove; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionsChangePb that = (PermissionsChangePb) o; + return Objects.equals(add, that.add) + && Objects.equals(principal, that.principal) + && Objects.equals(remove, that.remove); + } + + @Override + public int hashCode() { + return Objects.hash(add, principal, remove); + } + + @Override + public String toString() { + return new ToStringer(PermissionsChangePb.class) + .add("add", add) + .add("principal", principal) + .add("remove", remove) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java index ba6234b65..bfaca6a88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java @@ -4,14 +4,24 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionsList.PermissionsListSerializer.class) +@JsonDeserialize(using = PermissionsList.PermissionsListDeserializer.class) public class PermissionsList { /** The privileges assigned to each principal */ - @JsonProperty("privilege_assignments") private Collection privilegeAssignments; public PermissionsList setPrivilegeAssignments( @@ -43,4 +53,38 @@ public String toString() { .add("privilegeAssignments", privilegeAssignments) .toString(); } + + PermissionsListPb toPb() { + PermissionsListPb pb = new PermissionsListPb(); + pb.setPrivilegeAssignments(privilegeAssignments); + + return pb; + } + + static PermissionsList fromPb(PermissionsListPb pb) { + PermissionsList model = new PermissionsList(); + model.setPrivilegeAssignments(pb.getPrivilegeAssignments()); + + return model; + } + + public static class PermissionsListSerializer extends JsonSerializer { + @Override + public void serialize(PermissionsList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionsListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionsListDeserializer extends JsonDeserializer { + @Override + public PermissionsList deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionsListPb pb = mapper.readValue(p, PermissionsListPb.class); + return PermissionsList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsListPb.java new file mode 100755 index 000000000..3c9c8c3f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsListPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PermissionsListPb { + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public PermissionsListPb setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionsListPb that = (PermissionsListPb) o; + return Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(PermissionsListPb.class) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java index 36f1fa047..5e3105996 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java @@ -4,35 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Progress information of the Online Table data synchronization pipeline. */ @Generated +@JsonSerialize(using = PipelineProgress.PipelineProgressSerializer.class) +@JsonDeserialize(using = PipelineProgress.PipelineProgressDeserializer.class) public class PipelineProgress { /** The estimated time remaining to complete this update in seconds. */ - @JsonProperty("estimated_completion_time_seconds") private Double estimatedCompletionTimeSeconds; /** * The source table Delta version that was last processed by the pipeline. The pipeline may not * have completely processed this version yet. */ - @JsonProperty("latest_version_currently_processing") private Long latestVersionCurrentlyProcessing; /** The completion ratio of this update. This is a number between 0 and 1. 
*/ - @JsonProperty("sync_progress_completion") private Double syncProgressCompletion; /** The number of rows that have been synced in this update. */ - @JsonProperty("synced_row_count") private Long syncedRowCount; /** * The total number of rows that need to be synced in this update. This number may be an estimate. */ - @JsonProperty("total_row_count") private Long totalRowCount; public PipelineProgress setEstimatedCompletionTimeSeconds(Double estimatedCompletionTimeSeconds) { @@ -113,4 +119,46 @@ public String toString() { .add("totalRowCount", totalRowCount) .toString(); } + + PipelineProgressPb toPb() { + PipelineProgressPb pb = new PipelineProgressPb(); + pb.setEstimatedCompletionTimeSeconds(estimatedCompletionTimeSeconds); + pb.setLatestVersionCurrentlyProcessing(latestVersionCurrentlyProcessing); + pb.setSyncProgressCompletion(syncProgressCompletion); + pb.setSyncedRowCount(syncedRowCount); + pb.setTotalRowCount(totalRowCount); + + return pb; + } + + static PipelineProgress fromPb(PipelineProgressPb pb) { + PipelineProgress model = new PipelineProgress(); + model.setEstimatedCompletionTimeSeconds(pb.getEstimatedCompletionTimeSeconds()); + model.setLatestVersionCurrentlyProcessing(pb.getLatestVersionCurrentlyProcessing()); + model.setSyncProgressCompletion(pb.getSyncProgressCompletion()); + model.setSyncedRowCount(pb.getSyncedRowCount()); + model.setTotalRowCount(pb.getTotalRowCount()); + + return model; + } + + public static class PipelineProgressSerializer extends JsonSerializer { + @Override + public void serialize(PipelineProgress value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineProgressPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineProgressDeserializer extends JsonDeserializer { + @Override + public PipelineProgress deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineProgressPb pb = mapper.readValue(p, PipelineProgressPb.class); + return PipelineProgress.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgressPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgressPb.java new file mode 100755 index 000000000..40276fb37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgressPb.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Progress information of the Online Table data synchronization pipeline. */ +@Generated +class PipelineProgressPb { + @JsonProperty("estimated_completion_time_seconds") + private Double estimatedCompletionTimeSeconds; + + @JsonProperty("latest_version_currently_processing") + private Long latestVersionCurrentlyProcessing; + + @JsonProperty("sync_progress_completion") + private Double syncProgressCompletion; + + @JsonProperty("synced_row_count") + private Long syncedRowCount; + + @JsonProperty("total_row_count") + private Long totalRowCount; + + public PipelineProgressPb setEstimatedCompletionTimeSeconds( + Double estimatedCompletionTimeSeconds) { + this.estimatedCompletionTimeSeconds = estimatedCompletionTimeSeconds; + return this; + } + + public Double getEstimatedCompletionTimeSeconds() { + return estimatedCompletionTimeSeconds; + } + + public PipelineProgressPb setLatestVersionCurrentlyProcessing( + Long latestVersionCurrentlyProcessing) { + this.latestVersionCurrentlyProcessing = latestVersionCurrentlyProcessing; + return this; + } + + public Long getLatestVersionCurrentlyProcessing() { + return 
latestVersionCurrentlyProcessing; + } + + public PipelineProgressPb setSyncProgressCompletion(Double syncProgressCompletion) { + this.syncProgressCompletion = syncProgressCompletion; + return this; + } + + public Double getSyncProgressCompletion() { + return syncProgressCompletion; + } + + public PipelineProgressPb setSyncedRowCount(Long syncedRowCount) { + this.syncedRowCount = syncedRowCount; + return this; + } + + public Long getSyncedRowCount() { + return syncedRowCount; + } + + public PipelineProgressPb setTotalRowCount(Long totalRowCount) { + this.totalRowCount = totalRowCount; + return this; + } + + public Long getTotalRowCount() { + return totalRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineProgressPb that = (PipelineProgressPb) o; + return Objects.equals(estimatedCompletionTimeSeconds, that.estimatedCompletionTimeSeconds) + && Objects.equals(latestVersionCurrentlyProcessing, that.latestVersionCurrentlyProcessing) + && Objects.equals(syncProgressCompletion, that.syncProgressCompletion) + && Objects.equals(syncedRowCount, that.syncedRowCount) + && Objects.equals(totalRowCount, that.totalRowCount); + } + + @Override + public int hashCode() { + return Objects.hash( + estimatedCompletionTimeSeconds, + latestVersionCurrentlyProcessing, + syncProgressCompletion, + syncedRowCount, + totalRowCount); + } + + @Override + public String toString() { + return new ToStringer(PipelineProgressPb.class) + .add("estimatedCompletionTimeSeconds", estimatedCompletionTimeSeconds) + .add("latestVersionCurrentlyProcessing", latestVersionCurrentlyProcessing) + .add("syncProgressCompletion", syncProgressCompletion) + .add("syncedRowCount", syncedRowCount) + .add("totalRowCount", totalRowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java index 5d13fbbd4..6143f0b7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PrimaryKeyConstraint.PrimaryKeyConstraintSerializer.class) +@JsonDeserialize(using = PrimaryKeyConstraint.PrimaryKeyConstraintDeserializer.class) public class PrimaryKeyConstraint { /** Column names for this constraint. */ - @JsonProperty("child_columns") private Collection childColumns; /** The name of the constraint. */ - @JsonProperty("name") private String name; /** Column names that represent a timeseries. 
*/ - @JsonProperty("timeseries_columns") private Collection timeseriesColumns; public PrimaryKeyConstraint setChildColumns(Collection childColumns) { @@ -72,4 +80,44 @@ public String toString() { .add("timeseriesColumns", timeseriesColumns) .toString(); } + + PrimaryKeyConstraintPb toPb() { + PrimaryKeyConstraintPb pb = new PrimaryKeyConstraintPb(); + pb.setChildColumns(childColumns); + pb.setName(name); + pb.setTimeseriesColumns(timeseriesColumns); + + return pb; + } + + static PrimaryKeyConstraint fromPb(PrimaryKeyConstraintPb pb) { + PrimaryKeyConstraint model = new PrimaryKeyConstraint(); + model.setChildColumns(pb.getChildColumns()); + model.setName(pb.getName()); + model.setTimeseriesColumns(pb.getTimeseriesColumns()); + + return model; + } + + public static class PrimaryKeyConstraintSerializer extends JsonSerializer { + @Override + public void serialize( + PrimaryKeyConstraint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PrimaryKeyConstraintPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PrimaryKeyConstraintDeserializer + extends JsonDeserializer { + @Override + public PrimaryKeyConstraint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PrimaryKeyConstraintPb pb = mapper.readValue(p, PrimaryKeyConstraintPb.class); + return PrimaryKeyConstraint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraintPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraintPb.java new file mode 100755 index 000000000..48d6b831f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraintPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PrimaryKeyConstraintPb { + @JsonProperty("child_columns") + private Collection childColumns; + + @JsonProperty("name") + private String name; + + @JsonProperty("timeseries_columns") + private Collection timeseriesColumns; + + public PrimaryKeyConstraintPb setChildColumns(Collection childColumns) { + this.childColumns = childColumns; + return this; + } + + public Collection getChildColumns() { + return childColumns; + } + + public PrimaryKeyConstraintPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PrimaryKeyConstraintPb setTimeseriesColumns(Collection timeseriesColumns) { + this.timeseriesColumns = timeseriesColumns; + return this; + } + + public Collection getTimeseriesColumns() { + return timeseriesColumns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PrimaryKeyConstraintPb that = (PrimaryKeyConstraintPb) o; + return Objects.equals(childColumns, that.childColumns) + && Objects.equals(name, that.name) + && Objects.equals(timeseriesColumns, that.timeseriesColumns); + } + + @Override + public int hashCode() { + return Objects.hash(childColumns, name, timeseriesColumns); + } + + @Override + public String toString() { + return new ToStringer(PrimaryKeyConstraintPb.class) + .add("childColumns", childColumns) + .add("name", name) + .add("timeseriesColumns", timeseriesColumns) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java index 
9c9f97d13..c23758444 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PrivilegeAssignment.PrivilegeAssignmentSerializer.class) +@JsonDeserialize(using = PrivilegeAssignment.PrivilegeAssignmentDeserializer.class) public class PrivilegeAssignment { /** The principal (user email address or group name). */ - @JsonProperty("principal") private String principal; /** The privileges assigned to the principal. 
*/ - @JsonProperty("privileges") private Collection privileges; public PrivilegeAssignment setPrincipal(String principal) { @@ -56,4 +65,41 @@ public String toString() { .add("privileges", privileges) .toString(); } + + PrivilegeAssignmentPb toPb() { + PrivilegeAssignmentPb pb = new PrivilegeAssignmentPb(); + pb.setPrincipal(principal); + pb.setPrivileges(privileges); + + return pb; + } + + static PrivilegeAssignment fromPb(PrivilegeAssignmentPb pb) { + PrivilegeAssignment model = new PrivilegeAssignment(); + model.setPrincipal(pb.getPrincipal()); + model.setPrivileges(pb.getPrivileges()); + + return model; + } + + public static class PrivilegeAssignmentSerializer extends JsonSerializer { + @Override + public void serialize(PrivilegeAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PrivilegeAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PrivilegeAssignmentDeserializer + extends JsonDeserializer { + @Override + public PrivilegeAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PrivilegeAssignmentPb pb = mapper.readValue(p, PrivilegeAssignmentPb.class); + return PrivilegeAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignmentPb.java new file mode 100755 index 000000000..6e2cdf05f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignmentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PrivilegeAssignmentPb { + @JsonProperty("principal") + private String principal; + + @JsonProperty("privileges") + private Collection privileges; + + public PrivilegeAssignmentPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PrivilegeAssignmentPb setPrivileges(Collection privileges) { + this.privileges = privileges; + return this; + } + + public Collection getPrivileges() { + return privileges; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PrivilegeAssignmentPb that = (PrivilegeAssignmentPb) o; + return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges); + } + + @Override + public int hashCode() { + return Objects.hash(principal, privileges); + } + + @Override + public String toString() { + return new ToStringer(PrivilegeAssignmentPb.class) + .add("principal", principal) + .add("privileges", privileges) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java index d124c65b5..54ed31220 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Status of an asynchronously provisioned resource. */ @Generated +@JsonSerialize(using = ProvisioningInfo.ProvisioningInfoSerializer.class) +@JsonDeserialize(using = ProvisioningInfo.ProvisioningInfoDeserializer.class) public class ProvisioningInfo { /** The provisioning state of the resource. */ - @JsonProperty("state") private ProvisioningInfoState state; public ProvisioningInfo setState(ProvisioningInfoState state) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ProvisioningInfo.class).add("state", state).toString(); } + + ProvisioningInfoPb toPb() { + ProvisioningInfoPb pb = new ProvisioningInfoPb(); + pb.setState(state); + + return pb; + } + + static ProvisioningInfo fromPb(ProvisioningInfoPb pb) { + ProvisioningInfo model = new ProvisioningInfo(); + model.setState(pb.getState()); + + return model; + } + + public static class ProvisioningInfoSerializer extends JsonSerializer { + @Override + public void serialize(ProvisioningInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProvisioningInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProvisioningInfoDeserializer extends JsonDeserializer { + @Override + public ProvisioningInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProvisioningInfoPb pb = mapper.readValue(p, ProvisioningInfoPb.class); + return ProvisioningInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoPb.java new file mode 100755 index 000000000..f47232928 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status of an asynchronously provisioned resource. */ +@Generated +class ProvisioningInfoPb { + @JsonProperty("state") + private ProvisioningInfoState state; + + public ProvisioningInfoPb setState(ProvisioningInfoState state) { + this.state = state; + return this; + } + + public ProvisioningInfoState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProvisioningInfoPb that = (ProvisioningInfoPb) o; + return Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(state); + } + + @Override + public String toString() { + return new ToStringer(ProvisioningInfoPb.class).add("state", state).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatus.java index df55a0306..c9a035002 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatus.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,12 +21,13 @@ * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. */ @Generated +@JsonSerialize(using = ProvisioningStatus.ProvisioningStatusSerializer.class) +@JsonDeserialize(using = ProvisioningStatus.ProvisioningStatusDeserializer.class) public class ProvisioningStatus { /** * Details about initial data synchronization. Only populated when in the * PROVISIONING_INITIAL_SNAPSHOT state. 
*/ - @JsonProperty("initial_pipeline_sync_progress") private PipelineProgress initialPipelineSyncProgress; public ProvisioningStatus setInitialPipelineSyncProgress( @@ -49,4 +59,38 @@ public String toString() { .add("initialPipelineSyncProgress", initialPipelineSyncProgress) .toString(); } + + ProvisioningStatusPb toPb() { + ProvisioningStatusPb pb = new ProvisioningStatusPb(); + pb.setInitialPipelineSyncProgress(initialPipelineSyncProgress); + + return pb; + } + + static ProvisioningStatus fromPb(ProvisioningStatusPb pb) { + ProvisioningStatus model = new ProvisioningStatus(); + model.setInitialPipelineSyncProgress(pb.getInitialPipelineSyncProgress()); + + return model; + } + + public static class ProvisioningStatusSerializer extends JsonSerializer { + @Override + public void serialize(ProvisioningStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProvisioningStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProvisioningStatusDeserializer extends JsonDeserializer { + @Override + public ProvisioningStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProvisioningStatusPb pb = mapper.readValue(p, ProvisioningStatusPb.class); + return ProvisioningStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatusPb.java new file mode 100755 index 000000000..f5b36ae95 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningStatusPb.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of an online table. Shown if the online table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. + */ +@Generated +class ProvisioningStatusPb { + @JsonProperty("initial_pipeline_sync_progress") + private PipelineProgress initialPipelineSyncProgress; + + public ProvisioningStatusPb setInitialPipelineSyncProgress( + PipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public PipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProvisioningStatusPb that = (ProvisioningStatusPb) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress); + } + + @Override + public String toString() { + return new ToStringer(ProvisioningStatusPb.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java index 78553ca8e..44c536058 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java @@ -24,7 +24,7 @@ public void cancelRefresh(CancelRefreshRequest request) { request.getTableName(), request.getRefreshId()); try { Request req = new 
Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, CancelRefreshResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -36,7 +36,7 @@ public MonitorInfo create(CreateMonitor request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, MonitorInfo.class); @@ -50,7 +50,7 @@ public void delete(DeleteQualityMonitorRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -62,7 +62,7 @@ public MonitorInfo get(GetQualityMonitorRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, MonitorInfo.class); } catch (IOException e) { @@ -78,7 +78,7 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) { request.getTableName(), request.getRefreshId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, MonitorRefreshInfo.class); } catch (IOException e) { @@ -92,7 +92,7 @@ public 
MonitorRefreshListResponse listRefreshes(ListRefreshesRequest request) { String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, MonitorRefreshListResponse.class); } catch (IOException e) { @@ -107,7 +107,7 @@ public RegenerateDashboardResponse regenerateDashboard(RegenerateDashboardReques "/api/2.1/quality-monitoring/tables/%s/monitor/dashboard", request.getTableName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegenerateDashboardResponse.class); @@ -122,7 +122,7 @@ public MonitorRefreshInfo runRefresh(RunRefreshRequest request) { String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, MonitorRefreshInfo.class); } catch (IOException e) { @@ -135,7 +135,7 @@ public MonitorInfo update(UpdateMonitor request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, MonitorInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfo.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfo.java index 357a4966e..7fe20f261 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfo.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = QuotaInfo.QuotaInfoSerializer.class) +@JsonDeserialize(using = QuotaInfo.QuotaInfoDeserializer.class) public class QuotaInfo { /** The timestamp that indicates when the quota count was last updated. */ - @JsonProperty("last_refreshed_at") private Long lastRefreshedAt; /** Name of the parent resource. Returns metastore ID if the parent is a metastore. */ - @JsonProperty("parent_full_name") private String parentFullName; /** The quota parent securable type. */ - @JsonProperty("parent_securable_type") private SecurableType parentSecurableType; /** The current usage of the resource quota. */ - @JsonProperty("quota_count") private Long quotaCount; /** The current limit of the resource quota. */ - @JsonProperty("quota_limit") private Long quotaLimit; /** The name of the quota. 
*/ - @JsonProperty("quota_name") private String quotaName; public QuotaInfo setLastRefreshedAt(Long lastRefreshedAt) { @@ -117,4 +122,47 @@ public String toString() { .add("quotaName", quotaName) .toString(); } + + QuotaInfoPb toPb() { + QuotaInfoPb pb = new QuotaInfoPb(); + pb.setLastRefreshedAt(lastRefreshedAt); + pb.setParentFullName(parentFullName); + pb.setParentSecurableType(parentSecurableType); + pb.setQuotaCount(quotaCount); + pb.setQuotaLimit(quotaLimit); + pb.setQuotaName(quotaName); + + return pb; + } + + static QuotaInfo fromPb(QuotaInfoPb pb) { + QuotaInfo model = new QuotaInfo(); + model.setLastRefreshedAt(pb.getLastRefreshedAt()); + model.setParentFullName(pb.getParentFullName()); + model.setParentSecurableType(pb.getParentSecurableType()); + model.setQuotaCount(pb.getQuotaCount()); + model.setQuotaLimit(pb.getQuotaLimit()); + model.setQuotaName(pb.getQuotaName()); + + return model; + } + + public static class QuotaInfoSerializer extends JsonSerializer { + @Override + public void serialize(QuotaInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QuotaInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QuotaInfoDeserializer extends JsonDeserializer { + @Override + public QuotaInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QuotaInfoPb pb = mapper.readValue(p, QuotaInfoPb.class); + return QuotaInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfoPb.java new file mode 100755 index 000000000..90211c15a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfoPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QuotaInfoPb { + @JsonProperty("last_refreshed_at") + private Long lastRefreshedAt; + + @JsonProperty("parent_full_name") + private String parentFullName; + + @JsonProperty("parent_securable_type") + private SecurableType parentSecurableType; + + @JsonProperty("quota_count") + private Long quotaCount; + + @JsonProperty("quota_limit") + private Long quotaLimit; + + @JsonProperty("quota_name") + private String quotaName; + + public QuotaInfoPb setLastRefreshedAt(Long lastRefreshedAt) { + this.lastRefreshedAt = lastRefreshedAt; + return this; + } + + public Long getLastRefreshedAt() { + return lastRefreshedAt; + } + + public QuotaInfoPb setParentFullName(String parentFullName) { + this.parentFullName = parentFullName; + return this; + } + + public String getParentFullName() { + return parentFullName; + } + + public QuotaInfoPb setParentSecurableType(SecurableType parentSecurableType) { + this.parentSecurableType = parentSecurableType; + return this; + } + + public SecurableType getParentSecurableType() { + return parentSecurableType; + } + + public QuotaInfoPb setQuotaCount(Long quotaCount) { + this.quotaCount = quotaCount; + return this; + } + + 
public Long getQuotaCount() { + return quotaCount; + } + + public QuotaInfoPb setQuotaLimit(Long quotaLimit) { + this.quotaLimit = quotaLimit; + return this; + } + + public Long getQuotaLimit() { + return quotaLimit; + } + + public QuotaInfoPb setQuotaName(String quotaName) { + this.quotaName = quotaName; + return this; + } + + public String getQuotaName() { + return quotaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QuotaInfoPb that = (QuotaInfoPb) o; + return Objects.equals(lastRefreshedAt, that.lastRefreshedAt) + && Objects.equals(parentFullName, that.parentFullName) + && Objects.equals(parentSecurableType, that.parentSecurableType) + && Objects.equals(quotaCount, that.quotaCount) + && Objects.equals(quotaLimit, that.quotaLimit) + && Objects.equals(quotaName, that.quotaName); + } + + @Override + public int hashCode() { + return Objects.hash( + lastRefreshedAt, parentFullName, parentSecurableType, quotaCount, quotaLimit, quotaName); + } + + @Override + public String toString() { + return new ToStringer(QuotaInfoPb.class) + .add("lastRefreshedAt", lastRefreshedAt) + .add("parentFullName", parentFullName) + .add("parentSecurableType", parentSecurableType) + .add("quotaCount", quotaCount) + .add("quotaLimit", quotaLimit) + .add("quotaName", quotaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java index 5a157c340..276362b85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,17 +21,16 @@ * https://developers.cloudflare.com/r2/api/s3/tokens/. */ @Generated +@JsonSerialize(using = R2Credentials.R2CredentialsSerializer.class) +@JsonDeserialize(using = R2Credentials.R2CredentialsDeserializer.class) public class R2Credentials { /** The access key ID that identifies the temporary credentials. */ - @JsonProperty("access_key_id") private String accessKeyId; /** The secret access key associated with the access key. */ - @JsonProperty("secret_access_key") private String secretAccessKey; /** The generated JWT that users must pass to use the temporary credentials. 
*/ - @JsonProperty("session_token") private String sessionToken; public R2Credentials setAccessKeyId(String accessKeyId) { @@ -75,4 +83,41 @@ public String toString() { .add("sessionToken", sessionToken) .toString(); } + + R2CredentialsPb toPb() { + R2CredentialsPb pb = new R2CredentialsPb(); + pb.setAccessKeyId(accessKeyId); + pb.setSecretAccessKey(secretAccessKey); + pb.setSessionToken(sessionToken); + + return pb; + } + + static R2Credentials fromPb(R2CredentialsPb pb) { + R2Credentials model = new R2Credentials(); + model.setAccessKeyId(pb.getAccessKeyId()); + model.setSecretAccessKey(pb.getSecretAccessKey()); + model.setSessionToken(pb.getSessionToken()); + + return model; + } + + public static class R2CredentialsSerializer extends JsonSerializer { + @Override + public void serialize(R2Credentials value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + R2CredentialsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class R2CredentialsDeserializer extends JsonDeserializer { + @Override + public R2Credentials deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + R2CredentialsPb pb = mapper.readValue(p, R2CredentialsPb.class); + return R2Credentials.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2CredentialsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2CredentialsPb.java new file mode 100755 index 000000000..575a294bd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2CredentialsPb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * R2 temporary credentials for API authentication. Read more at + * https://developers.cloudflare.com/r2/api/s3/tokens/. + */ +@Generated +class R2CredentialsPb { + @JsonProperty("access_key_id") + private String accessKeyId; + + @JsonProperty("secret_access_key") + private String secretAccessKey; + + @JsonProperty("session_token") + private String sessionToken; + + public R2CredentialsPb setAccessKeyId(String accessKeyId) { + this.accessKeyId = accessKeyId; + return this; + } + + public String getAccessKeyId() { + return accessKeyId; + } + + public R2CredentialsPb setSecretAccessKey(String secretAccessKey) { + this.secretAccessKey = secretAccessKey; + return this; + } + + public String getSecretAccessKey() { + return secretAccessKey; + } + + public R2CredentialsPb setSessionToken(String sessionToken) { + this.sessionToken = sessionToken; + return this; + } + + public String getSessionToken() { + return sessionToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + R2CredentialsPb that = (R2CredentialsPb) o; + return Objects.equals(accessKeyId, that.accessKeyId) + && Objects.equals(secretAccessKey, that.secretAccessKey) + && Objects.equals(sessionToken, that.sessionToken); + } + + @Override + public int hashCode() { + return Objects.hash(accessKeyId, secretAccessKey, sessionToken); + } + + @Override + public String toString() { + return new ToStringer(R2CredentialsPb.class) + .add("accessKeyId", accessKeyId) + .add("secretAccessKey", secretAccessKey) + .add("sessionToken", sessionToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java index db3ff1835..66108a1ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java @@ -3,24 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a Volume */ @Generated +@JsonSerialize(using = ReadVolumeRequest.ReadVolumeRequestSerializer.class) +@JsonDeserialize(using = ReadVolumeRequest.ReadVolumeRequestDeserializer.class) public class ReadVolumeRequest { /** * Whether to include volumes in the response for which the principal can only access selective * metadata for */ - @JsonIgnore - @QueryParam("include_browse") private Boolean includeBrowse; /** The three-level (fully qualified) name of the volume */ - @JsonIgnore private String name; + private String name; public ReadVolumeRequest setIncludeBrowse(Boolean includeBrowse) { this.includeBrowse = includeBrowse; @@ -60,4 +68,40 @@ public String toString() { .add("name", name) .toString(); } + + ReadVolumeRequestPb toPb() { + ReadVolumeRequestPb pb = new ReadVolumeRequestPb(); + pb.setIncludeBrowse(includeBrowse); + 
pb.setName(name); + + return pb; + } + + static ReadVolumeRequest fromPb(ReadVolumeRequestPb pb) { + ReadVolumeRequest model = new ReadVolumeRequest(); + model.setIncludeBrowse(pb.getIncludeBrowse()); + model.setName(pb.getName()); + + return model; + } + + public static class ReadVolumeRequestSerializer extends JsonSerializer { + @Override + public void serialize(ReadVolumeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReadVolumeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReadVolumeRequestDeserializer extends JsonDeserializer { + @Override + public ReadVolumeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReadVolumeRequestPb pb = mapper.readValue(p, ReadVolumeRequestPb.class); + return ReadVolumeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequestPb.java new file mode 100755 index 000000000..5e1222303 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Volume */ +@Generated +class ReadVolumeRequestPb { + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + @JsonIgnore private String name; + + public ReadVolumeRequestPb setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ReadVolumeRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReadVolumeRequestPb that = (ReadVolumeRequestPb) o; + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse, name); + } + + @Override + public String toString() { + return new ToStringer(ReadVolumeRequestPb.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequest.java index 2ad857629..240fe1a35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RegenerateDashboardRequest.RegenerateDashboardRequestSerializer.class) +@JsonDeserialize(using = RegenerateDashboardRequest.RegenerateDashboardRequestDeserializer.class) public class RegenerateDashboardRequest { /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; /** * Optional argument to specify the warehouse for dashboard regeneration. If not specified, the * first running warehouse will be used. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public RegenerateDashboardRequest setTableName(String tableName) { @@ -59,4 +68,43 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + RegenerateDashboardRequestPb toPb() { + RegenerateDashboardRequestPb pb = new RegenerateDashboardRequestPb(); + pb.setTableName(tableName); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static RegenerateDashboardRequest fromPb(RegenerateDashboardRequestPb pb) { + RegenerateDashboardRequest model = new RegenerateDashboardRequest(); + model.setTableName(pb.getTableName()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class RegenerateDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RegenerateDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegenerateDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegenerateDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public RegenerateDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegenerateDashboardRequestPb pb = mapper.readValue(p, RegenerateDashboardRequestPb.class); + return RegenerateDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequestPb.java new file mode 100755 index 000000000..fb47a2771 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegenerateDashboardRequestPb { + @JsonIgnore private String tableName; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public RegenerateDashboardRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public RegenerateDashboardRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegenerateDashboardRequestPb that = (RegenerateDashboardRequestPb) o; + return Objects.equals(tableName, that.tableName) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(tableName, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(RegenerateDashboardRequestPb.class) + .add("tableName", tableName) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponse.java index 078526fbe..b2221bb7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RegenerateDashboardResponse.RegenerateDashboardResponseSerializer.class) +@JsonDeserialize(using = RegenerateDashboardResponse.RegenerateDashboardResponseDeserializer.class) public class RegenerateDashboardResponse { /** Id of the regenerated monitoring dashboard. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The directory where the regenerated dashboard is stored. 
*/ - @JsonProperty("parent_folder") private String parentFolder; public RegenerateDashboardResponse setDashboardId(String dashboardId) { @@ -56,4 +65,43 @@ public String toString() { .add("parentFolder", parentFolder) .toString(); } + + RegenerateDashboardResponsePb toPb() { + RegenerateDashboardResponsePb pb = new RegenerateDashboardResponsePb(); + pb.setDashboardId(dashboardId); + pb.setParentFolder(parentFolder); + + return pb; + } + + static RegenerateDashboardResponse fromPb(RegenerateDashboardResponsePb pb) { + RegenerateDashboardResponse model = new RegenerateDashboardResponse(); + model.setDashboardId(pb.getDashboardId()); + model.setParentFolder(pb.getParentFolder()); + + return model; + } + + public static class RegenerateDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RegenerateDashboardResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegenerateDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegenerateDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public RegenerateDashboardResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegenerateDashboardResponsePb pb = mapper.readValue(p, RegenerateDashboardResponsePb.class); + return RegenerateDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponsePb.java new file mode 100755 index 000000000..e7f06f51f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegenerateDashboardResponsePb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("parent_folder") + private String parentFolder; + + public RegenerateDashboardResponsePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public RegenerateDashboardResponsePb setParentFolder(String parentFolder) { + this.parentFolder = parentFolder; + return this; + } + + public String getParentFolder() { + return parentFolder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegenerateDashboardResponsePb that = (RegenerateDashboardResponsePb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(parentFolder, that.parentFolder); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, parentFolder); + } + + @Override + public String toString() { + return new 
ToStringer(RegenerateDashboardResponsePb.class) + .add("dashboardId", dashboardId) + .add("parentFolder", parentFolder) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java index 3bfde7bc5..72cf1450d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Registered model alias. */ @Generated +@JsonSerialize(using = RegisteredModelAlias.RegisteredModelAliasSerializer.class) +@JsonDeserialize(using = RegisteredModelAlias.RegisteredModelAliasDeserializer.class) public class RegisteredModelAlias { /** Name of the alias, e.g. 'champion' or 'latest_stable' */ - @JsonProperty("alias_name") private String aliasName; /** Integer version number of the model version to which this alias points. 
*/ - @JsonProperty("version_num") private Long versionNum; public RegisteredModelAlias setAliasName(String aliasName) { @@ -56,4 +65,42 @@ public String toString() { .add("versionNum", versionNum) .toString(); } + + RegisteredModelAliasPb toPb() { + RegisteredModelAliasPb pb = new RegisteredModelAliasPb(); + pb.setAliasName(aliasName); + pb.setVersionNum(versionNum); + + return pb; + } + + static RegisteredModelAlias fromPb(RegisteredModelAliasPb pb) { + RegisteredModelAlias model = new RegisteredModelAlias(); + model.setAliasName(pb.getAliasName()); + model.setVersionNum(pb.getVersionNum()); + + return model; + } + + public static class RegisteredModelAliasSerializer extends JsonSerializer { + @Override + public void serialize( + RegisteredModelAlias value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelAliasPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelAliasDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelAlias deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelAliasPb pb = mapper.readValue(p, RegisteredModelAliasPb.class); + return RegisteredModelAlias.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAliasPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAliasPb.java new file mode 100755 index 000000000..d73cbdedf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAliasPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Registered model alias. */ +@Generated +class RegisteredModelAliasPb { + @JsonProperty("alias_name") + private String aliasName; + + @JsonProperty("version_num") + private Long versionNum; + + public RegisteredModelAliasPb setAliasName(String aliasName) { + this.aliasName = aliasName; + return this; + } + + public String getAliasName() { + return aliasName; + } + + public RegisteredModelAliasPb setVersionNum(Long versionNum) { + this.versionNum = versionNum; + return this; + } + + public Long getVersionNum() { + return versionNum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelAliasPb that = (RegisteredModelAliasPb) o; + return Objects.equals(aliasName, that.aliasName) && Objects.equals(versionNum, that.versionNum); + } + + @Override + public int hashCode() { + return Objects.hash(aliasName, versionNum); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelAliasPb.class) + .add("aliasName", aliasName) + .add("versionNum", versionNum) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java index 072d58d13..cde77c7f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java @@ -4,69 +4,66 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RegisteredModelInfo.RegisteredModelInfoSerializer.class) +@JsonDeserialize(using = RegisteredModelInfo.RegisteredModelInfoDeserializer.class) public class RegisteredModelInfo { /** List of aliases associated with the registered model */ - @JsonProperty("aliases") private Collection aliases; /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. 
*/ - @JsonProperty("browse_only") private Boolean browseOnly; /** The name of the catalog where the schema and the registered model reside */ - @JsonProperty("catalog_name") private String catalogName; /** The comment attached to the registered model */ - @JsonProperty("comment") private String comment; /** Creation timestamp of the registered model in milliseconds since the Unix epoch */ - @JsonProperty("created_at") private Long createdAt; /** The identifier of the user who created the registered model */ - @JsonProperty("created_by") private String createdBy; /** The three-level (fully qualified) name of the registered model */ - @JsonProperty("full_name") private String fullName; /** The unique identifier of the metastore */ - @JsonProperty("metastore_id") private String metastoreId; /** The name of the registered model */ - @JsonProperty("name") private String name; /** The identifier of the user who owns the registered model */ - @JsonProperty("owner") private String owner; /** The name of the schema where the registered model resides */ - @JsonProperty("schema_name") private String schemaName; /** The storage location on the cloud under which model version data files are stored */ - @JsonProperty("storage_location") private String storageLocation; /** Last-update timestamp of the registered model in milliseconds since the Unix epoch */ - @JsonProperty("updated_at") private Long updatedAt; /** The identifier of the user who updated the registered model last time */ - @JsonProperty("updated_by") private String updatedBy; public RegisteredModelInfo setAliases(Collection aliases) { @@ -254,4 +251,65 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + RegisteredModelInfoPb toPb() { + RegisteredModelInfoPb pb = new RegisteredModelInfoPb(); + pb.setAliases(aliases); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + 
pb.setFullName(fullName); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setSchemaName(schemaName); + pb.setStorageLocation(storageLocation); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static RegisteredModelInfo fromPb(RegisteredModelInfoPb pb) { + RegisteredModelInfo model = new RegisteredModelInfo(); + model.setAliases(pb.getAliases()); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setFullName(pb.getFullName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setSchemaName(pb.getSchemaName()); + model.setStorageLocation(pb.getStorageLocation()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class RegisteredModelInfoSerializer extends JsonSerializer { + @Override + public void serialize(RegisteredModelInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelInfoDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelInfoPb pb = mapper.readValue(p, RegisteredModelInfoPb.class); + return RegisteredModelInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfoPb.java new file mode 100755 index 000000000..be196df99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfoPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegisteredModelInfoPb { + @JsonProperty("aliases") + private Collection aliases; + + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public RegisteredModelInfoPb setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public RegisteredModelInfoPb 
setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public RegisteredModelInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public RegisteredModelInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public RegisteredModelInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public RegisteredModelInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public RegisteredModelInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public RegisteredModelInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public RegisteredModelInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public RegisteredModelInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public RegisteredModelInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public RegisteredModelInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public RegisteredModelInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + 
public Long getUpdatedAt() { + return updatedAt; + } + + public RegisteredModelInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelInfoPb that = (RegisteredModelInfoPb) o; + return Objects.equals(aliases, that.aliases) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + aliases, + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + fullName, + metastoreId, + name, + owner, + schemaName, + storageLocation, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelInfoPb.class) + .add("aliases", aliases) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("schemaName", schemaName) + .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java index bc2de4af6..d3b6e0802 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java @@ -21,7 +21,7 @@ public RegisteredModelInfo create(CreateRegisteredModelRequest request) { String path = "/api/2.1/unity-catalog/models"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegisteredModelInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -50,7 +50,7 @@ public void deleteAlias(DeleteAliasRequest request) { request.getFullName(), request.getAlias()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteAliasResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -62,7 +62,7 @@ public RegisteredModelInfo get(GetRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, 
RegisteredModelInfo.class); } catch (IOException e) { @@ -75,7 +75,7 @@ public ListRegisteredModelsResponse list(ListRegisteredModelsRequest request) { String path = "/api/2.1/unity-catalog/models"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListRegisteredModelsResponse.class); } catch (IOException e) { @@ -91,7 +91,7 @@ public RegisteredModelAlias setAlias(SetRegisteredModelAliasRequest request) { request.getFullName(), request.getAlias()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegisteredModelAlias.class); @@ -105,7 +105,7 @@ public RegisteredModelInfo update(UpdateRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegisteredModelInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java index 05c2e571e..0c3226811 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java @@ -24,7 +24,7 @@ public GetQuotaResponse getQuota(GetQuotaRequest request) { request.getParentSecurableType(), request.getParentFullName(), 
request.getQuotaName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetQuotaResponse.class); } catch (IOException e) { @@ -37,7 +37,7 @@ public ListQuotasResponse listQuotas(ListQuotasRequest request) { String path = "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListQuotasResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java index de17a87b5..c519b6762 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Queue a metric refresh for a monitor */ @Generated +@JsonSerialize(using = RunRefreshRequest.RunRefreshRequestSerializer.class) 
+@JsonDeserialize(using = RunRefreshRequest.RunRefreshRequestDeserializer.class) public class RunRefreshRequest { /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; public RunRefreshRequest setTableName(String tableName) { this.tableName = tableName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(RunRefreshRequest.class).add("tableName", tableName).toString(); } + + RunRefreshRequestPb toPb() { + RunRefreshRequestPb pb = new RunRefreshRequestPb(); + pb.setTableName(tableName); + + return pb; + } + + static RunRefreshRequest fromPb(RunRefreshRequestPb pb) { + RunRefreshRequest model = new RunRefreshRequest(); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class RunRefreshRequestSerializer extends JsonSerializer { + @Override + public void serialize(RunRefreshRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunRefreshRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunRefreshRequestDeserializer extends JsonDeserializer { + @Override + public RunRefreshRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunRefreshRequestPb pb = mapper.readValue(p, RunRefreshRequestPb.class); + return RunRefreshRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequestPb.java new file mode 100755 index 000000000..91985a02a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Queue a metric refresh for a monitor */ +@Generated +class RunRefreshRequestPb { + @JsonIgnore private String tableName; + + public RunRefreshRequestPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunRefreshRequestPb that = (RunRefreshRequestPb) o; + return Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(tableName); + } + + @Override + public String toString() { + return new ToStringer(RunRefreshRequestPb.class).add("tableName", tableName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java index 6f7da5149..5c529f489 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -4,86 +4,79 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; /** Next ID: 40 */ @Generated +@JsonSerialize(using = SchemaInfo.SchemaInfoSerializer.class) +@JsonDeserialize(using = SchemaInfo.SchemaInfoDeserializer.class) public class SchemaInfo { /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** Name of parent catalog. */ - @JsonProperty("catalog_name") private String catalogName; /** The type of the parent catalog. */ - @JsonProperty("catalog_type") private CatalogType catalogType; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Time at which this schema was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of schema creator. */ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; /** Whether predictive optimization should be enabled for this object and objects under it. */ - @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; /** Full name of schema, in form of __catalog_name__.__schema_name__. */ - @JsonProperty("full_name") private String fullName; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of schema, relative to parent catalog. */ - @JsonProperty("name") private String name; /** Username of current owner of schema. */ - @JsonProperty("owner") private String owner; /** A map of key-value properties attached to the securable. 
*/ - @JsonProperty("properties") private Map properties; /** The unique identifier of the schema. */ - @JsonProperty("schema_id") private String schemaId; /** Storage location for managed tables within schema. */ - @JsonProperty("storage_location") private String storageLocation; /** Storage root URL for managed tables within schema. */ - @JsonProperty("storage_root") private String storageRoot; /** Time at which this schema was created, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified schema. */ - @JsonProperty("updated_by") private String updatedBy; public SchemaInfo setBrowseOnly(Boolean browseOnly) { @@ -322,4 +315,71 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + SchemaInfoPb toPb() { + SchemaInfoPb pb = new SchemaInfoPb(); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setCatalogType(catalogType); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEffectivePredictiveOptimizationFlag(effectivePredictiveOptimizationFlag); + pb.setEnablePredictiveOptimization(enablePredictiveOptimization); + pb.setFullName(fullName); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setProperties(properties); + pb.setSchemaId(schemaId); + pb.setStorageLocation(storageLocation); + pb.setStorageRoot(storageRoot); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static SchemaInfo fromPb(SchemaInfoPb pb) { + SchemaInfo model = new SchemaInfo(); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setCatalogType(pb.getCatalogType()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEffectivePredictiveOptimizationFlag(pb.getEffectivePredictiveOptimizationFlag()); + 
model.setEnablePredictiveOptimization(pb.getEnablePredictiveOptimization()); + model.setFullName(pb.getFullName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setProperties(pb.getProperties()); + model.setSchemaId(pb.getSchemaId()); + model.setStorageLocation(pb.getStorageLocation()); + model.setStorageRoot(pb.getStorageRoot()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class SchemaInfoSerializer extends JsonSerializer { + @Override + public void serialize(SchemaInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SchemaInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SchemaInfoDeserializer extends JsonDeserializer { + @Override + public SchemaInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SchemaInfoPb pb = mapper.readValue(p, SchemaInfoPb.class); + return SchemaInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfoPb.java new file mode 100755 index 000000000..429598130 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfoPb.java @@ -0,0 +1,304 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** Next ID: 40 */ +@Generated +class SchemaInfoPb { + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("catalog_type") + private CatalogType catalogType; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; + + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("schema_id") + private String schemaId; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("storage_root") + private String storageRoot; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public SchemaInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public SchemaInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public SchemaInfoPb setCatalogType(CatalogType catalogType) { + this.catalogType = catalogType; + return this; + } + + 
public CatalogType getCatalogType() { + return catalogType; + } + + public SchemaInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public SchemaInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public SchemaInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public SchemaInfoPb setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; + return this; + } + + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; + } + + public SchemaInfoPb setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + + public SchemaInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public SchemaInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public SchemaInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SchemaInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public SchemaInfoPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map 
getProperties() { + return properties; + } + + public SchemaInfoPb setSchemaId(String schemaId) { + this.schemaId = schemaId; + return this; + } + + public String getSchemaId() { + return schemaId; + } + + public SchemaInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public SchemaInfoPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public SchemaInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public SchemaInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SchemaInfoPb that = (SchemaInfoPb) o; + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(catalogType, that.catalogType) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties) + && Objects.equals(schemaId, that.schemaId) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(storageRoot, that.storageRoot) + && Objects.equals(updatedAt, that.updatedAt) + && 
Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + browseOnly, + catalogName, + catalogType, + comment, + createdAt, + createdBy, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, + fullName, + metastoreId, + name, + owner, + properties, + schemaId, + storageLocation, + storageRoot, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(SchemaInfoPb.class) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("catalogType", catalogType) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("properties", properties) + .add("schemaId", schemaId) + .add("storageLocation", storageLocation) + .add("storageRoot", storageRoot) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java index e5470c89b..391763383 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java @@ -21,7 +21,7 @@ public SchemaInfo create(CreateSchema request) { String path = "/api/2.1/unity-catalog/schemas"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SchemaInfo.class); @@ -35,7 +35,7 
@@ public void delete(DeleteSchemaRequest request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public SchemaInfo get(GetSchemaRequest request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SchemaInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListSchemasResponse list(ListSchemasRequest request) { String path = "/api/2.1/unity-catalog/schemas"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSchemasResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public SchemaInfo update(UpdateSchema request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SchemaInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java index 61fc05806..ba9002f67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SetArtifactAllowlist.SetArtifactAllowlistSerializer.class) +@JsonDeserialize(using = SetArtifactAllowlist.SetArtifactAllowlistDeserializer.class) public class SetArtifactAllowlist { /** A list of allowed artifact match patterns. */ - @JsonProperty("artifact_matchers") private Collection artifactMatchers; /** The artifact type of the allowlist. */ - @JsonIgnore private ArtifactType artifactType; + private ArtifactType artifactType; /** Time at which this artifact allowlist was set, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of the user who set the artifact allowlist. */ - @JsonProperty("created_by") private String createdBy; /** Unique identifier of parent metastore. 
*/ - @JsonProperty("metastore_id") private String metastoreId; public SetArtifactAllowlist setArtifactMatchers(Collection artifactMatchers) { @@ -102,4 +108,48 @@ public String toString() { .add("metastoreId", metastoreId) .toString(); } + + SetArtifactAllowlistPb toPb() { + SetArtifactAllowlistPb pb = new SetArtifactAllowlistPb(); + pb.setArtifactMatchers(artifactMatchers); + pb.setArtifactType(artifactType); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setMetastoreId(metastoreId); + + return pb; + } + + static SetArtifactAllowlist fromPb(SetArtifactAllowlistPb pb) { + SetArtifactAllowlist model = new SetArtifactAllowlist(); + model.setArtifactMatchers(pb.getArtifactMatchers()); + model.setArtifactType(pb.getArtifactType()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setMetastoreId(pb.getMetastoreId()); + + return model; + } + + public static class SetArtifactAllowlistSerializer extends JsonSerializer { + @Override + public void serialize( + SetArtifactAllowlist value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetArtifactAllowlistPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetArtifactAllowlistDeserializer + extends JsonDeserializer { + @Override + public SetArtifactAllowlist deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetArtifactAllowlistPb pb = mapper.readValue(p, SetArtifactAllowlistPb.class); + return SetArtifactAllowlist.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlistPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlistPb.java new file mode 100755 index 000000000..b46fb6409 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlistPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SetArtifactAllowlistPb { + @JsonProperty("artifact_matchers") + private Collection artifactMatchers; + + @JsonIgnore private ArtifactType artifactType; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("metastore_id") + private String metastoreId; + + public SetArtifactAllowlistPb setArtifactMatchers(Collection artifactMatchers) { + this.artifactMatchers = artifactMatchers; + return this; + } + + public Collection getArtifactMatchers() { + return artifactMatchers; + } + + public SetArtifactAllowlistPb setArtifactType(ArtifactType artifactType) { + this.artifactType = artifactType; + return this; + } + + public ArtifactType getArtifactType() { + return artifactType; + } + + public SetArtifactAllowlistPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public SetArtifactAllowlistPb setCreatedBy(String createdBy) { + 
this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public SetArtifactAllowlistPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetArtifactAllowlistPb that = (SetArtifactAllowlistPb) o; + return Objects.equals(artifactMatchers, that.artifactMatchers) + && Objects.equals(artifactType, that.artifactType) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(metastoreId, that.metastoreId); + } + + @Override + public int hashCode() { + return Objects.hash(artifactMatchers, artifactType, createdAt, createdBy, metastoreId); + } + + @Override + public String toString() { + return new ToStringer(SetArtifactAllowlistPb.class) + .add("artifactMatchers", artifactMatchers) + .add("artifactType", artifactType) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("metastoreId", metastoreId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java index 3c6c39fde..7a7b00792 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java @@ -4,21 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = SetRegisteredModelAliasRequest.SetRegisteredModelAliasRequestSerializer.class) +@JsonDeserialize( + using = SetRegisteredModelAliasRequest.SetRegisteredModelAliasRequestDeserializer.class) public class SetRegisteredModelAliasRequest { /** The name of the alias */ - @JsonProperty("alias") private String alias; /** Full name of the registered model */ - @JsonProperty("full_name") private String fullName; /** The version number of the model version to which the alias points */ - @JsonProperty("version_num") private Long versionNum; public SetRegisteredModelAliasRequest setAlias(String alias) { @@ -71,4 +81,46 @@ public String toString() { .add("versionNum", versionNum) .toString(); } + + SetRegisteredModelAliasRequestPb toPb() { + SetRegisteredModelAliasRequestPb pb = new SetRegisteredModelAliasRequestPb(); + pb.setAlias(alias); + pb.setFullName(fullName); + pb.setVersionNum(versionNum); + + return pb; + } + + static SetRegisteredModelAliasRequest fromPb(SetRegisteredModelAliasRequestPb pb) { + SetRegisteredModelAliasRequest model = new SetRegisteredModelAliasRequest(); + model.setAlias(pb.getAlias()); + model.setFullName(pb.getFullName()); + model.setVersionNum(pb.getVersionNum()); + + return model; + } + + public static class SetRegisteredModelAliasRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SetRegisteredModelAliasRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetRegisteredModelAliasRequestPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetRegisteredModelAliasRequestDeserializer + extends JsonDeserializer { + @Override + public SetRegisteredModelAliasRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetRegisteredModelAliasRequestPb pb = + mapper.readValue(p, SetRegisteredModelAliasRequestPb.class); + return SetRegisteredModelAliasRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequestPb.java new file mode 100755 index 000000000..4163cfc9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SetRegisteredModelAliasRequestPb { + @JsonProperty("alias") + private String alias; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("version_num") + private Long versionNum; + + public SetRegisteredModelAliasRequestPb setAlias(String alias) { + this.alias = alias; + return this; + } + + public String getAlias() { + return alias; + } + + public SetRegisteredModelAliasRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public SetRegisteredModelAliasRequestPb setVersionNum(Long versionNum) { + this.versionNum = versionNum; + return this; + } + + public Long getVersionNum() { + return versionNum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetRegisteredModelAliasRequestPb that = (SetRegisteredModelAliasRequestPb) o; + return Objects.equals(alias, that.alias) + && Objects.equals(fullName, that.fullName) + && Objects.equals(versionNum, that.versionNum); + } + + @Override + public int hashCode() { + return Objects.hash(alias, fullName, versionNum); + } + + @Override + public String toString() { + return new ToStringer(SetRegisteredModelAliasRequestPb.class) + .add("alias", alias) + .add("fullName", fullName) + .add("versionNum", versionNum) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java index d0325a25d..25abcd9a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Server-Side Encryption properties for clients communicating with AWS s3. */ @Generated +@JsonSerialize(using = SseEncryptionDetails.SseEncryptionDetailsSerializer.class) +@JsonDeserialize(using = SseEncryptionDetails.SseEncryptionDetailsDeserializer.class) public class SseEncryptionDetails { /** Sets the value of the 'x-amz-server-side-encryption' header in S3 request. */ - @JsonProperty("algorithm") private SseEncryptionDetailsAlgorithm algorithm; /** * Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". * Sets the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header. 
*/ - @JsonProperty("aws_kms_key_arn") private String awsKmsKeyArn; public SseEncryptionDetails setAlgorithm(SseEncryptionDetailsAlgorithm algorithm) { @@ -60,4 +69,42 @@ public String toString() { .add("awsKmsKeyArn", awsKmsKeyArn) .toString(); } + + SseEncryptionDetailsPb toPb() { + SseEncryptionDetailsPb pb = new SseEncryptionDetailsPb(); + pb.setAlgorithm(algorithm); + pb.setAwsKmsKeyArn(awsKmsKeyArn); + + return pb; + } + + static SseEncryptionDetails fromPb(SseEncryptionDetailsPb pb) { + SseEncryptionDetails model = new SseEncryptionDetails(); + model.setAlgorithm(pb.getAlgorithm()); + model.setAwsKmsKeyArn(pb.getAwsKmsKeyArn()); + + return model; + } + + public static class SseEncryptionDetailsSerializer extends JsonSerializer { + @Override + public void serialize( + SseEncryptionDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SseEncryptionDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SseEncryptionDetailsDeserializer + extends JsonDeserializer { + @Override + public SseEncryptionDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SseEncryptionDetailsPb pb = mapper.readValue(p, SseEncryptionDetailsPb.class); + return SseEncryptionDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsPb.java new file mode 100755 index 000000000..d93baaf85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Server-Side Encryption properties for clients communicating with AWS s3. */ +@Generated +class SseEncryptionDetailsPb { + @JsonProperty("algorithm") + private SseEncryptionDetailsAlgorithm algorithm; + + @JsonProperty("aws_kms_key_arn") + private String awsKmsKeyArn; + + public SseEncryptionDetailsPb setAlgorithm(SseEncryptionDetailsAlgorithm algorithm) { + this.algorithm = algorithm; + return this; + } + + public SseEncryptionDetailsAlgorithm getAlgorithm() { + return algorithm; + } + + public SseEncryptionDetailsPb setAwsKmsKeyArn(String awsKmsKeyArn) { + this.awsKmsKeyArn = awsKmsKeyArn; + return this; + } + + public String getAwsKmsKeyArn() { + return awsKmsKeyArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SseEncryptionDetailsPb that = (SseEncryptionDetailsPb) o; + return Objects.equals(algorithm, that.algorithm) + && Objects.equals(awsKmsKeyArn, that.awsKmsKeyArn); + } + + @Override + public int hashCode() { + return Objects.hash(algorithm, awsKmsKeyArn); + } + + @Override + public String toString() { + return new ToStringer(SseEncryptionDetailsPb.class) + .add("algorithm", algorithm) + .add("awsKmsKeyArn", awsKmsKeyArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java index b3d32add0..f3a30f6dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java @@ -4,81 +4,74 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StorageCredentialInfo.StorageCredentialInfoSerializer.class) +@JsonDeserialize(using = StorageCredentialInfo.StorageCredentialInfoDeserializer.class) public class StorageCredentialInfo { /** The AWS IAM role configuration. */ - @JsonProperty("aws_iam_role") private AwsIamRoleResponse awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentityResponse azureManagedIdentity; /** The Azure service principal configuration. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** The Cloudflare API token configuration. */ - @JsonProperty("cloudflare_api_token") private CloudflareApiToken cloudflareApiToken; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** Time at which this Credential was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of credential creator. */ - @JsonProperty("created_by") private String createdBy; /** The Databricks managed GCP service account configuration. 
*/ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountResponse databricksGcpServiceAccount; /** The full name of the credential. */ - @JsonProperty("full_name") private String fullName; /** The unique identifier of the credential. */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** The credential name. The name must be unique within the metastore. */ - @JsonProperty("name") private String name; /** Username of current owner of credential. */ - @JsonProperty("owner") private String owner; /** Whether the storage credential is only usable for read operations. */ - @JsonProperty("read_only") private Boolean readOnly; /** Time at which this credential was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the credential. */ - @JsonProperty("updated_by") private String updatedBy; /** Whether this credential is the current metastore's root storage credential. 
*/ - @JsonProperty("used_for_managed_storage") private Boolean usedForManagedStorage; public StorageCredentialInfo setAwsIamRole(AwsIamRoleResponse awsIamRole) { @@ -317,4 +310,75 @@ public String toString() { .add("usedForManagedStorage", usedForManagedStorage) .toString(); } + + StorageCredentialInfoPb toPb() { + StorageCredentialInfoPb pb = new StorageCredentialInfoPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setCloudflareApiToken(cloudflareApiToken); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setFullName(fullName); + pb.setId(id); + pb.setIsolationMode(isolationMode); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setReadOnly(readOnly); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setUsedForManagedStorage(usedForManagedStorage); + + return pb; + } + + static StorageCredentialInfo fromPb(StorageCredentialInfoPb pb) { + StorageCredentialInfo model = new StorageCredentialInfo(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setCloudflareApiToken(pb.getCloudflareApiToken()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setFullName(pb.getFullName()); + model.setId(pb.getId()); + model.setIsolationMode(pb.getIsolationMode()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setReadOnly(pb.getReadOnly()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + 
model.setUsedForManagedStorage(pb.getUsedForManagedStorage()); + + return model; + } + + public static class StorageCredentialInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + StorageCredentialInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StorageCredentialInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StorageCredentialInfoDeserializer + extends JsonDeserializer { + @Override + public StorageCredentialInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StorageCredentialInfoPb pb = mapper.readValue(p, StorageCredentialInfoPb.class); + return StorageCredentialInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfoPb.java new file mode 100755 index 000000000..6fbae4504 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfoPb.java @@ -0,0 +1,302 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StorageCredentialInfoPb { + @JsonProperty("aws_iam_role") + private AwsIamRoleResponse awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityResponse azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountResponse databricksGcpServiceAccount; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("id") + private String id; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("used_for_managed_storage") + private Boolean usedForManagedStorage; + + public StorageCredentialInfoPb setAwsIamRole(AwsIamRoleResponse awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleResponse getAwsIamRole() { + return awsIamRole; + } + + public StorageCredentialInfoPb setAzureManagedIdentity( + AzureManagedIdentityResponse azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityResponse 
getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public StorageCredentialInfoPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public StorageCredentialInfoPb setCloudflareApiToken(CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + public CloudflareApiToken getCloudflareApiToken() { + return cloudflareApiToken; + } + + public StorageCredentialInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public StorageCredentialInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public StorageCredentialInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public StorageCredentialInfoPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountResponse databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountResponse getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public StorageCredentialInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public StorageCredentialInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public StorageCredentialInfoPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public 
StorageCredentialInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public StorageCredentialInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public StorageCredentialInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public StorageCredentialInfoPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public StorageCredentialInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public StorageCredentialInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public StorageCredentialInfoPb setUsedForManagedStorage(Boolean usedForManagedStorage) { + this.usedForManagedStorage = usedForManagedStorage; + return this; + } + + public Boolean getUsedForManagedStorage() { + return usedForManagedStorage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StorageCredentialInfoPb that = (StorageCredentialInfoPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(fullName, that.fullName) + && Objects.equals(id, 
that.id) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(usedForManagedStorage, that.usedForManagedStorage); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + comment, + createdAt, + createdBy, + databricksGcpServiceAccount, + fullName, + id, + isolationMode, + metastoreId, + name, + owner, + readOnly, + updatedAt, + updatedBy, + usedForManagedStorage); + } + + @Override + public String toString() { + return new ToStringer(StorageCredentialInfoPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("fullName", fullName) + .add("id", id) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("readOnly", readOnly) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("usedForManagedStorage", usedForManagedStorage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java index 3f1390c9f..ed2035a9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java @@ -21,7 +21,7 @@ 
public StorageCredentialInfo create(CreateStorageCredential request) { String path = "/api/2.1/unity-catalog/storage-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, StorageCredentialInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteStorageCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, StorageCredentialInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListStorageCredentialsResponse list(ListStorageCredentialsRequest request String path = "/api/2.1/unity-catalog/storage-credentials"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListStorageCredentialsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public StorageCredentialInfo update(UpdateStorageCredential request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); 
- ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, StorageCredentialInfo.class); @@ -88,7 +88,7 @@ public ValidateStorageCredentialResponse validate(ValidateStorageCredential requ String path = "/api/2.1/unity-catalog/validate-storage-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ValidateStorageCredentialResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java index 03d76e0f4..4d7b6f186 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SystemSchemaInfo.SystemSchemaInfoSerializer.class) 
+@JsonDeserialize(using = SystemSchemaInfo.SystemSchemaInfoDeserializer.class) public class SystemSchemaInfo { /** Name of the system schema. */ - @JsonProperty("schema") private String schema; /** @@ -18,7 +28,6 @@ public class SystemSchemaInfo { * is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | * ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE */ - @JsonProperty("state") private String state; public SystemSchemaInfo setSchema(String schema) { @@ -59,4 +68,40 @@ public String toString() { .add("state", state) .toString(); } + + SystemSchemaInfoPb toPb() { + SystemSchemaInfoPb pb = new SystemSchemaInfoPb(); + pb.setSchema(schema); + pb.setState(state); + + return pb; + } + + static SystemSchemaInfo fromPb(SystemSchemaInfoPb pb) { + SystemSchemaInfo model = new SystemSchemaInfo(); + model.setSchema(pb.getSchema()); + model.setState(pb.getState()); + + return model; + } + + public static class SystemSchemaInfoSerializer extends JsonSerializer { + @Override + public void serialize(SystemSchemaInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SystemSchemaInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SystemSchemaInfoDeserializer extends JsonDeserializer { + @Override + public SystemSchemaInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SystemSchemaInfoPb pb = mapper.readValue(p, SystemSchemaInfoPb.class); + return SystemSchemaInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoPb.java new file mode 100755 index 000000000..fa86dbc58 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SystemSchemaInfoPb { + @JsonProperty("schema") + private String schema; + + @JsonProperty("state") + private String state; + + public SystemSchemaInfoPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public SystemSchemaInfoPb setState(String state) { + this.state = state; + return this; + } + + public String getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SystemSchemaInfoPb that = (SystemSchemaInfoPb) o; + return Objects.equals(schema, that.schema) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(schema, state); + } + + @Override + public String toString() { + return new ToStringer(SystemSchemaInfoPb.class) + .add("schema", schema) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java index 4adce737b..a2ca5a7bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java @@ -24,7 +24,7 @@ public void disable(DisableRequest request) { request.getMetastoreId(), request.getSchemaName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DisableResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public void enable(EnableRequest request) { request.getMetastoreId(), request.getSchemaName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EnableResponse.class); @@ -56,7 +56,7 @@ public ListSystemSchemasResponse list(ListSystemSchemasRequest request) { "/api/2.1/unity-catalog/metastores/%s/systemschemas", request.getMetastoreId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSystemSchemasResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java index 43699a737..e8cadd4bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java @@ -4,7 +4,16 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,17 +21,16 @@ * __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. */ @Generated +@JsonSerialize(using = TableConstraint.TableConstraintSerializer.class) +@JsonDeserialize(using = TableConstraint.TableConstraintDeserializer.class) public class TableConstraint { /** */ - @JsonProperty("foreign_key_constraint") private ForeignKeyConstraint foreignKeyConstraint; /** */ - @JsonProperty("named_table_constraint") private NamedTableConstraint namedTableConstraint; /** */ - @JsonProperty("primary_key_constraint") private PrimaryKeyConstraint primaryKeyConstraint; public TableConstraint setForeignKeyConstraint(ForeignKeyConstraint foreignKeyConstraint) { @@ -75,4 +83,42 @@ public String toString() { .add("primaryKeyConstraint", primaryKeyConstraint) .toString(); } + + TableConstraintPb toPb() { + TableConstraintPb pb = new TableConstraintPb(); + pb.setForeignKeyConstraint(foreignKeyConstraint); + pb.setNamedTableConstraint(namedTableConstraint); + pb.setPrimaryKeyConstraint(primaryKeyConstraint); + + return pb; + } + + static TableConstraint fromPb(TableConstraintPb pb) { + TableConstraint model = new TableConstraint(); + model.setForeignKeyConstraint(pb.getForeignKeyConstraint()); + 
model.setNamedTableConstraint(pb.getNamedTableConstraint()); + model.setPrimaryKeyConstraint(pb.getPrimaryKeyConstraint()); + + return model; + } + + public static class TableConstraintSerializer extends JsonSerializer { + @Override + public void serialize(TableConstraint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableConstraintPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableConstraintDeserializer extends JsonDeserializer { + @Override + public TableConstraint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableConstraintPb pb = mapper.readValue(p, TableConstraintPb.class); + return TableConstraint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintPb.java new file mode 100755 index 000000000..d22376e77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintPb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A table constraint, as defined by *one* of the following fields being set: + * __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. 
+ */ +@Generated +class TableConstraintPb { + @JsonProperty("foreign_key_constraint") + private ForeignKeyConstraint foreignKeyConstraint; + + @JsonProperty("named_table_constraint") + private NamedTableConstraint namedTableConstraint; + + @JsonProperty("primary_key_constraint") + private PrimaryKeyConstraint primaryKeyConstraint; + + public TableConstraintPb setForeignKeyConstraint(ForeignKeyConstraint foreignKeyConstraint) { + this.foreignKeyConstraint = foreignKeyConstraint; + return this; + } + + public ForeignKeyConstraint getForeignKeyConstraint() { + return foreignKeyConstraint; + } + + public TableConstraintPb setNamedTableConstraint(NamedTableConstraint namedTableConstraint) { + this.namedTableConstraint = namedTableConstraint; + return this; + } + + public NamedTableConstraint getNamedTableConstraint() { + return namedTableConstraint; + } + + public TableConstraintPb setPrimaryKeyConstraint(PrimaryKeyConstraint primaryKeyConstraint) { + this.primaryKeyConstraint = primaryKeyConstraint; + return this; + } + + public PrimaryKeyConstraint getPrimaryKeyConstraint() { + return primaryKeyConstraint; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableConstraintPb that = (TableConstraintPb) o; + return Objects.equals(foreignKeyConstraint, that.foreignKeyConstraint) + && Objects.equals(namedTableConstraint, that.namedTableConstraint) + && Objects.equals(primaryKeyConstraint, that.primaryKeyConstraint); + } + + @Override + public int hashCode() { + return Objects.hash(foreignKeyConstraint, namedTableConstraint, primaryKeyConstraint); + } + + @Override + public String toString() { + return new ToStringer(TableConstraintPb.class) + .add("foreignKeyConstraint", foreignKeyConstraint) + .add("namedTableConstraint", namedTableConstraint) + .add("primaryKeyConstraint", primaryKeyConstraint) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java index e026a9c3f..3df72a786 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java @@ -21,7 +21,7 @@ public TableConstraint create(CreateTableConstraint request) { String path = "/api/2.1/unity-catalog/constraints"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TableConstraint.class); @@ -35,7 +35,7 @@ public void delete(DeleteTableConstraintRequest request) { String path = String.format("/api/2.1/unity-catalog/constraints/%s", request.getFullName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java index 6826680c6..8568a416d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A table that is dependent on a SQL object. */ @Generated +@JsonSerialize(using = TableDependency.TableDependencySerializer.class) +@JsonDeserialize(using = TableDependency.TableDependencyDeserializer.class) public class TableDependency { /** * Full name of the dependent table, in the form of * __catalog_name__.__schema_name__.__table_name__. */ - @JsonProperty("table_full_name") private String tableFullName; public TableDependency setTableFullName(String tableFullName) { @@ -43,4 +53,38 @@ public int hashCode() { public String toString() { return new ToStringer(TableDependency.class).add("tableFullName", tableFullName).toString(); } + + TableDependencyPb toPb() { + TableDependencyPb pb = new TableDependencyPb(); + pb.setTableFullName(tableFullName); + + return pb; + } + + static TableDependency fromPb(TableDependencyPb pb) { + TableDependency model = new TableDependency(); + model.setTableFullName(pb.getTableFullName()); + + return model; + } + + public static class TableDependencySerializer extends JsonSerializer { + @Override + public void serialize(TableDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableDependencyDeserializer extends JsonDeserializer { + @Override + public TableDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableDependencyPb pb = mapper.readValue(p, TableDependencyPb.class); + return TableDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependencyPb.java new file mode 100755 index 000000000..9006b349d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependencyPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A table that is dependent on a SQL object. */ +@Generated +class TableDependencyPb { + @JsonProperty("table_full_name") + private String tableFullName; + + public TableDependencyPb setTableFullName(String tableFullName) { + this.tableFullName = tableFullName; + return this; + } + + public String getTableFullName() { + return tableFullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableDependencyPb that = (TableDependencyPb) o; + return Objects.equals(tableFullName, that.tableFullName); + } + + @Override + public int hashCode() { + return Objects.hash(tableFullName); + } + + @Override + public String toString() { + return new ToStringer(TableDependencyPb.class).add("tableFullName", tableFullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java index 98eb5fb59..a27620010 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TableExistsResponse.TableExistsResponseSerializer.class) +@JsonDeserialize(using = TableExistsResponse.TableExistsResponseDeserializer.class) public class TableExistsResponse { /** Whether the table exists or not. 
*/ - @JsonProperty("table_exists") private Boolean tableExists; public TableExistsResponse setTableExists(Boolean tableExists) { @@ -39,4 +49,39 @@ public int hashCode() { public String toString() { return new ToStringer(TableExistsResponse.class).add("tableExists", tableExists).toString(); } + + TableExistsResponsePb toPb() { + TableExistsResponsePb pb = new TableExistsResponsePb(); + pb.setTableExists(tableExists); + + return pb; + } + + static TableExistsResponse fromPb(TableExistsResponsePb pb) { + TableExistsResponse model = new TableExistsResponse(); + model.setTableExists(pb.getTableExists()); + + return model; + } + + public static class TableExistsResponseSerializer extends JsonSerializer { + @Override + public void serialize(TableExistsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableExistsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableExistsResponseDeserializer + extends JsonDeserializer { + @Override + public TableExistsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableExistsResponsePb pb = mapper.readValue(p, TableExistsResponsePb.class); + return TableExistsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponsePb.java new file mode 100755 index 000000000..4fcf9d338 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TableExistsResponsePb { + @JsonProperty("table_exists") + private Boolean tableExists; + + public TableExistsResponsePb setTableExists(Boolean tableExists) { + this.tableExists = tableExists; + return this; + } + + public Boolean getTableExists() { + return tableExists; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableExistsResponsePb that = (TableExistsResponsePb) o; + return Objects.equals(tableExists, that.tableExists); + } + + @Override + public int hashCode() { + return Objects.hash(tableExists); + } + + @Override + public String toString() { + return new ToStringer(TableExistsResponsePb.class).add("tableExists", tableExists).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index 94c4d8abf..c808b1f7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -4,152 +4,132 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = TableInfo.TableInfoSerializer.class) +@JsonDeserialize(using = TableInfo.TableInfoDeserializer.class) public class TableInfo { /** The AWS access point to use when accesing s3 for this external location. */ - @JsonProperty("access_point") private String accessPoint; /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** Name of parent catalog. */ - @JsonProperty("catalog_name") private String catalogName; /** The array of __ColumnInfo__ definitions of the table's columns. */ - @JsonProperty("columns") private Collection columns; /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Time at which this table was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of table creator. */ - @JsonProperty("created_by") private String createdBy; /** Unique ID of the Data Access Configuration to use with the table data. */ - @JsonProperty("data_access_configuration_id") private String dataAccessConfigurationId; /** Data source format */ - @JsonProperty("data_source_format") private DataSourceFormat dataSourceFormat; /** * Time at which this table was deleted, in epoch milliseconds. Field is omitted if table is not * deleted. */ - @JsonProperty("deleted_at") private Long deletedAt; /** Information pertaining to current state of the delta table. 
*/ - @JsonProperty("delta_runtime_properties_kvpairs") private DeltaRuntimePropertiesKvPairs deltaRuntimePropertiesKvpairs; /** */ - @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; /** */ - @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; /** Encryption options that apply to clients connecting to cloud storage. */ - @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; /** Full name of table, in form of __catalog_name__.__schema_name__.__table_name__ */ - @JsonProperty("full_name") private String fullName; /** Unique identifier of parent metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of table, relative to parent schema. */ - @JsonProperty("name") private String name; /** Username of current owner of table. */ - @JsonProperty("owner") private String owner; /** * The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, * Streaming Table, etc.). */ - @JsonProperty("pipeline_id") private String pipelineId; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; /** */ - @JsonProperty("row_filter") private TableRowFilter rowFilter; /** Name of parent schema relative to its parent catalog. */ - @JsonProperty("schema_name") private String schemaName; /** List of schemes whose objects can be referenced without qualification. */ - @JsonProperty("sql_path") private String sqlPath; /** * Name of the storage credential, when a storage credential is configured for use with this * table. */ - @JsonProperty("storage_credential_name") private String storageCredentialName; /** Storage root URL for table (for **MANAGED**, **EXTERNAL** tables) */ - @JsonProperty("storage_location") private String storageLocation; /** * List of table constraints. 
Note: this field is not set in the output of the __listTables__ API. */ - @JsonProperty("table_constraints") private Collection tableConstraints; /** The unique identifier of the table. */ - @JsonProperty("table_id") private String tableId; /** */ - @JsonProperty("table_type") private TableType tableType; /** Time at which this table was last modified, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified the table. */ - @JsonProperty("updated_by") private String updatedBy; /** * View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or * **STREAMING_TABLE**) */ - @JsonProperty("view_definition") private String viewDefinition; /** @@ -158,7 +138,6 @@ public class TableInfo { * list, the dependency is provided but is empty; - when DependencyList is not an empty list, * dependencies are provided and recorded. */ - @JsonProperty("view_dependencies") private DependencyList viewDependencies; public TableInfo setAccessPoint(String accessPoint) { @@ -566,4 +545,99 @@ public String toString() { .add("viewDependencies", viewDependencies) .toString(); } + + TableInfoPb toPb() { + TableInfoPb pb = new TableInfoPb(); + pb.setAccessPoint(accessPoint); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setColumns(columns); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDataAccessConfigurationId(dataAccessConfigurationId); + pb.setDataSourceFormat(dataSourceFormat); + pb.setDeletedAt(deletedAt); + pb.setDeltaRuntimePropertiesKvpairs(deltaRuntimePropertiesKvpairs); + pb.setEffectivePredictiveOptimizationFlag(effectivePredictiveOptimizationFlag); + pb.setEnablePredictiveOptimization(enablePredictiveOptimization); + pb.setEncryptionDetails(encryptionDetails); + pb.setFullName(fullName); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setPipelineId(pipelineId); + pb.setProperties(properties); + 
pb.setRowFilter(rowFilter); + pb.setSchemaName(schemaName); + pb.setSqlPath(sqlPath); + pb.setStorageCredentialName(storageCredentialName); + pb.setStorageLocation(storageLocation); + pb.setTableConstraints(tableConstraints); + pb.setTableId(tableId); + pb.setTableType(tableType); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setViewDefinition(viewDefinition); + pb.setViewDependencies(viewDependencies); + + return pb; + } + + static TableInfo fromPb(TableInfoPb pb) { + TableInfo model = new TableInfo(); + model.setAccessPoint(pb.getAccessPoint()); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setColumns(pb.getColumns()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDataAccessConfigurationId(pb.getDataAccessConfigurationId()); + model.setDataSourceFormat(pb.getDataSourceFormat()); + model.setDeletedAt(pb.getDeletedAt()); + model.setDeltaRuntimePropertiesKvpairs(pb.getDeltaRuntimePropertiesKvpairs()); + model.setEffectivePredictiveOptimizationFlag(pb.getEffectivePredictiveOptimizationFlag()); + model.setEnablePredictiveOptimization(pb.getEnablePredictiveOptimization()); + model.setEncryptionDetails(pb.getEncryptionDetails()); + model.setFullName(pb.getFullName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPipelineId(pb.getPipelineId()); + model.setProperties(pb.getProperties()); + model.setRowFilter(pb.getRowFilter()); + model.setSchemaName(pb.getSchemaName()); + model.setSqlPath(pb.getSqlPath()); + model.setStorageCredentialName(pb.getStorageCredentialName()); + model.setStorageLocation(pb.getStorageLocation()); + model.setTableConstraints(pb.getTableConstraints()); + model.setTableId(pb.getTableId()); + model.setTableType(pb.getTableType()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + 
model.setViewDefinition(pb.getViewDefinition()); + model.setViewDependencies(pb.getViewDependencies()); + + return model; + } + + public static class TableInfoSerializer extends JsonSerializer { + @Override + public void serialize(TableInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableInfoDeserializer extends JsonDeserializer { + @Override + public TableInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableInfoPb pb = mapper.readValue(p, TableInfoPb.class); + return TableInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfoPb.java new file mode 100755 index 000000000..6ce2cee8c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfoPb.java @@ -0,0 +1,515 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class TableInfoPb { + @JsonProperty("access_point") + private String accessPoint; + + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("columns") + private Collection columns; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("data_access_configuration_id") + private String dataAccessConfigurationId; + + @JsonProperty("data_source_format") + private DataSourceFormat dataSourceFormat; + + @JsonProperty("deleted_at") + private Long deletedAt; + + @JsonProperty("delta_runtime_properties_kvpairs") + private DeltaRuntimePropertiesKvPairs deltaRuntimePropertiesKvpairs; + + @JsonProperty("effective_predictive_optimization_flag") + private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; + + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + + @JsonProperty("encryption_details") + private EncryptionDetails encryptionDetails; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("properties") + private Map properties; + + @JsonProperty("row_filter") + private TableRowFilter rowFilter; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("sql_path") + private String sqlPath; + + 
@JsonProperty("storage_credential_name") + private String storageCredentialName; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("table_constraints") + private Collection tableConstraints; + + @JsonProperty("table_id") + private String tableId; + + @JsonProperty("table_type") + private TableType tableType; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("view_definition") + private String viewDefinition; + + @JsonProperty("view_dependencies") + private DependencyList viewDependencies; + + public TableInfoPb setAccessPoint(String accessPoint) { + this.accessPoint = accessPoint; + return this; + } + + public String getAccessPoint() { + return accessPoint; + } + + public TableInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public TableInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public TableInfoPb setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public TableInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public TableInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public TableInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public TableInfoPb setDataAccessConfigurationId(String dataAccessConfigurationId) { + this.dataAccessConfigurationId = dataAccessConfigurationId; + return this; + } + + public String getDataAccessConfigurationId() { 
+ return dataAccessConfigurationId; + } + + public TableInfoPb setDataSourceFormat(DataSourceFormat dataSourceFormat) { + this.dataSourceFormat = dataSourceFormat; + return this; + } + + public DataSourceFormat getDataSourceFormat() { + return dataSourceFormat; + } + + public TableInfoPb setDeletedAt(Long deletedAt) { + this.deletedAt = deletedAt; + return this; + } + + public Long getDeletedAt() { + return deletedAt; + } + + public TableInfoPb setDeltaRuntimePropertiesKvpairs( + DeltaRuntimePropertiesKvPairs deltaRuntimePropertiesKvpairs) { + this.deltaRuntimePropertiesKvpairs = deltaRuntimePropertiesKvpairs; + return this; + } + + public DeltaRuntimePropertiesKvPairs getDeltaRuntimePropertiesKvpairs() { + return deltaRuntimePropertiesKvpairs; + } + + public TableInfoPb setEffectivePredictiveOptimizationFlag( + EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { + this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; + return this; + } + + public EffectivePredictiveOptimizationFlag getEffectivePredictiveOptimizationFlag() { + return effectivePredictiveOptimizationFlag; + } + + public TableInfoPb setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + + public TableInfoPb setEncryptionDetails(EncryptionDetails encryptionDetails) { + this.encryptionDetails = encryptionDetails; + return this; + } + + public EncryptionDetails getEncryptionDetails() { + return encryptionDetails; + } + + public TableInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public TableInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String 
getMetastoreId() { + return metastoreId; + } + + public TableInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public TableInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public TableInfoPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public TableInfoPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public TableInfoPb setRowFilter(TableRowFilter rowFilter) { + this.rowFilter = rowFilter; + return this; + } + + public TableRowFilter getRowFilter() { + return rowFilter; + } + + public TableInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public TableInfoPb setSqlPath(String sqlPath) { + this.sqlPath = sqlPath; + return this; + } + + public String getSqlPath() { + return sqlPath; + } + + public TableInfoPb setStorageCredentialName(String storageCredentialName) { + this.storageCredentialName = storageCredentialName; + return this; + } + + public String getStorageCredentialName() { + return storageCredentialName; + } + + public TableInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public TableInfoPb setTableConstraints(Collection tableConstraints) { + this.tableConstraints = tableConstraints; + return this; + } + + public Collection getTableConstraints() { + return tableConstraints; + } + + public TableInfoPb setTableId(String tableId) { + this.tableId = tableId; + return this; + } + + public String getTableId() { + return tableId; + } + + public TableInfoPb 
setTableType(TableType tableType) { + this.tableType = tableType; + return this; + } + + public TableType getTableType() { + return tableType; + } + + public TableInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public TableInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public TableInfoPb setViewDefinition(String viewDefinition) { + this.viewDefinition = viewDefinition; + return this; + } + + public String getViewDefinition() { + return viewDefinition; + } + + public TableInfoPb setViewDependencies(DependencyList viewDependencies) { + this.viewDependencies = viewDependencies; + return this; + } + + public DependencyList getViewDependencies() { + return viewDependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableInfoPb that = (TableInfoPb) o; + return Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(columns, that.columns) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(dataAccessConfigurationId, that.dataAccessConfigurationId) + && Objects.equals(dataSourceFormat, that.dataSourceFormat) + && Objects.equals(deletedAt, that.deletedAt) + && Objects.equals(deltaRuntimePropertiesKvpairs, that.deltaRuntimePropertiesKvpairs) + && Objects.equals( + effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) + && Objects.equals(encryptionDetails, that.encryptionDetails) + && Objects.equals(fullName, that.fullName) + && 
Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(properties, that.properties) + && Objects.equals(rowFilter, that.rowFilter) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(sqlPath, that.sqlPath) + && Objects.equals(storageCredentialName, that.storageCredentialName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(tableConstraints, that.tableConstraints) + && Objects.equals(tableId, that.tableId) + && Objects.equals(tableType, that.tableType) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(viewDefinition, that.viewDefinition) + && Objects.equals(viewDependencies, that.viewDependencies); + } + + @Override + public int hashCode() { + return Objects.hash( + accessPoint, + browseOnly, + catalogName, + columns, + comment, + createdAt, + createdBy, + dataAccessConfigurationId, + dataSourceFormat, + deletedAt, + deltaRuntimePropertiesKvpairs, + effectivePredictiveOptimizationFlag, + enablePredictiveOptimization, + encryptionDetails, + fullName, + metastoreId, + name, + owner, + pipelineId, + properties, + rowFilter, + schemaName, + sqlPath, + storageCredentialName, + storageLocation, + tableConstraints, + tableId, + tableType, + updatedAt, + updatedBy, + viewDefinition, + viewDependencies); + } + + @Override + public String toString() { + return new ToStringer(TableInfoPb.class) + .add("accessPoint", accessPoint) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("columns", columns) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("dataAccessConfigurationId", dataAccessConfigurationId) + .add("dataSourceFormat", dataSourceFormat) + .add("deletedAt", deletedAt) + .add("deltaRuntimePropertiesKvpairs", deltaRuntimePropertiesKvpairs) + 
.add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) + .add("enablePredictiveOptimization", enablePredictiveOptimization) + .add("encryptionDetails", encryptionDetails) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("pipelineId", pipelineId) + .add("properties", properties) + .add("rowFilter", rowFilter) + .add("schemaName", schemaName) + .add("sqlPath", sqlPath) + .add("storageCredentialName", storageCredentialName) + .add("storageLocation", storageLocation) + .add("tableConstraints", tableConstraints) + .add("tableId", tableId) + .add("tableType", tableType) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("viewDefinition", viewDefinition) + .add("viewDependencies", viewDependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java index bd53b8d98..a140f53b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize(using = TableRowFilter.TableRowFilterSerializer.class) +@JsonDeserialize(using = TableRowFilter.TableRowFilterDeserializer.class) public class TableRowFilter { /** The full name of the row filter SQL UDF. */ - @JsonProperty("function_name") private String functionName; /** * The list of table columns to be passed as input to the row filter function. The column types * should match the types of the filter function arguments. */ - @JsonProperty("input_column_names") private Collection inputColumnNames; public TableRowFilter setFunctionName(String functionName) { @@ -60,4 +69,40 @@ public String toString() { .add("inputColumnNames", inputColumnNames) .toString(); } + + TableRowFilterPb toPb() { + TableRowFilterPb pb = new TableRowFilterPb(); + pb.setFunctionName(functionName); + pb.setInputColumnNames(inputColumnNames); + + return pb; + } + + static TableRowFilter fromPb(TableRowFilterPb pb) { + TableRowFilter model = new TableRowFilter(); + model.setFunctionName(pb.getFunctionName()); + model.setInputColumnNames(pb.getInputColumnNames()); + + return model; + } + + public static class TableRowFilterSerializer extends JsonSerializer { + @Override + public void serialize(TableRowFilter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableRowFilterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableRowFilterDeserializer extends JsonDeserializer { + @Override + public TableRowFilter deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableRowFilterPb pb = mapper.readValue(p, TableRowFilterPb.class); + return TableRowFilter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilterPb.java new file mode 100755 index 000000000..a3b85f077 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilterPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TableRowFilterPb { + @JsonProperty("function_name") + private String functionName; + + @JsonProperty("input_column_names") + private Collection inputColumnNames; + + public TableRowFilterPb setFunctionName(String functionName) { + this.functionName = functionName; + return this; + } + + public String getFunctionName() { + return functionName; + } + + public TableRowFilterPb setInputColumnNames(Collection inputColumnNames) { + this.inputColumnNames = inputColumnNames; + return this; + } + + public Collection getInputColumnNames() { + return inputColumnNames; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableRowFilterPb that = (TableRowFilterPb) o; + return Objects.equals(functionName, that.functionName) + && Objects.equals(inputColumnNames, that.inputColumnNames); + } + + @Override + public int hashCode() { + return Objects.hash(functionName, inputColumnNames); + } + + @Override + public String toString() { + return new ToStringer(TableRowFilterPb.class) + .add("functionName", 
functionName) + .add("inputColumnNames", inputColumnNames) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java index ea7c14e18..6f6801f2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TableSummary.TableSummarySerializer.class) +@JsonDeserialize(using = TableSummary.TableSummaryDeserializer.class) public class TableSummary { /** The full name of the table. 
*/ - @JsonProperty("full_name") private String fullName; /** */ - @JsonProperty("table_type") private TableType tableType; public TableSummary setFullName(String fullName) { @@ -55,4 +64,39 @@ public String toString() { .add("tableType", tableType) .toString(); } + + TableSummaryPb toPb() { + TableSummaryPb pb = new TableSummaryPb(); + pb.setFullName(fullName); + pb.setTableType(tableType); + + return pb; + } + + static TableSummary fromPb(TableSummaryPb pb) { + TableSummary model = new TableSummary(); + model.setFullName(pb.getFullName()); + model.setTableType(pb.getTableType()); + + return model; + } + + public static class TableSummarySerializer extends JsonSerializer { + @Override + public void serialize(TableSummary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableSummaryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableSummaryDeserializer extends JsonDeserializer { + @Override + public TableSummary deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableSummaryPb pb = mapper.readValue(p, TableSummaryPb.class); + return TableSummary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummaryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummaryPb.java new file mode 100755 index 000000000..f6dab28f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummaryPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TableSummaryPb { + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("table_type") + private TableType tableType; + + public TableSummaryPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public TableSummaryPb setTableType(TableType tableType) { + this.tableType = tableType; + return this; + } + + public TableType getTableType() { + return tableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableSummaryPb that = (TableSummaryPb) o; + return Objects.equals(fullName, that.fullName) && Objects.equals(tableType, that.tableType); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, tableType); + } + + @Override + public String toString() { + return new ToStringer(TableSummaryPb.class) + .add("fullName", fullName) + .add("tableType", tableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java index 6f5b3304b..6a097b276 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java @@ -21,7 +21,7 @@ public void delete(DeleteTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public TableExistsResponse exists(ExistsRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/exists", request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, TableExistsResponse.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public TableInfo get(GetTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, TableInfo.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListTablesResponse list(ListTablesRequest request) { String path = "/api/2.1/unity-catalog/tables"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListTablesResponse.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public ListTableSummariesResponse listSummaries(ListSummariesRequest request) { String path = "/api/2.1/unity-catalog/table-summaries"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListTableSummariesResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public void update(UpdateTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java index 3bc328e51..9362071a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TagKeyValue.TagKeyValueSerializer.class) +@JsonDeserialize(using = TagKeyValue.TagKeyValueDeserializer.class) public class TagKeyValue { /** name of the tag */ - @JsonProperty("key") private String key; /** value of the tag associated with the key, could be optional */ - @JsonProperty("value") private String value; public TagKeyValue setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(TagKeyValue.class).add("key", key).add("value", value).toString(); } + + TagKeyValuePb toPb() { + TagKeyValuePb pb = new TagKeyValuePb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static 
TagKeyValue fromPb(TagKeyValuePb pb) { + TagKeyValue model = new TagKeyValue(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class TagKeyValueSerializer extends JsonSerializer { + @Override + public void serialize(TagKeyValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TagKeyValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TagKeyValueDeserializer extends JsonDeserializer { + @Override + public TagKeyValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TagKeyValuePb pb = mapper.readValue(p, TagKeyValuePb.class); + return TagKeyValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValuePb.java new file mode 100755 index 000000000..076737e6d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValuePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TagKeyValuePb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public TagKeyValuePb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public TagKeyValuePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TagKeyValuePb that = (TagKeyValuePb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(TagKeyValuePb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java index 8083ee16b..6f5e58ca3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TemporaryCredentials.TemporaryCredentialsSerializer.class) +@JsonDeserialize(using = TemporaryCredentials.TemporaryCredentialsDeserializer.class) public class TemporaryCredentials { /** * AWS temporary credentials for API authentication. Read more at * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. */ - @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; /** @@ -21,21 +31,18 @@ public class TemporaryCredentials { * Managed Identity. Read more at * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token */ - @JsonProperty("azure_aad") private AzureActiveDirectoryToken azureAad; /** * Server time when the credential will expire, in epoch milliseconds. The API client is advised * to cache the credential given this expiration time. */ - @JsonProperty("expiration_time") private Long expirationTime; /** * GCP temporary credentials for API authentication. 
Read more at * https://developers.google.com/identity/protocols/oauth2/service-account */ - @JsonProperty("gcp_oauth_token") private GcpOauthToken gcpOauthToken; public TemporaryCredentials setAwsTempCredentials(AwsCredentials awsTempCredentials) { @@ -99,4 +106,46 @@ public String toString() { .add("gcpOauthToken", gcpOauthToken) .toString(); } + + TemporaryCredentialsPb toPb() { + TemporaryCredentialsPb pb = new TemporaryCredentialsPb(); + pb.setAwsTempCredentials(awsTempCredentials); + pb.setAzureAad(azureAad); + pb.setExpirationTime(expirationTime); + pb.setGcpOauthToken(gcpOauthToken); + + return pb; + } + + static TemporaryCredentials fromPb(TemporaryCredentialsPb pb) { + TemporaryCredentials model = new TemporaryCredentials(); + model.setAwsTempCredentials(pb.getAwsTempCredentials()); + model.setAzureAad(pb.getAzureAad()); + model.setExpirationTime(pb.getExpirationTime()); + model.setGcpOauthToken(pb.getGcpOauthToken()); + + return model; + } + + public static class TemporaryCredentialsSerializer extends JsonSerializer { + @Override + public void serialize( + TemporaryCredentials value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TemporaryCredentialsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TemporaryCredentialsDeserializer + extends JsonDeserializer { + @Override + public TemporaryCredentials deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TemporaryCredentialsPb pb = mapper.readValue(p, TemporaryCredentialsPb.class); + return TemporaryCredentials.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentialsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentialsPb.java new file mode 100755 index 000000000..d64c77bdf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentialsPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TemporaryCredentialsPb { + @JsonProperty("aws_temp_credentials") + private AwsCredentials awsTempCredentials; + + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("gcp_oauth_token") + private GcpOauthToken gcpOauthToken; + + public TemporaryCredentialsPb setAwsTempCredentials(AwsCredentials awsTempCredentials) { + this.awsTempCredentials = awsTempCredentials; + return this; + } + + public AwsCredentials getAwsTempCredentials() { + return awsTempCredentials; + } + + public TemporaryCredentialsPb setAzureAad(AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + + public TemporaryCredentialsPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public TemporaryCredentialsPb setGcpOauthToken(GcpOauthToken gcpOauthToken) { + this.gcpOauthToken = 
gcpOauthToken; + return this; + } + + public GcpOauthToken getGcpOauthToken() { + return gcpOauthToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TemporaryCredentialsPb that = (TemporaryCredentialsPb) o; + return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(gcpOauthToken, that.gcpOauthToken); + } + + @Override + public int hashCode() { + return Objects.hash(awsTempCredentials, azureAad, expirationTime, gcpOauthToken); + } + + @Override + public String toString() { + return new ToStringer(TemporaryCredentialsPb.class) + .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) + .add("expirationTime", expirationTime) + .add("gcpOauthToken", gcpOauthToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java index 8c3fab716..4ec30ff6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java @@ -22,7 +22,7 @@ public GenerateTemporaryTableCredentialResponse generateTemporaryTableCredential String path = "/api/2.0/unity-catalog/temporary-table-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, GenerateTemporaryTableCredentialResponse.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java index e455fc710..78c464762 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,23 +21,22 @@ * or the ONLINE_NO_PENDING_UPDATE state. */ @Generated +@JsonSerialize(using = TriggeredUpdateStatus.TriggeredUpdateStatusSerializer.class) +@JsonDeserialize(using = TriggeredUpdateStatus.TriggeredUpdateStatusDeserializer.class) public class TriggeredUpdateStatus { /** * The last source table Delta version that was synced to the online table. Note that this Delta * version may not be completely synced to the online table yet. */ - @JsonProperty("last_processed_commit_version") private Long lastProcessedCommitVersion; /** * The timestamp of the last time any data was synchronized from the source table to the online * table. */ - @JsonProperty("timestamp") private String timestamp; /** Progress of the active data synchronization pipeline. 
*/ - @JsonProperty("triggered_update_progress") private PipelineProgress triggeredUpdateProgress; public TriggeredUpdateStatus setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { @@ -82,4 +90,45 @@ public String toString() { .add("triggeredUpdateProgress", triggeredUpdateProgress) .toString(); } + + TriggeredUpdateStatusPb toPb() { + TriggeredUpdateStatusPb pb = new TriggeredUpdateStatusPb(); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + pb.setTriggeredUpdateProgress(triggeredUpdateProgress); + + return pb; + } + + static TriggeredUpdateStatus fromPb(TriggeredUpdateStatusPb pb) { + TriggeredUpdateStatus model = new TriggeredUpdateStatus(); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + model.setTriggeredUpdateProgress(pb.getTriggeredUpdateProgress()); + + return model; + } + + public static class TriggeredUpdateStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + TriggeredUpdateStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TriggeredUpdateStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TriggeredUpdateStatusDeserializer + extends JsonDeserializer { + @Override + public TriggeredUpdateStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TriggeredUpdateStatusPb pb = mapper.readValue(p, TriggeredUpdateStatusPb.class); + return TriggeredUpdateStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatusPb.java new file mode 100755 index 000000000..e20f94ad2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatusPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE + * or the ONLINE_NO_PENDING_UPDATE state. 
+ */ +@Generated +class TriggeredUpdateStatusPb { + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + @JsonProperty("triggered_update_progress") + private PipelineProgress triggeredUpdateProgress; + + public TriggeredUpdateStatusPb setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public TriggeredUpdateStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public TriggeredUpdateStatusPb setTriggeredUpdateProgress( + PipelineProgress triggeredUpdateProgress) { + this.triggeredUpdateProgress = triggeredUpdateProgress; + return this; + } + + public PipelineProgress getTriggeredUpdateProgress() { + return triggeredUpdateProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TriggeredUpdateStatusPb that = (TriggeredUpdateStatusPb) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(triggeredUpdateProgress, that.triggeredUpdateProgress); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp, triggeredUpdateProgress); + } + + @Override + public String toString() { + return new ToStringer(TriggeredUpdateStatusPb.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .add("triggeredUpdateProgress", triggeredUpdateProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java index 29c6aac69..ed925407b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an assignment */ @Generated +@JsonSerialize(using = UnassignRequest.UnassignRequestSerializer.class) +@JsonDeserialize(using = UnassignRequest.UnassignRequestDeserializer.class) public class UnassignRequest { /** Query for the ID of the metastore to delete. */ - @JsonIgnore - @QueryParam("metastore_id") private String metastoreId; /** A workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public UnassignRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -58,4 +66,40 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + UnassignRequestPb toPb() { + UnassignRequestPb pb = new UnassignRequestPb(); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static UnassignRequest fromPb(UnassignRequestPb pb) { + UnassignRequest model = new UnassignRequest(); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class UnassignRequestSerializer extends JsonSerializer { + @Override + public void serialize(UnassignRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UnassignRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnassignRequestDeserializer extends JsonDeserializer { + @Override + public UnassignRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnassignRequestPb pb = mapper.readValue(p, UnassignRequestPb.class); + return UnassignRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequestPb.java new file mode 100755 index 000000000..57c56ee36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an assignment */ +@Generated +class UnassignRequestPb { + @JsonIgnore + @QueryParam("metastore_id") + private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public UnassignRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public UnassignRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UnassignRequestPb that = (UnassignRequestPb) o; + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(UnassignRequestPb.class) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java index 61a2a8cb3..77b1834ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UnassignResponse.UnassignResponseSerializer.class) +@JsonDeserialize(using = UnassignResponse.UnassignResponseDeserializer.class) public class UnassignResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UnassignResponse.class).toString(); } + + UnassignResponsePb toPb() { + UnassignResponsePb pb = new UnassignResponsePb(); + + return pb; + } + + static UnassignResponse fromPb(UnassignResponsePb pb) { + UnassignResponse model = new UnassignResponse(); + + return model; + } + + public static class UnassignResponseSerializer extends JsonSerializer { + @Override + public void serialize(UnassignResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UnassignResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnassignResponseDeserializer extends JsonDeserializer { + @Override + public UnassignResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnassignResponsePb pb = mapper.readValue(p, UnassignResponsePb.class); + return UnassignResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponsePb.java new file mode 100755 index 000000000..0e66eb449 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UnassignResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UnassignResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java index 8deea1834..230bd7f45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateAssignmentResponse.UpdateAssignmentResponseSerializer.class) +@JsonDeserialize(using = UpdateAssignmentResponse.UpdateAssignmentResponseDeserializer.class) public class UpdateAssignmentResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateAssignmentResponse.class).toString(); } + + UpdateAssignmentResponsePb toPb() { + UpdateAssignmentResponsePb pb = new UpdateAssignmentResponsePb(); + + return pb; + } + + static UpdateAssignmentResponse fromPb(UpdateAssignmentResponsePb pb) { + UpdateAssignmentResponse model = new UpdateAssignmentResponse(); + + return model; + } + + public static class UpdateAssignmentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAssignmentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAssignmentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAssignmentResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateAssignmentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAssignmentResponsePb pb = mapper.readValue(p, UpdateAssignmentResponsePb.class); + return UpdateAssignmentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponsePb.java new file mode 100755 index 000000000..d801b4844 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateAssignmentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateAssignmentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java index b817347f1..357c802d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java @@ -4,45 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCatalog.UpdateCatalogSerializer.class) +@JsonDeserialize(using = UpdateCatalog.UpdateCatalogDeserializer.class) public class UpdateCatalog { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Whether predictive optimization should be enabled for this object and objects under it. */ - @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; /** * Whether the current securable is accessible from all workspaces or a specific set of * workspaces. */ - @JsonProperty("isolation_mode") private CatalogIsolationMode isolationMode; /** The name of the catalog. */ - @JsonIgnore private String name; + private String name; /** New name for the catalog. */ - @JsonProperty("new_name") private String newName; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** Username of current owner of catalog. */ - @JsonProperty("owner") private String owner; /** A map of key-value properties attached to the securable. 
*/ - @JsonProperty("properties") private Map properties; public UpdateCatalog setComment(String comment) { @@ -159,4 +162,51 @@ public String toString() { .add("properties", properties) .toString(); } + + UpdateCatalogPb toPb() { + UpdateCatalogPb pb = new UpdateCatalogPb(); + pb.setComment(comment); + pb.setEnablePredictiveOptimization(enablePredictiveOptimization); + pb.setIsolationMode(isolationMode); + pb.setName(name); + pb.setNewName(newName); + pb.setOptions(options); + pb.setOwner(owner); + pb.setProperties(properties); + + return pb; + } + + static UpdateCatalog fromPb(UpdateCatalogPb pb) { + UpdateCatalog model = new UpdateCatalog(); + model.setComment(pb.getComment()); + model.setEnablePredictiveOptimization(pb.getEnablePredictiveOptimization()); + model.setIsolationMode(pb.getIsolationMode()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOptions(pb.getOptions()); + model.setOwner(pb.getOwner()); + model.setProperties(pb.getProperties()); + + return model; + } + + public static class UpdateCatalogSerializer extends JsonSerializer { + @Override + public void serialize(UpdateCatalog value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCatalogPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCatalogDeserializer extends JsonDeserializer { + @Override + public UpdateCatalog deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCatalogPb pb = mapper.readValue(p, UpdateCatalogPb.class); + return UpdateCatalog.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogPb.java new file mode 100755 index 000000000..87437ef7b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogPb.java @@ -0,0 +1,151 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class UpdateCatalogPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + + @JsonProperty("isolation_mode") + private CatalogIsolationMode isolationMode; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("options") + private Map options; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties") + private Map properties; + + public UpdateCatalogPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateCatalogPb setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + + public UpdateCatalogPb 
setIsolationMode(CatalogIsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public CatalogIsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateCatalogPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateCatalogPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateCatalogPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map getOptions() { + return options; + } + + public UpdateCatalogPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateCatalogPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCatalogPb that = (UpdateCatalogPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(options, that.options) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + enablePredictiveOptimization, + isolationMode, + name, + newName, + options, + owner, + properties); + } + + @Override + public String toString() { + return new ToStringer(UpdateCatalogPb.class) + .add("comment", comment) + .add("enablePredictiveOptimization", enablePredictiveOptimization) + .add("isolationMode", isolationMode) + .add("name", name) + .add("newName", newName) + 
.add("options", options) + .add("owner", owner) + .add("properties", properties) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java index d3a70c499..ba210be9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateCatalogWorkspaceBindingsResponse.UpdateCatalogWorkspaceBindingsResponseSerializer + .class) +@JsonDeserialize( + using = + UpdateCatalogWorkspaceBindingsResponse.UpdateCatalogWorkspaceBindingsResponseDeserializer + .class) public class UpdateCatalogWorkspaceBindingsResponse { /** A list of workspace IDs */ - @JsonProperty("workspaces") private Collection workspaces; public UpdateCatalogWorkspaceBindingsResponse setWorkspaces(Collection workspaces) { @@ -42,4 +58,45 @@ public String toString() { .add("workspaces", workspaces) .toString(); } + + 
UpdateCatalogWorkspaceBindingsResponsePb toPb() { + UpdateCatalogWorkspaceBindingsResponsePb pb = new UpdateCatalogWorkspaceBindingsResponsePb(); + pb.setWorkspaces(workspaces); + + return pb; + } + + static UpdateCatalogWorkspaceBindingsResponse fromPb( + UpdateCatalogWorkspaceBindingsResponsePb pb) { + UpdateCatalogWorkspaceBindingsResponse model = new UpdateCatalogWorkspaceBindingsResponse(); + model.setWorkspaces(pb.getWorkspaces()); + + return model; + } + + public static class UpdateCatalogWorkspaceBindingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCatalogWorkspaceBindingsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateCatalogWorkspaceBindingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCatalogWorkspaceBindingsResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateCatalogWorkspaceBindingsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCatalogWorkspaceBindingsResponsePb pb = + mapper.readValue(p, UpdateCatalogWorkspaceBindingsResponsePb.class); + return UpdateCatalogWorkspaceBindingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponsePb.java new file mode 100755 index 000000000..f499af0e9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateCatalogWorkspaceBindingsResponsePb { + @JsonProperty("workspaces") + private Collection workspaces; + + public UpdateCatalogWorkspaceBindingsResponsePb setWorkspaces(Collection workspaces) { + this.workspaces = workspaces; + return this; + } + + public Collection getWorkspaces() { + return workspaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCatalogWorkspaceBindingsResponsePb that = (UpdateCatalogWorkspaceBindingsResponsePb) o; + return Objects.equals(workspaces, that.workspaces); + } + + @Override + public int hashCode() { + return Objects.hash(workspaces); + } + + @Override + public String toString() { + return new ToStringer(UpdateCatalogWorkspaceBindingsResponsePb.class) + .add("workspaces", workspaces) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java index d37165c3a..d53ad0dfe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateConnection.UpdateConnectionSerializer.class) +@JsonDeserialize(using = UpdateConnection.UpdateConnectionDeserializer.class) public class UpdateConnection { /** Name of the connection. */ - @JsonIgnore private String name; + private String name; /** New name for the connection. */ - @JsonProperty("new_name") private String newName; /** A map of key-value properties attached to the securable. */ - @JsonProperty("options") private Map options; /** Username of current owner of the connection. */ - @JsonProperty("owner") private String owner; public UpdateConnection setName(String name) { @@ -87,4 +94,44 @@ public String toString() { .add("owner", owner) .toString(); } + + UpdateConnectionPb toPb() { + UpdateConnectionPb pb = new UpdateConnectionPb(); + pb.setName(name); + pb.setNewName(newName); + pb.setOptions(options); + pb.setOwner(owner); + + return pb; + } + + static UpdateConnection fromPb(UpdateConnectionPb pb) { + UpdateConnection model = new UpdateConnection(); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOptions(pb.getOptions()); + model.setOwner(pb.getOwner()); + + return model; + } + + public static class UpdateConnectionSerializer extends JsonSerializer { + @Override + public void serialize(UpdateConnection value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateConnectionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateConnectionDeserializer extends JsonDeserializer { + @Override + public UpdateConnection deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateConnectionPb pb = mapper.readValue(p, UpdateConnectionPb.class); + return UpdateConnection.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnectionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnectionPb.java new file mode 100755 index 000000000..aade50c50 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnectionPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class UpdateConnectionPb { + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("options") + private Map options; + + @JsonProperty("owner") + private String owner; + + public UpdateConnectionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateConnectionPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateConnectionPb setOptions(Map options) { + this.options = options; + return this; + } + + public Map getOptions() { + return options; + } + + public UpdateConnectionPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + UpdateConnectionPb that = (UpdateConnectionPb) o; + return Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(options, that.options) + && Objects.equals(owner, that.owner); + } + + @Override + public int hashCode() { + return Objects.hash(name, newName, options, owner); + } + + @Override + public String toString() { + return new ToStringer(UpdateConnectionPb.class) + .add("name", name) + .add("newName", newName) + .add("options", options) + .add("owner", owner) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java index 6f91812cf..9663400b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java @@ -4,66 +4,65 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCredentialRequest.UpdateCredentialRequestSerializer.class) +@JsonDeserialize(using = 
UpdateCredentialRequest.UpdateCredentialRequestDeserializer.class) public class UpdateCredentialRequest { /** The AWS IAM role configuration */ - @JsonProperty("aws_iam_role") private AwsIamRole awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccount databricksGcpServiceAccount; /** * Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent * external locations and external tables (when purpose is **STORAGE**). */ - @JsonProperty("force") private Boolean force; /** * Whether the current securable is accessible from all workspaces or a specific set of * workspaces. */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Name of the credential. */ - @JsonIgnore private String nameArg; + private String nameArg; /** New name of credential. */ - @JsonProperty("new_name") private String newName; /** Username of current owner of credential. */ - @JsonProperty("owner") private String owner; /** * Whether the credential is usable only for read operations. Only applicable when purpose is * **STORAGE**. */ - @JsonProperty("read_only") private Boolean readOnly; /** Supply true to this argument to skip validation of the updated credential. 
*/ - @JsonProperty("skip_validation") private Boolean skipValidation; public UpdateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { @@ -230,4 +229,63 @@ public String toString() { .add("skipValidation", skipValidation) .toString(); } + + UpdateCredentialRequestPb toPb() { + UpdateCredentialRequestPb pb = new UpdateCredentialRequestPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setComment(comment); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setForce(force); + pb.setIsolationMode(isolationMode); + pb.setNameArg(nameArg); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + + return pb; + } + + static UpdateCredentialRequest fromPb(UpdateCredentialRequestPb pb) { + UpdateCredentialRequest model = new UpdateCredentialRequest(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setComment(pb.getComment()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setForce(pb.getForce()); + model.setIsolationMode(pb.getIsolationMode()); + model.setNameArg(pb.getNameArg()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + + return model; + } + + public static class UpdateCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateCredentialRequest 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCredentialRequestPb pb = mapper.readValue(p, UpdateCredentialRequestPb.class); + return UpdateCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequestPb.java new file mode 100755 index 000000000..1f1699559 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequestPb.java @@ -0,0 +1,212 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateCredentialRequestPb { + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + + @JsonProperty("force") + private Boolean force; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonIgnore private String nameArg; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; 
+ + public UpdateCredentialRequestPb setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public UpdateCredentialRequestPb setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public UpdateCredentialRequestPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public UpdateCredentialRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateCredentialRequestPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public UpdateCredentialRequestPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public UpdateCredentialRequestPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateCredentialRequestPb setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + public UpdateCredentialRequestPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateCredentialRequestPb 
setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateCredentialRequestPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public UpdateCredentialRequestPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCredentialRequestPb that = (UpdateCredentialRequestPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(force, that.force) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(nameArg, that.nameArg) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + comment, + databricksGcpServiceAccount, + force, + isolationMode, + nameArg, + newName, + owner, + readOnly, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialRequestPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("force", force) + .add("isolationMode", 
isolationMode) + .add("nameArg", nameArg) + .add("newName", newName) + .add("owner", owner) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java index d2a759d9f..172531e24 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java @@ -4,26 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExternalLocation.UpdateExternalLocationSerializer.class) +@JsonDeserialize(using = UpdateExternalLocation.UpdateExternalLocationDeserializer.class) public class UpdateExternalLocation { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Name of the storage credential used with this location. */ - @JsonProperty("credential_name") private String credentialName; /** [Create:OPT Update:OPT] Whether to enable file events on this external location. 
*/ - @JsonProperty("enable_file_events") private Boolean enableFileEvents; /** Encryption options that apply to clients connecting to cloud storage. */ - @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; /** @@ -31,42 +37,33 @@ public class UpdateExternalLocation { * enabled, the access to the location falls back to cluster credentials if UC credentials are not * sufficient. */ - @JsonProperty("fallback") private Boolean fallback; /** [Create:OPT Update:OPT] File event queue settings. */ - @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; /** Force update even if changing url invalidates dependent external tables or mounts. */ - @JsonProperty("force") private Boolean force; /** */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Name of the external location. */ - @JsonIgnore private String name; + private String name; /** New name for the external location. */ - @JsonProperty("new_name") private String newName; /** The owner of the external location. */ - @JsonProperty("owner") private String owner; /** Indicates whether the external location is read-only. */ - @JsonProperty("read_only") private Boolean readOnly; /** Skips validation of the storage credential associated with the external location. */ - @JsonProperty("skip_validation") private Boolean skipValidation; /** Path URL of the external location. 
*/ - @JsonProperty("url") private String url; public UpdateExternalLocation setComment(String comment) { @@ -254,4 +251,67 @@ public String toString() { .add("url", url) .toString(); } + + UpdateExternalLocationPb toPb() { + UpdateExternalLocationPb pb = new UpdateExternalLocationPb(); + pb.setComment(comment); + pb.setCredentialName(credentialName); + pb.setEnableFileEvents(enableFileEvents); + pb.setEncryptionDetails(encryptionDetails); + pb.setFallback(fallback); + pb.setFileEventQueue(fileEventQueue); + pb.setForce(force); + pb.setIsolationMode(isolationMode); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + pb.setUrl(url); + + return pb; + } + + static UpdateExternalLocation fromPb(UpdateExternalLocationPb pb) { + UpdateExternalLocation model = new UpdateExternalLocation(); + model.setComment(pb.getComment()); + model.setCredentialName(pb.getCredentialName()); + model.setEnableFileEvents(pb.getEnableFileEvents()); + model.setEncryptionDetails(pb.getEncryptionDetails()); + model.setFallback(pb.getFallback()); + model.setFileEventQueue(pb.getFileEventQueue()); + model.setForce(pb.getForce()); + model.setIsolationMode(pb.getIsolationMode()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class UpdateExternalLocationSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExternalLocation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExternalLocationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExternalLocationDeserializer + extends JsonDeserializer { + @Override + public UpdateExternalLocation deserialize(JsonParser p, DeserializationContext 
ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExternalLocationPb pb = mapper.readValue(p, UpdateExternalLocationPb.class); + return UpdateExternalLocation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocationPb.java new file mode 100755 index 000000000..7677ed2c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocationPb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExternalLocationPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("credential_name") + private String credentialName; + + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + + @JsonProperty("encryption_details") + private EncryptionDetails encryptionDetails; + + @JsonProperty("fallback") + private Boolean fallback; + + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + + @JsonProperty("force") + private Boolean force; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + @JsonProperty("url") + private String url; + + public 
UpdateExternalLocationPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateExternalLocationPb setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public UpdateExternalLocationPb setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + + public UpdateExternalLocationPb setEncryptionDetails(EncryptionDetails encryptionDetails) { + this.encryptionDetails = encryptionDetails; + return this; + } + + public EncryptionDetails getEncryptionDetails() { + return encryptionDetails; + } + + public UpdateExternalLocationPb setFallback(Boolean fallback) { + this.fallback = fallback; + return this; + } + + public Boolean getFallback() { + return fallback; + } + + public UpdateExternalLocationPb setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + + public UpdateExternalLocationPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public UpdateExternalLocationPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateExternalLocationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateExternalLocationPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateExternalLocationPb setOwner(String owner) { + this.owner = owner; + return 
this; + } + + public String getOwner() { + return owner; + } + + public UpdateExternalLocationPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public UpdateExternalLocationPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + public UpdateExternalLocationPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExternalLocationPb that = (UpdateExternalLocationPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(enableFileEvents, that.enableFileEvents) + && Objects.equals(encryptionDetails, that.encryptionDetails) + && Objects.equals(fallback, that.fallback) + && Objects.equals(fileEventQueue, that.fileEventQueue) + && Objects.equals(force, that.force) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + credentialName, + enableFileEvents, + encryptionDetails, + fallback, + fileEventQueue, + force, + isolationMode, + name, + newName, + owner, + readOnly, + skipValidation, + url); + } + + @Override + public String toString() { + return new ToStringer(UpdateExternalLocationPb.class) + .add("comment", comment) + .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) + .add("encryptionDetails", encryptionDetails) + 
.add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) + .add("force", force) + .add("isolationMode", isolationMode) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java index a785536bf..f91953923 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateFunction.UpdateFunctionSerializer.class) +@JsonDeserialize(using = UpdateFunction.UpdateFunctionDeserializer.class) public class UpdateFunction { /** * The fully-qualified name of the function (of the form * __catalog_name__.__schema_name__.__function__name__). */ - @JsonIgnore private String name; + private String name; /** Username of current owner of function. 
*/ - @JsonProperty("owner") private String owner; public UpdateFunction setName(String name) { @@ -55,4 +64,40 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateFunction.class).add("name", name).add("owner", owner).toString(); } + + UpdateFunctionPb toPb() { + UpdateFunctionPb pb = new UpdateFunctionPb(); + pb.setName(name); + pb.setOwner(owner); + + return pb; + } + + static UpdateFunction fromPb(UpdateFunctionPb pb) { + UpdateFunction model = new UpdateFunction(); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + + return model; + } + + public static class UpdateFunctionSerializer extends JsonSerializer { + @Override + public void serialize(UpdateFunction value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateFunctionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateFunctionDeserializer extends JsonDeserializer { + @Override + public UpdateFunction deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateFunctionPb pb = mapper.readValue(p, UpdateFunctionPb.class); + return UpdateFunction.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunctionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunctionPb.java new file mode 100755 index 000000000..d3a270125 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunctionPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateFunctionPb { + @JsonIgnore private String name; + + @JsonProperty("owner") + private String owner; + + public UpdateFunctionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateFunctionPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateFunctionPb that = (UpdateFunctionPb) o; + return Objects.equals(name, that.name) && Objects.equals(owner, that.owner); + } + + @Override + public int hashCode() { + return Objects.hash(name, owner); + } + + @Override + public String toString() { + return new ToStringer(UpdateFunctionPb.class).add("name", name).add("owner", owner).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java index 82c622577..bde590a11 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java @@ -4,44 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateMetastore.UpdateMetastoreSerializer.class) +@JsonDeserialize(using = UpdateMetastore.UpdateMetastoreDeserializer.class) public class UpdateMetastore { /** * The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta * Sharing as the official name. */ - @JsonProperty("delta_sharing_organization_name") private String deltaSharingOrganizationName; /** The lifetime of delta sharing recipient token in seconds. */ - @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") private Long deltaSharingRecipientTokenLifetimeInSeconds; /** The scope of Delta Sharing enabled for the metastore. */ - @JsonProperty("delta_sharing_scope") - private UpdateMetastoreDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Unique ID of the metastore. */ - @JsonIgnore private String id; + private String id; /** New name for the metastore. */ - @JsonProperty("new_name") private String newName; /** The owner of the metastore. */ - @JsonProperty("owner") private String owner; /** Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). */ - @JsonProperty("privilege_model_version") private String privilegeModelVersion; /** UUID of storage credential to access the metastore storage_root. 
*/ - @JsonProperty("storage_root_credential_id") private String storageRootCredentialId; public UpdateMetastore setDeltaSharingOrganizationName(String deltaSharingOrganizationName) { @@ -63,12 +66,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public UpdateMetastore setDeltaSharingScope(UpdateMetastoreDeltaSharingScope deltaSharingScope) { + public UpdateMetastore setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public UpdateMetastoreDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } @@ -162,4 +165,53 @@ public String toString() { .add("storageRootCredentialId", storageRootCredentialId) .toString(); } + + UpdateMetastorePb toPb() { + UpdateMetastorePb pb = new UpdateMetastorePb(); + pb.setDeltaSharingOrganizationName(deltaSharingOrganizationName); + pb.setDeltaSharingRecipientTokenLifetimeInSeconds(deltaSharingRecipientTokenLifetimeInSeconds); + pb.setDeltaSharingScope(deltaSharingScope); + pb.setId(id); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setPrivilegeModelVersion(privilegeModelVersion); + pb.setStorageRootCredentialId(storageRootCredentialId); + + return pb; + } + + static UpdateMetastore fromPb(UpdateMetastorePb pb) { + UpdateMetastore model = new UpdateMetastore(); + model.setDeltaSharingOrganizationName(pb.getDeltaSharingOrganizationName()); + model.setDeltaSharingRecipientTokenLifetimeInSeconds( + pb.getDeltaSharingRecipientTokenLifetimeInSeconds()); + model.setDeltaSharingScope(pb.getDeltaSharingScope()); + model.setId(pb.getId()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setPrivilegeModelVersion(pb.getPrivilegeModelVersion()); + model.setStorageRootCredentialId(pb.getStorageRootCredentialId()); + + return model; + } + + public static class UpdateMetastoreSerializer extends 
JsonSerializer { + @Override + public void serialize(UpdateMetastore value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateMetastorePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateMetastoreDeserializer extends JsonDeserializer { + @Override + public UpdateMetastore deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateMetastorePb pb = mapper.readValue(p, UpdateMetastorePb.class); + return UpdateMetastore.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java index 79471c1ef..cf45e6b3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
UpdateMetastoreAssignment.UpdateMetastoreAssignmentSerializer.class) +@JsonDeserialize(using = UpdateMetastoreAssignment.UpdateMetastoreAssignmentDeserializer.class) public class UpdateMetastoreAssignment { /** - * The name of the default catalog in the metastore. This field is depracted. Please use "Default + * The name of the default catalog in the metastore. This field is deprecated. Please use "Default * Namespace API" to configure the default catalog for a Databricks workspace. */ - @JsonProperty("default_catalog_name") private String defaultCatalogName; /** The unique ID of the metastore. */ - @JsonProperty("metastore_id") private String metastoreId; /** A workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public UpdateMetastoreAssignment setDefaultCatalogName(String defaultCatalogName) { this.defaultCatalogName = defaultCatalogName; @@ -74,4 +82,45 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + UpdateMetastoreAssignmentPb toPb() { + UpdateMetastoreAssignmentPb pb = new UpdateMetastoreAssignmentPb(); + pb.setDefaultCatalogName(defaultCatalogName); + pb.setMetastoreId(metastoreId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static UpdateMetastoreAssignment fromPb(UpdateMetastoreAssignmentPb pb) { + UpdateMetastoreAssignment model = new UpdateMetastoreAssignment(); + model.setDefaultCatalogName(pb.getDefaultCatalogName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class UpdateMetastoreAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateMetastoreAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateMetastoreAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateMetastoreAssignmentDeserializer + extends JsonDeserializer { + @Override + public 
UpdateMetastoreAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateMetastoreAssignmentPb pb = mapper.readValue(p, UpdateMetastoreAssignmentPb.class); + return UpdateMetastoreAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignmentPb.java new file mode 100755 index 000000000..0921b1525 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignmentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateMetastoreAssignmentPb { + @JsonProperty("default_catalog_name") + private String defaultCatalogName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonIgnore private Long workspaceId; + + public UpdateMetastoreAssignmentPb setDefaultCatalogName(String defaultCatalogName) { + this.defaultCatalogName = defaultCatalogName; + return this; + } + + public String getDefaultCatalogName() { + return defaultCatalogName; + } + + public UpdateMetastoreAssignmentPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public UpdateMetastoreAssignmentPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMetastoreAssignmentPb that = (UpdateMetastoreAssignmentPb) o; + return Objects.equals(defaultCatalogName, that.defaultCatalogName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(defaultCatalogName, metastoreId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(UpdateMetastoreAssignmentPb.class) + .add("defaultCatalogName", defaultCatalogName) + .add("metastoreId", metastoreId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java deleted file mode 100755 index 13d6b6c2e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** The scope of Delta Sharing enabled for the metastore. */ -@Generated -public enum UpdateMetastoreDeltaSharingScope { - INTERNAL, - INTERNAL_AND_EXTERNAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastorePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastorePb.java new file mode 100755 index 000000000..02b7fdbaa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastorePb.java @@ -0,0 +1,154 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateMetastorePb { + @JsonProperty("delta_sharing_organization_name") + private String deltaSharingOrganizationName; + + @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") + private Long deltaSharingRecipientTokenLifetimeInSeconds; + + @JsonProperty("delta_sharing_scope") + private DeltaSharingScopeEnum deltaSharingScope; + + @JsonIgnore private String id; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("privilege_model_version") + private String privilegeModelVersion; + + @JsonProperty("storage_root_credential_id") + private String storageRootCredentialId; + + public UpdateMetastorePb setDeltaSharingOrganizationName(String deltaSharingOrganizationName) { + this.deltaSharingOrganizationName = deltaSharingOrganizationName; + return this; + } + + public String getDeltaSharingOrganizationName() { + return deltaSharingOrganizationName; + } + + public UpdateMetastorePb setDeltaSharingRecipientTokenLifetimeInSeconds( + Long deltaSharingRecipientTokenLifetimeInSeconds) { + this.deltaSharingRecipientTokenLifetimeInSeconds = deltaSharingRecipientTokenLifetimeInSeconds; + return this; + } + + public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { + return deltaSharingRecipientTokenLifetimeInSeconds; + } + + public UpdateMetastorePb setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { + this.deltaSharingScope = deltaSharingScope; + return this; + } + + public DeltaSharingScopeEnum getDeltaSharingScope() { + return deltaSharingScope; + } + + public UpdateMetastorePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public 
UpdateMetastorePb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateMetastorePb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateMetastorePb setPrivilegeModelVersion(String privilegeModelVersion) { + this.privilegeModelVersion = privilegeModelVersion; + return this; + } + + public String getPrivilegeModelVersion() { + return privilegeModelVersion; + } + + public UpdateMetastorePb setStorageRootCredentialId(String storageRootCredentialId) { + this.storageRootCredentialId = storageRootCredentialId; + return this; + } + + public String getStorageRootCredentialId() { + return storageRootCredentialId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMetastorePb that = (UpdateMetastorePb) o; + return Objects.equals(deltaSharingOrganizationName, that.deltaSharingOrganizationName) + && Objects.equals( + deltaSharingRecipientTokenLifetimeInSeconds, + that.deltaSharingRecipientTokenLifetimeInSeconds) + && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(id, that.id) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(privilegeModelVersion, that.privilegeModelVersion) + && Objects.equals(storageRootCredentialId, that.storageRootCredentialId); + } + + @Override + public int hashCode() { + return Objects.hash( + deltaSharingOrganizationName, + deltaSharingRecipientTokenLifetimeInSeconds, + deltaSharingScope, + id, + newName, + owner, + privilegeModelVersion, + storageRootCredentialId); + } + + @Override + public String toString() { + return new ToStringer(UpdateMetastorePb.class) + .add("deltaSharingOrganizationName", deltaSharingOrganizationName) + .add( + "deltaSharingRecipientTokenLifetimeInSeconds", + 
deltaSharingRecipientTokenLifetimeInSeconds) + .add("deltaSharingScope", deltaSharingScope) + .add("id", id) + .add("newName", newName) + .add("owner", owner) + .add("privilegeModelVersion", privilegeModelVersion) + .add("storageRootCredentialId", storageRootCredentialId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java index 4e220749c..71c08242b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateModelVersionRequest.UpdateModelVersionRequestSerializer.class) +@JsonDeserialize(using = UpdateModelVersionRequest.UpdateModelVersionRequestDeserializer.class) public class UpdateModelVersionRequest { /** The comment attached to the model version */ - @JsonProperty("comment") private String comment; /** The three-level (fully qualified) name of the model version */ - @JsonIgnore private String fullName; + 
private String fullName; /** The integer version number of the model version */ - @JsonIgnore private Long version; + private Long version; public UpdateModelVersionRequest setComment(String comment) { this.comment = comment; @@ -70,4 +79,45 @@ public String toString() { .add("version", version) .toString(); } + + UpdateModelVersionRequestPb toPb() { + UpdateModelVersionRequestPb pb = new UpdateModelVersionRequestPb(); + pb.setComment(comment); + pb.setFullName(fullName); + pb.setVersion(version); + + return pb; + } + + static UpdateModelVersionRequest fromPb(UpdateModelVersionRequestPb pb) { + UpdateModelVersionRequest model = new UpdateModelVersionRequest(); + model.setComment(pb.getComment()); + model.setFullName(pb.getFullName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class UpdateModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateModelVersionRequestPb pb = mapper.readValue(p, UpdateModelVersionRequestPb.class); + return UpdateModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequestPb.java new file mode 100755 index 000000000..139d79b8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequestPb.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateModelVersionRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonIgnore private String fullName; + + @JsonIgnore private Long version; + + public UpdateModelVersionRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateModelVersionRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdateModelVersionRequestPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateModelVersionRequestPb that = (UpdateModelVersionRequestPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(fullName, that.fullName) + && Objects.equals(version, that.version); 
+ } + + @Override + public int hashCode() { + return Objects.hash(comment, fullName, version); + } + + @Override + public String toString() { + return new ToStringer(UpdateModelVersionRequestPb.class) + .add("comment", comment) + .add("fullName", fullName) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java index 65d9bd639..d5671717a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateMonitor.UpdateMonitorSerializer.class) +@JsonDeserialize(using = UpdateMonitor.UpdateMonitorDeserializer.class) public class UpdateMonitor { /** * Name of the baseline table from which drift metrics are computed from. Columns in the monitored * table should also be present in the baseline table. 
*/ - @JsonProperty("baseline_table_name") private String baselineTableName; /** @@ -23,34 +32,27 @@ public class UpdateMonitor { * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across * time windows). */ - @JsonProperty("custom_metrics") private Collection customMetrics; /** * Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in * PENDING state. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The data classification config for the monitor. */ - @JsonProperty("data_classification_config") private MonitorDataClassificationConfig dataClassificationConfig; /** Configuration for monitoring inference logs. */ - @JsonProperty("inference_log") private MonitorInferenceLog inferenceLog; /** The notification settings for the monitor. */ - @JsonProperty("notifications") private MonitorNotifications notifications; /** Schema where output metric tables are created. */ - @JsonProperty("output_schema_name") private String outputSchemaName; /** The schedule for automatically updating and refreshing metric tables. */ - @JsonProperty("schedule") private MonitorCronSchedule schedule; /** @@ -59,18 +61,15 @@ public class UpdateMonitor { * complements. For high-cardinality columns, only the top 100 unique values by frequency will * generate slices. */ - @JsonProperty("slicing_exprs") private Collection slicingExprs; /** Configuration for monitoring snapshot tables. */ - @JsonProperty("snapshot") private MonitorSnapshot snapshot; /** Full name of the table. */ - @JsonIgnore private String tableName; + private String tableName; /** Configuration for monitoring time series tables. 
*/ - @JsonProperty("time_series") private MonitorTimeSeries timeSeries; public UpdateMonitor setBaselineTableName(String baselineTableName) { @@ -235,4 +234,59 @@ public String toString() { .add("timeSeries", timeSeries) .toString(); } + + UpdateMonitorPb toPb() { + UpdateMonitorPb pb = new UpdateMonitorPb(); + pb.setBaselineTableName(baselineTableName); + pb.setCustomMetrics(customMetrics); + pb.setDashboardId(dashboardId); + pb.setDataClassificationConfig(dataClassificationConfig); + pb.setInferenceLog(inferenceLog); + pb.setNotifications(notifications); + pb.setOutputSchemaName(outputSchemaName); + pb.setSchedule(schedule); + pb.setSlicingExprs(slicingExprs); + pb.setSnapshot(snapshot); + pb.setTableName(tableName); + pb.setTimeSeries(timeSeries); + + return pb; + } + + static UpdateMonitor fromPb(UpdateMonitorPb pb) { + UpdateMonitor model = new UpdateMonitor(); + model.setBaselineTableName(pb.getBaselineTableName()); + model.setCustomMetrics(pb.getCustomMetrics()); + model.setDashboardId(pb.getDashboardId()); + model.setDataClassificationConfig(pb.getDataClassificationConfig()); + model.setInferenceLog(pb.getInferenceLog()); + model.setNotifications(pb.getNotifications()); + model.setOutputSchemaName(pb.getOutputSchemaName()); + model.setSchedule(pb.getSchedule()); + model.setSlicingExprs(pb.getSlicingExprs()); + model.setSnapshot(pb.getSnapshot()); + model.setTableName(pb.getTableName()); + model.setTimeSeries(pb.getTimeSeries()); + + return model; + } + + public static class UpdateMonitorSerializer extends JsonSerializer { + @Override + public void serialize(UpdateMonitor value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateMonitorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateMonitorDeserializer extends JsonDeserializer { + @Override + public UpdateMonitor deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateMonitorPb pb = mapper.readValue(p, UpdateMonitorPb.class); + return UpdateMonitor.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitorPb.java new file mode 100755 index 000000000..539160291 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitorPb.java @@ -0,0 +1,211 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateMonitorPb { + @JsonProperty("baseline_table_name") + private String baselineTableName; + + @JsonProperty("custom_metrics") + private Collection customMetrics; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + @JsonProperty("inference_log") + private MonitorInferenceLog inferenceLog; + + @JsonProperty("notifications") + private MonitorNotifications notifications; + + @JsonProperty("output_schema_name") + private String outputSchemaName; + + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + @JsonProperty("snapshot") + private MonitorSnapshot snapshot; + + @JsonIgnore private String tableName; + + @JsonProperty("time_series") + private MonitorTimeSeries timeSeries; + + public UpdateMonitorPb setBaselineTableName(String baselineTableName) { + this.baselineTableName = 
baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public UpdateMonitorPb setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public UpdateMonitorPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public UpdateMonitorPb setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public UpdateMonitorPb setInferenceLog(MonitorInferenceLog inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLog getInferenceLog() { + return inferenceLog; + } + + public UpdateMonitorPb setNotifications(MonitorNotifications notifications) { + this.notifications = notifications; + return this; + } + + public MonitorNotifications getNotifications() { + return notifications; + } + + public UpdateMonitorPb setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public UpdateMonitorPb setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public UpdateMonitorPb setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public UpdateMonitorPb setSnapshot(MonitorSnapshot snapshot) { + this.snapshot = snapshot; + return this; + } + + public MonitorSnapshot getSnapshot() { + return 
snapshot; + } + + public UpdateMonitorPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public UpdateMonitorPb setTimeSeries(MonitorTimeSeries timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeries getTimeSeries() { + return timeSeries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMonitorPb that = (UpdateMonitorPb) o; + return Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(tableName, that.tableName) + && Objects.equals(timeSeries, that.timeSeries); + } + + @Override + public int hashCode() { + return Objects.hash( + baselineTableName, + customMetrics, + dashboardId, + dataClassificationConfig, + inferenceLog, + notifications, + outputSchemaName, + schedule, + slicingExprs, + snapshot, + tableName, + timeSeries); + } + + @Override + public String toString() { + return new ToStringer(UpdateMonitorPb.class) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dashboardId", dashboardId) + .add("dataClassificationConfig", dataClassificationConfig) + .add("inferenceLog", inferenceLog) + .add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("schedule", schedule) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + 
.add("tableName", tableName) + .add("timeSeries", timeSeries) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java index c75f52e40..fd7260fab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdatePermissions.UpdatePermissionsSerializer.class) +@JsonDeserialize(using = UpdatePermissions.UpdatePermissionsDeserializer.class) public class UpdatePermissions { /** Array of permissions change objects. */ - @JsonProperty("changes") private Collection changes; /** Full name of securable. */ - @JsonIgnore private String fullName; + private String fullName; /** Type of securable. 
*/ - @JsonIgnore private SecurableType securableType; + private SecurableType securableType; public UpdatePermissions setChanges(Collection changes) { this.changes = changes; @@ -71,4 +80,42 @@ public String toString() { .add("securableType", securableType) .toString(); } + + UpdatePermissionsPb toPb() { + UpdatePermissionsPb pb = new UpdatePermissionsPb(); + pb.setChanges(changes); + pb.setFullName(fullName); + pb.setSecurableType(securableType); + + return pb; + } + + static UpdatePermissions fromPb(UpdatePermissionsPb pb) { + UpdatePermissions model = new UpdatePermissions(); + model.setChanges(pb.getChanges()); + model.setFullName(pb.getFullName()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class UpdatePermissionsSerializer extends JsonSerializer { + @Override + public void serialize(UpdatePermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePermissionsDeserializer extends JsonDeserializer { + @Override + public UpdatePermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePermissionsPb pb = mapper.readValue(p, UpdatePermissionsPb.class); + return UpdatePermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsPb.java new file mode 100755 index 000000000..ddae81b89 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdatePermissionsPb { + @JsonProperty("changes") + private Collection changes; + + @JsonIgnore private String fullName; + + @JsonIgnore private SecurableType securableType; + + public UpdatePermissionsPb setChanges(Collection changes) { + this.changes = changes; + return this; + } + + public Collection getChanges() { + return changes; + } + + public UpdatePermissionsPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdatePermissionsPb setSecurableType(SecurableType securableType) { + this.securableType = securableType; + return this; + } + + public SecurableType getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePermissionsPb that = (UpdatePermissionsPb) o; + return Objects.equals(changes, that.changes) + && Objects.equals(fullName, that.fullName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(changes, fullName, securableType); + } + + @Override + public String toString() { + return new ToStringer(UpdatePermissionsPb.class) + .add("changes", changes) + .add("fullName", fullName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java index d7fdfe4b0..7be1fa7a5 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRegisteredModelRequest.UpdateRegisteredModelRequestSerializer.class) +@JsonDeserialize( + using = UpdateRegisteredModelRequest.UpdateRegisteredModelRequestDeserializer.class) public class UpdateRegisteredModelRequest { /** The comment attached to the registered model */ - @JsonProperty("comment") private String comment; /** The three-level (fully qualified) name of the registered model */ - @JsonIgnore private String fullName; + private String fullName; /** New name for the registered model. 
*/ - @JsonProperty("new_name") private String newName; /** The identifier of the user who owns the registered model */ - @JsonProperty("owner") private String owner; public UpdateRegisteredModelRequest setComment(String comment) { @@ -86,4 +94,47 @@ public String toString() { .add("owner", owner) .toString(); } + + UpdateRegisteredModelRequestPb toPb() { + UpdateRegisteredModelRequestPb pb = new UpdateRegisteredModelRequestPb(); + pb.setComment(comment); + pb.setFullName(fullName); + pb.setNewName(newName); + pb.setOwner(owner); + + return pb; + } + + static UpdateRegisteredModelRequest fromPb(UpdateRegisteredModelRequestPb pb) { + UpdateRegisteredModelRequest model = new UpdateRegisteredModelRequest(); + model.setComment(pb.getComment()); + model.setFullName(pb.getFullName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + + return model; + } + + public static class UpdateRegisteredModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateRegisteredModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRegisteredModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRegisteredModelRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateRegisteredModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRegisteredModelRequestPb pb = mapper.readValue(p, UpdateRegisteredModelRequestPb.class); + return UpdateRegisteredModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequestPb.java new file mode 100755 index 000000000..ca0dd93db --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRegisteredModelRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonIgnore private String fullName; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + public UpdateRegisteredModelRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateRegisteredModelRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdateRegisteredModelRequestPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateRegisteredModelRequestPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if 
(o == null || getClass() != o.getClass()) return false; + UpdateRegisteredModelRequestPb that = (UpdateRegisteredModelRequestPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(fullName, that.fullName) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner); + } + + @Override + public int hashCode() { + return Objects.hash(comment, fullName, newName, owner); + } + + @Override + public String toString() { + return new ToStringer(UpdateRegisteredModelRequestPb.class) + .add("comment", comment) + .add("fullName", fullName) + .add("newName", newName) + .add("owner", owner) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java index c8187417c..9ca1bf0eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String 
toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponsePb.java new file mode 100755 index 000000000..02edd2a14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java index 5c910b1e7..888be2a11 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateSchema.UpdateSchemaSerializer.class) +@JsonDeserialize(using = UpdateSchema.UpdateSchemaDeserializer.class) public class UpdateSchema { /** User-provided free-form text description. 
*/ - @JsonProperty("comment") private String comment; /** Whether predictive optimization should be enabled for this object and objects under it. */ - @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; /** Full name of the schema. */ - @JsonIgnore private String fullName; + private String fullName; /** New name for the schema. */ - @JsonProperty("new_name") private String newName; /** Username of current owner of schema. */ - @JsonProperty("owner") private String owner; /** A map of key-value properties attached to the securable. */ - @JsonProperty("properties") private Map properties; public UpdateSchema setComment(String comment) { @@ -119,4 +124,47 @@ public String toString() { .add("properties", properties) .toString(); } + + UpdateSchemaPb toPb() { + UpdateSchemaPb pb = new UpdateSchemaPb(); + pb.setComment(comment); + pb.setEnablePredictiveOptimization(enablePredictiveOptimization); + pb.setFullName(fullName); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setProperties(properties); + + return pb; + } + + static UpdateSchema fromPb(UpdateSchemaPb pb) { + UpdateSchema model = new UpdateSchema(); + model.setComment(pb.getComment()); + model.setEnablePredictiveOptimization(pb.getEnablePredictiveOptimization()); + model.setFullName(pb.getFullName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setProperties(pb.getProperties()); + + return model; + } + + public static class UpdateSchemaSerializer extends JsonSerializer { + @Override + public void serialize(UpdateSchema value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateSchemaPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateSchemaDeserializer extends JsonDeserializer { + @Override + public UpdateSchema deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateSchemaPb pb = mapper.readValue(p, UpdateSchemaPb.class); + return UpdateSchema.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchemaPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchemaPb.java new file mode 100755 index 000000000..02555fa36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchemaPb.java @@ -0,0 +1,116 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class UpdateSchemaPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + + @JsonIgnore private String fullName; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties") + private Map properties; + + public UpdateSchemaPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateSchemaPb setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + + public UpdateSchemaPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + 
return fullName; + } + + public UpdateSchemaPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateSchemaPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateSchemaPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSchemaPb that = (UpdateSchemaPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) + && Objects.equals(fullName, that.fullName) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, enablePredictiveOptimization, fullName, newName, owner, properties); + } + + @Override + public String toString() { + return new ToStringer(UpdateSchemaPb.class) + .add("comment", comment) + .add("enablePredictiveOptimization", enablePredictiveOptimization) + .add("fullName", fullName) + .add("newName", newName) + .add("owner", owner) + .add("properties", properties) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java index 504151504..878bc9527 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java @@ -4,61 +4,59 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateStorageCredential.UpdateStorageCredentialSerializer.class) +@JsonDeserialize(using = UpdateStorageCredential.UpdateStorageCredentialDeserializer.class) public class UpdateStorageCredential { /** The AWS IAM role configuration. */ - @JsonProperty("aws_iam_role") private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentityResponse azureManagedIdentity; /** The Azure service principal configuration. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** The Cloudflare API token configuration. */ - @JsonProperty("cloudflare_api_token") private CloudflareApiToken cloudflareApiToken; /** Comment associated with the credential. */ - @JsonProperty("comment") private String comment; /** The Databricks managed GCP service account configuration. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; /** Force update even if there are dependent external locations or external tables. 
*/ - @JsonProperty("force") private Boolean force; /** */ - @JsonProperty("isolation_mode") private IsolationMode isolationMode; /** Name of the storage credential. */ - @JsonIgnore private String name; + private String name; /** New name for the storage credential. */ - @JsonProperty("new_name") private String newName; /** Username of current owner of credential. */ - @JsonProperty("owner") private String owner; /** Whether the storage credential is only usable for read operations. */ - @JsonProperty("read_only") private Boolean readOnly; /** Supplying true to this argument skips validation of the updated credential. */ - @JsonProperty("skip_validation") private Boolean skipValidation; public UpdateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { @@ -237,4 +235,65 @@ public String toString() { .add("skipValidation", skipValidation) .toString(); } + + UpdateStorageCredentialPb toPb() { + UpdateStorageCredentialPb pb = new UpdateStorageCredentialPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setCloudflareApiToken(cloudflareApiToken); + pb.setComment(comment); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setForce(force); + pb.setIsolationMode(isolationMode); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setReadOnly(readOnly); + pb.setSkipValidation(skipValidation); + + return pb; + } + + static UpdateStorageCredential fromPb(UpdateStorageCredentialPb pb) { + UpdateStorageCredential model = new UpdateStorageCredential(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setCloudflareApiToken(pb.getCloudflareApiToken()); + model.setComment(pb.getComment()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setForce(pb.getForce()); + 
model.setIsolationMode(pb.getIsolationMode()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setReadOnly(pb.getReadOnly()); + model.setSkipValidation(pb.getSkipValidation()); + + return model; + } + + public static class UpdateStorageCredentialSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateStorageCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateStorageCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateStorageCredentialDeserializer + extends JsonDeserializer { + @Override + public UpdateStorageCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateStorageCredentialPb pb = mapper.readValue(p, UpdateStorageCredentialPb.class); + return UpdateStorageCredential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredentialPb.java new file mode 100755 index 000000000..18257ed0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredentialPb.java @@ -0,0 +1,227 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateStorageCredentialPb { + @JsonProperty("aws_iam_role") + private AwsIamRoleRequest awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityResponse azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; + + @JsonProperty("force") + private Boolean force; + + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public UpdateStorageCredentialPb setAwsIamRole(AwsIamRoleRequest awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleRequest getAwsIamRole() { + return awsIamRole; + } + + public UpdateStorageCredentialPb setAzureManagedIdentity( + AzureManagedIdentityResponse azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityResponse getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public UpdateStorageCredentialPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal 
getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public UpdateStorageCredentialPb setCloudflareApiToken(CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + public CloudflareApiToken getCloudflareApiToken() { + return cloudflareApiToken; + } + + public UpdateStorageCredentialPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateStorageCredentialPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public UpdateStorageCredentialPb setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public UpdateStorageCredentialPb setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateStorageCredentialPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateStorageCredentialPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateStorageCredentialPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateStorageCredentialPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public UpdateStorageCredentialPb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public 
Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateStorageCredentialPb that = (UpdateStorageCredentialPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(force, that.force) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + comment, + databricksGcpServiceAccount, + force, + isolationMode, + name, + newName, + owner, + readOnly, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(UpdateStorageCredentialPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("force", force) + .add("isolationMode", isolationMode) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .add("readOnly", readOnly) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java index 5f8a08040..05d64c70c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update a table owner. */ @Generated +@JsonSerialize(using = UpdateTableRequest.UpdateTableRequestSerializer.class) +@JsonDeserialize(using = UpdateTableRequest.UpdateTableRequestDeserializer.class) public class UpdateTableRequest { /** Full name of the table. 
*/ - @JsonIgnore private String fullName; + private String fullName; /** */ - @JsonProperty("owner") private String owner; public UpdateTableRequest setFullName(String fullName) { @@ -56,4 +65,40 @@ public String toString() { .add("owner", owner) .toString(); } + + UpdateTableRequestPb toPb() { + UpdateTableRequestPb pb = new UpdateTableRequestPb(); + pb.setFullName(fullName); + pb.setOwner(owner); + + return pb; + } + + static UpdateTableRequest fromPb(UpdateTableRequestPb pb) { + UpdateTableRequest model = new UpdateTableRequest(); + model.setFullName(pb.getFullName()); + model.setOwner(pb.getOwner()); + + return model; + } + + public static class UpdateTableRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateTableRequestDeserializer extends JsonDeserializer { + @Override + public UpdateTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateTableRequestPb pb = mapper.readValue(p, UpdateTableRequestPb.class); + return UpdateTableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequestPb.java new file mode 100755 index 000000000..f1399dcae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a table owner. */ +@Generated +class UpdateTableRequestPb { + @JsonIgnore private String fullName; + + @JsonProperty("owner") + private String owner; + + public UpdateTableRequestPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdateTableRequestPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateTableRequestPb that = (UpdateTableRequestPb) o; + return Objects.equals(fullName, that.fullName) && Objects.equals(owner, that.owner); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, owner); + } + + @Override + public String toString() { + return new ToStringer(UpdateTableRequestPb.class) + .add("fullName", fullName) + .add("owner", owner) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java index 73bcf0400..d63cb0e06 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateVolumeRequestContent.UpdateVolumeRequestContentSerializer.class) +@JsonDeserialize(using = UpdateVolumeRequestContent.UpdateVolumeRequestContentDeserializer.class) public class UpdateVolumeRequestContent { /** The comment attached to the volume */ - @JsonProperty("comment") private String comment; /** The three-level (fully qualified) name of the volume */ - @JsonIgnore private String name; + private String name; /** New name for the volume. 
*/ - @JsonProperty("new_name") private String newName; /** The identifier of the user who owns the volume */ - @JsonProperty("owner") private String owner; public UpdateVolumeRequestContent setComment(String comment) { @@ -86,4 +93,47 @@ public String toString() { .add("owner", owner) .toString(); } + + UpdateVolumeRequestContentPb toPb() { + UpdateVolumeRequestContentPb pb = new UpdateVolumeRequestContentPb(); + pb.setComment(comment); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + + return pb; + } + + static UpdateVolumeRequestContent fromPb(UpdateVolumeRequestContentPb pb) { + UpdateVolumeRequestContent model = new UpdateVolumeRequestContent(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + + return model; + } + + public static class UpdateVolumeRequestContentSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateVolumeRequestContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateVolumeRequestContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateVolumeRequestContentDeserializer + extends JsonDeserializer { + @Override + public UpdateVolumeRequestContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateVolumeRequestContentPb pb = mapper.readValue(p, UpdateVolumeRequestContentPb.class); + return UpdateVolumeRequestContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContentPb.java new file mode 100755 index 000000000..ea9690561 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContentPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateVolumeRequestContentPb { + @JsonProperty("comment") + private String comment; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + public UpdateVolumeRequestContentPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateVolumeRequestContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateVolumeRequestContentPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateVolumeRequestContentPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + UpdateVolumeRequestContentPb that = (UpdateVolumeRequestContentPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, newName, owner); + } + + @Override + public String toString() { + return new ToStringer(UpdateVolumeRequestContentPb.class) + .add("comment", comment) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java index c9530e9b6..19e7db7ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateWorkspaceBindings.UpdateWorkspaceBindingsSerializer.class) +@JsonDeserialize(using = 
UpdateWorkspaceBindings.UpdateWorkspaceBindingsDeserializer.class) public class UpdateWorkspaceBindings { /** A list of workspace IDs. */ - @JsonProperty("assign_workspaces") private Collection assignWorkspaces; /** The name of the catalog. */ - @JsonIgnore private String name; + private String name; /** A list of workspace IDs. */ - @JsonProperty("unassign_workspaces") private Collection unassignWorkspaces; public UpdateWorkspaceBindings setAssignWorkspaces(Collection assignWorkspaces) { @@ -72,4 +80,45 @@ public String toString() { .add("unassignWorkspaces", unassignWorkspaces) .toString(); } + + UpdateWorkspaceBindingsPb toPb() { + UpdateWorkspaceBindingsPb pb = new UpdateWorkspaceBindingsPb(); + pb.setAssignWorkspaces(assignWorkspaces); + pb.setName(name); + pb.setUnassignWorkspaces(unassignWorkspaces); + + return pb; + } + + static UpdateWorkspaceBindings fromPb(UpdateWorkspaceBindingsPb pb) { + UpdateWorkspaceBindings model = new UpdateWorkspaceBindings(); + model.setAssignWorkspaces(pb.getAssignWorkspaces()); + model.setName(pb.getName()); + model.setUnassignWorkspaces(pb.getUnassignWorkspaces()); + + return model; + } + + public static class UpdateWorkspaceBindingsSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceBindings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceBindingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWorkspaceBindingsDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceBindings deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceBindingsPb pb = mapper.readValue(p, UpdateWorkspaceBindingsPb.class); + return UpdateWorkspaceBindings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java index 3af63d755..93e2f65a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java @@ -4,29 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateWorkspaceBindingsParameters.UpdateWorkspaceBindingsParametersSerializer.class) +@JsonDeserialize( + using = UpdateWorkspaceBindingsParameters.UpdateWorkspaceBindingsParametersDeserializer.class) public class UpdateWorkspaceBindingsParameters { /** List of workspace bindings. */ - @JsonProperty("add") private Collection add; /** List of workspace bindings. */ - @JsonProperty("remove") private Collection remove; /** The name of the securable. 
*/ - @JsonIgnore private String securableName; + private String securableName; /** * The type of the securable to bind to a workspace (catalog, storage_credential, credential, or * external_location). */ - @JsonIgnore private String securableType; + private String securableType; public UpdateWorkspaceBindingsParameters setAdd(Collection add) { this.add = add; @@ -89,4 +99,48 @@ public String toString() { .add("securableType", securableType) .toString(); } + + UpdateWorkspaceBindingsParametersPb toPb() { + UpdateWorkspaceBindingsParametersPb pb = new UpdateWorkspaceBindingsParametersPb(); + pb.setAdd(add); + pb.setRemove(remove); + pb.setSecurableName(securableName); + pb.setSecurableType(securableType); + + return pb; + } + + static UpdateWorkspaceBindingsParameters fromPb(UpdateWorkspaceBindingsParametersPb pb) { + UpdateWorkspaceBindingsParameters model = new UpdateWorkspaceBindingsParameters(); + model.setAdd(pb.getAdd()); + model.setRemove(pb.getRemove()); + model.setSecurableName(pb.getSecurableName()); + model.setSecurableType(pb.getSecurableType()); + + return model; + } + + public static class UpdateWorkspaceBindingsParametersSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceBindingsParameters value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceBindingsParametersPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWorkspaceBindingsParametersDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceBindingsParameters deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceBindingsParametersPb pb = + mapper.readValue(p, UpdateWorkspaceBindingsParametersPb.class); + return UpdateWorkspaceBindingsParameters.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParametersPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParametersPb.java new file mode 100755 index 000000000..d5753f5e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParametersPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateWorkspaceBindingsParametersPb { + @JsonProperty("add") + private Collection add; + + @JsonProperty("remove") + private Collection remove; + + @JsonIgnore private String securableName; + + @JsonIgnore private String securableType; + + public UpdateWorkspaceBindingsParametersPb setAdd(Collection add) { + this.add = add; + return this; + } + + public Collection getAdd() { + return add; + } + + public UpdateWorkspaceBindingsParametersPb setRemove(Collection remove) { + this.remove = remove; + return this; + } + + public Collection getRemove() { + return remove; + } + + public UpdateWorkspaceBindingsParametersPb setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public UpdateWorkspaceBindingsParametersPb setSecurableType(String securableType) { + this.securableType = securableType; + return this; + } + + 
public String getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBindingsParametersPb that = (UpdateWorkspaceBindingsParametersPb) o; + return Objects.equals(add, that.add) + && Objects.equals(remove, that.remove) + && Objects.equals(securableName, that.securableName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(add, remove, securableName, securableType); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBindingsParametersPb.class) + .add("add", add) + .add("remove", remove) + .add("securableName", securableName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsPb.java new file mode 100755 index 000000000..81ffb7573 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateWorkspaceBindingsPb { + @JsonProperty("assign_workspaces") + private Collection assignWorkspaces; + + @JsonIgnore private String name; + + @JsonProperty("unassign_workspaces") + private Collection unassignWorkspaces; + + public UpdateWorkspaceBindingsPb setAssignWorkspaces(Collection assignWorkspaces) { + this.assignWorkspaces = assignWorkspaces; + return this; + } + + public Collection getAssignWorkspaces() { + return assignWorkspaces; + } + + public UpdateWorkspaceBindingsPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateWorkspaceBindingsPb setUnassignWorkspaces(Collection unassignWorkspaces) { + this.unassignWorkspaces = unassignWorkspaces; + return this; + } + + public Collection getUnassignWorkspaces() { + return unassignWorkspaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBindingsPb that = (UpdateWorkspaceBindingsPb) o; + return Objects.equals(assignWorkspaces, that.assignWorkspaces) + && Objects.equals(name, that.name) + && Objects.equals(unassignWorkspaces, that.unassignWorkspaces); + } + + @Override + public int hashCode() { + return Objects.hash(assignWorkspaces, name, unassignWorkspaces); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBindingsPb.class) + .add("assignWorkspaces", assignWorkspaces) + .add("name", name) + .add("unassignWorkspaces", unassignWorkspaces) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java index c87e345fd..14c26f7bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java @@ -4,15 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** A list of workspace IDs that are bound to the securable */ @Generated +@JsonSerialize( + using = UpdateWorkspaceBindingsResponse.UpdateWorkspaceBindingsResponseSerializer.class) +@JsonDeserialize( + using = UpdateWorkspaceBindingsResponse.UpdateWorkspaceBindingsResponseDeserializer.class) public class UpdateWorkspaceBindingsResponse { /** List of workspace bindings. 
*/ - @JsonProperty("bindings") private Collection bindings; public UpdateWorkspaceBindingsResponse setBindings(Collection bindings) { @@ -43,4 +55,42 @@ public String toString() { .add("bindings", bindings) .toString(); } + + UpdateWorkspaceBindingsResponsePb toPb() { + UpdateWorkspaceBindingsResponsePb pb = new UpdateWorkspaceBindingsResponsePb(); + pb.setBindings(bindings); + + return pb; + } + + static UpdateWorkspaceBindingsResponse fromPb(UpdateWorkspaceBindingsResponsePb pb) { + UpdateWorkspaceBindingsResponse model = new UpdateWorkspaceBindingsResponse(); + model.setBindings(pb.getBindings()); + + return model; + } + + public static class UpdateWorkspaceBindingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceBindingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceBindingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWorkspaceBindingsResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceBindingsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceBindingsResponsePb pb = + mapper.readValue(p, UpdateWorkspaceBindingsResponsePb.class); + return UpdateWorkspaceBindingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponsePb.java new file mode 100755 index 000000000..fa5b1c96b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A list of workspace IDs that are bound to the securable */ +@Generated +class UpdateWorkspaceBindingsResponsePb { + @JsonProperty("bindings") + private Collection bindings; + + public UpdateWorkspaceBindingsResponsePb setBindings(Collection bindings) { + this.bindings = bindings; + return this; + } + + public Collection getBindings() { + return bindings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBindingsResponsePb that = (UpdateWorkspaceBindingsResponsePb) o; + return Objects.equals(bindings, that.bindings); + } + + @Override + public int hashCode() { + return Objects.hash(bindings); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBindingsResponsePb.class) + .add("bindings", bindings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java index 7dea44dd8..bfc0ba03d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -4,48 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Next ID: 17 */ @Generated +@JsonSerialize(using = ValidateCredentialRequest.ValidateCredentialRequestSerializer.class) +@JsonDeserialize(using = ValidateCredentialRequest.ValidateCredentialRequestDeserializer.class) public class ValidateCredentialRequest { /** The AWS IAM role configuration */ - @JsonProperty("aws_iam_role") private AwsIamRole awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; /** Required. The name of an existing credential or long-lived cloud credential to validate. */ - @JsonProperty("credential_name") private String credentialName; /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccount databricksGcpServiceAccount; /** * The name of an existing external location to validate. Only applicable for storage credentials * (purpose is **STORAGE**.) */ - @JsonProperty("external_location_name") private String externalLocationName; /** The purpose of the credential. This should only be used when the credential is specified. */ - @JsonProperty("purpose") private CredentialPurpose purpose; /** * Whether the credential is only usable for read operations. Only applicable for storage * credentials (purpose is **STORAGE**.) */ - @JsonProperty("read_only") private Boolean readOnly; /** The external location url to validate. Only applicable when purpose is **STORAGE**. 
*/ - @JsonProperty("url") private String url; public ValidateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { @@ -163,4 +166,55 @@ public String toString() { .add("url", url) .toString(); } + + ValidateCredentialRequestPb toPb() { + ValidateCredentialRequestPb pb = new ValidateCredentialRequestPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setCredentialName(credentialName); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setExternalLocationName(externalLocationName); + pb.setPurpose(purpose); + pb.setReadOnly(readOnly); + pb.setUrl(url); + + return pb; + } + + static ValidateCredentialRequest fromPb(ValidateCredentialRequestPb pb) { + ValidateCredentialRequest model = new ValidateCredentialRequest(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setCredentialName(pb.getCredentialName()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setExternalLocationName(pb.getExternalLocationName()); + model.setPurpose(pb.getPurpose()); + model.setReadOnly(pb.getReadOnly()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class ValidateCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ValidateCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValidateCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValidateCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public ValidateCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValidateCredentialRequestPb pb = mapper.readValue(p, ValidateCredentialRequestPb.class); + return ValidateCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequestPb.java new file mode 100755 index 000000000..6d1aa6550 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequestPb.java @@ -0,0 +1,152 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next ID: 17 */ +@Generated +class ValidateCredentialRequestPb { + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + @JsonProperty("credential_name") + private String credentialName; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + + @JsonProperty("external_location_name") + private String externalLocationName; + + @JsonProperty("purpose") + private CredentialPurpose purpose; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("url") + private String url; + + public ValidateCredentialRequestPb setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public ValidateCredentialRequestPb setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity 
getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public ValidateCredentialRequestPb setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public ValidateCredentialRequestPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public ValidateCredentialRequestPb setExternalLocationName(String externalLocationName) { + this.externalLocationName = externalLocationName; + return this; + } + + public String getExternalLocationName() { + return externalLocationName; + } + + public ValidateCredentialRequestPb setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public ValidateCredentialRequestPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public ValidateCredentialRequestPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialRequestPb that = (ValidateCredentialRequestPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(externalLocationName, that.externalLocationName) + && Objects.equals(purpose, that.purpose) + && Objects.equals(readOnly, that.readOnly) + && 
Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + credentialName, + databricksGcpServiceAccount, + externalLocationName, + purpose, + readOnly, + url); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialRequestPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("credentialName", credentialName) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("externalLocationName", externalLocationName) + .add("purpose", purpose) + .add("readOnly", readOnly) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java index bded52047..17facc3f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ValidateCredentialResponse.ValidateCredentialResponseSerializer.class) +@JsonDeserialize(using = ValidateCredentialResponse.ValidateCredentialResponseDeserializer.class) public class ValidateCredentialResponse { /** * Whether the tested location is a directory in cloud storage. Only applicable for when purpose * is **STORAGE**. */ - @JsonProperty("isDir") private Boolean isDir; /** The results of the validation check. */ - @JsonProperty("results") private Collection results; public ValidateCredentialResponse setIsDir(Boolean isDir) { @@ -59,4 +68,43 @@ public String toString() { .add("results", results) .toString(); } + + ValidateCredentialResponsePb toPb() { + ValidateCredentialResponsePb pb = new ValidateCredentialResponsePb(); + pb.setIsDir(isDir); + pb.setResults(results); + + return pb; + } + + static ValidateCredentialResponse fromPb(ValidateCredentialResponsePb pb) { + ValidateCredentialResponse model = new ValidateCredentialResponse(); + model.setIsDir(pb.getIsDir()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ValidateCredentialResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ValidateCredentialResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValidateCredentialResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValidateCredentialResponseDeserializer + extends JsonDeserializer { + @Override + public ValidateCredentialResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValidateCredentialResponsePb pb = mapper.readValue(p, ValidateCredentialResponsePb.class); + return ValidateCredentialResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponsePb.java new file mode 100755 index 000000000..f92e76f68 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ValidateCredentialResponsePb { + @JsonProperty("isDir") + private Boolean isDir; + + @JsonProperty("results") + private Collection results; + + public ValidateCredentialResponsePb setIsDir(Boolean isDir) { + this.isDir = isDir; + return this; + } + + public Boolean getIsDir() { + return isDir; + } + + public ValidateCredentialResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialResponsePb that = (ValidateCredentialResponsePb) o; + return Objects.equals(isDir, that.isDir) && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(isDir, results); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialResponsePb.class) + .add("isDir", isDir) + .add("results", results) + .toString(); + 
} +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java index 23fb6866a..fc2913655 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java @@ -4,45 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ValidateStorageCredential.ValidateStorageCredentialSerializer.class) +@JsonDeserialize(using = ValidateStorageCredential.ValidateStorageCredentialDeserializer.class) public class ValidateStorageCredential { /** The AWS IAM role configuration. */ - @JsonProperty("aws_iam_role") private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ - @JsonProperty("azure_managed_identity") private AzureManagedIdentityRequest azureManagedIdentity; /** The Azure service principal configuration. */ - @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; /** The Cloudflare API token configuration. 
*/ - @JsonProperty("cloudflare_api_token") private CloudflareApiToken cloudflareApiToken; /** The Databricks created GCP service account configuration. */ - @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; /** The name of an existing external location to validate. */ - @JsonProperty("external_location_name") private String externalLocationName; /** Whether the storage credential is only usable for read operations. */ - @JsonProperty("read_only") private Boolean readOnly; /** The name of the storage credential to validate. */ - @JsonProperty("storage_credential_name") private String storageCredentialName; /** The external location url to validate. */ - @JsonProperty("url") private String url; public ValidateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { @@ -173,4 +175,57 @@ public String toString() { .add("url", url) .toString(); } + + ValidateStorageCredentialPb toPb() { + ValidateStorageCredentialPb pb = new ValidateStorageCredentialPb(); + pb.setAwsIamRole(awsIamRole); + pb.setAzureManagedIdentity(azureManagedIdentity); + pb.setAzureServicePrincipal(azureServicePrincipal); + pb.setCloudflareApiToken(cloudflareApiToken); + pb.setDatabricksGcpServiceAccount(databricksGcpServiceAccount); + pb.setExternalLocationName(externalLocationName); + pb.setReadOnly(readOnly); + pb.setStorageCredentialName(storageCredentialName); + pb.setUrl(url); + + return pb; + } + + static ValidateStorageCredential fromPb(ValidateStorageCredentialPb pb) { + ValidateStorageCredential model = new ValidateStorageCredential(); + model.setAwsIamRole(pb.getAwsIamRole()); + model.setAzureManagedIdentity(pb.getAzureManagedIdentity()); + model.setAzureServicePrincipal(pb.getAzureServicePrincipal()); + model.setCloudflareApiToken(pb.getCloudflareApiToken()); + model.setDatabricksGcpServiceAccount(pb.getDatabricksGcpServiceAccount()); + model.setExternalLocationName(pb.getExternalLocationName()); + 
model.setReadOnly(pb.getReadOnly()); + model.setStorageCredentialName(pb.getStorageCredentialName()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class ValidateStorageCredentialSerializer + extends JsonSerializer { + @Override + public void serialize( + ValidateStorageCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValidateStorageCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValidateStorageCredentialDeserializer + extends JsonDeserializer { + @Override + public ValidateStorageCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValidateStorageCredentialPb pb = mapper.readValue(p, ValidateStorageCredentialPb.class); + return ValidateStorageCredential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialPb.java new file mode 100755 index 000000000..08d802a14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialPb.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ValidateStorageCredentialPb { + @JsonProperty("aws_iam_role") + private AwsIamRoleRequest awsIamRole; + + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityRequest azureManagedIdentity; + + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; + + @JsonProperty("external_location_name") + private String externalLocationName; + + @JsonProperty("read_only") + private Boolean readOnly; + + @JsonProperty("storage_credential_name") + private String storageCredentialName; + + @JsonProperty("url") + private String url; + + public ValidateStorageCredentialPb setAwsIamRole(AwsIamRoleRequest awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleRequest getAwsIamRole() { + return awsIamRole; + } + + public ValidateStorageCredentialPb setAzureManagedIdentity( + AzureManagedIdentityRequest azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityRequest getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public ValidateStorageCredentialPb setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public ValidateStorageCredentialPb setCloudflareApiToken(CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + 
public CloudflareApiToken getCloudflareApiToken() { + return cloudflareApiToken; + } + + public ValidateStorageCredentialPb setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public ValidateStorageCredentialPb setExternalLocationName(String externalLocationName) { + this.externalLocationName = externalLocationName; + return this; + } + + public String getExternalLocationName() { + return externalLocationName; + } + + public ValidateStorageCredentialPb setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + public ValidateStorageCredentialPb setStorageCredentialName(String storageCredentialName) { + this.storageCredentialName = storageCredentialName; + return this; + } + + public String getStorageCredentialName() { + return storageCredentialName; + } + + public ValidateStorageCredentialPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateStorageCredentialPb that = (ValidateStorageCredentialPb) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(externalLocationName, that.externalLocationName) + && Objects.equals(readOnly, that.readOnly) + && Objects.equals(storageCredentialName, that.storageCredentialName) + && 
Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + databricksGcpServiceAccount, + externalLocationName, + readOnly, + storageCredentialName, + url); + } + + @Override + public String toString() { + return new ToStringer(ValidateStorageCredentialPb.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("externalLocationName", externalLocationName) + .add("readOnly", readOnly) + .add("storageCredentialName", storageCredentialName) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java index 94c61e8a9..627756529 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ValidateStorageCredentialResponse.ValidateStorageCredentialResponseSerializer.class) +@JsonDeserialize( + using = ValidateStorageCredentialResponse.ValidateStorageCredentialResponseDeserializer.class) public class ValidateStorageCredentialResponse { /** Whether the tested location is a directory in cloud storage. */ - @JsonProperty("isDir") private Boolean isDir; /** The results of the validation check. */ - @JsonProperty("results") private Collection results; public ValidateStorageCredentialResponse setIsDir(Boolean isDir) { @@ -56,4 +67,44 @@ public String toString() { .add("results", results) .toString(); } + + ValidateStorageCredentialResponsePb toPb() { + ValidateStorageCredentialResponsePb pb = new ValidateStorageCredentialResponsePb(); + pb.setIsDir(isDir); + pb.setResults(results); + + return pb; + } + + static ValidateStorageCredentialResponse fromPb(ValidateStorageCredentialResponsePb pb) { + ValidateStorageCredentialResponse model = new ValidateStorageCredentialResponse(); + model.setIsDir(pb.getIsDir()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ValidateStorageCredentialResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ValidateStorageCredentialResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValidateStorageCredentialResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValidateStorageCredentialResponseDeserializer + extends JsonDeserializer { + @Override + public ValidateStorageCredentialResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValidateStorageCredentialResponsePb pb = + mapper.readValue(p, ValidateStorageCredentialResponsePb.class); + return ValidateStorageCredentialResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponsePb.java new file mode 100755 index 000000000..34f396901 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ValidateStorageCredentialResponsePb { + @JsonProperty("isDir") + private Boolean isDir; + + @JsonProperty("results") + private Collection results; + + public ValidateStorageCredentialResponsePb setIsDir(Boolean isDir) { + this.isDir = isDir; + return this; + } + + public Boolean getIsDir() { + return isDir; + } + + public ValidateStorageCredentialResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateStorageCredentialResponsePb that = (ValidateStorageCredentialResponsePb) o; + return Objects.equals(isDir, that.isDir) && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(isDir, results); + } + + @Override + public String toString() { + return new 
ToStringer(ValidateStorageCredentialResponsePb.class) + .add("isDir", isDir) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java index 628e2b373..d972a87a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ValidationResult.ValidationResultSerializer.class) +@JsonDeserialize(using = ValidationResult.ValidationResultDeserializer.class) public class ValidationResult { /** Error message would exist when the result does not equal to **PASS**. */ - @JsonProperty("message") private String message; /** The operation tested. */ - @JsonProperty("operation") private ValidationResultOperation operation; /** The results of the tested operation. 
*/ - @JsonProperty("result") private ValidationResultResult result; public ValidationResult setMessage(String message) { @@ -71,4 +79,42 @@ public String toString() { .add("result", result) .toString(); } + + ValidationResultPb toPb() { + ValidationResultPb pb = new ValidationResultPb(); + pb.setMessage(message); + pb.setOperation(operation); + pb.setResult(result); + + return pb; + } + + static ValidationResult fromPb(ValidationResultPb pb) { + ValidationResult model = new ValidationResult(); + model.setMessage(pb.getMessage()); + model.setOperation(pb.getOperation()); + model.setResult(pb.getResult()); + + return model; + } + + public static class ValidationResultSerializer extends JsonSerializer { + @Override + public void serialize(ValidationResult value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValidationResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValidationResultDeserializer extends JsonDeserializer { + @Override + public ValidationResult deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValidationResultPb pb = mapper.readValue(p, ValidationResultPb.class); + return ValidationResult.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultPb.java new file mode 100755 index 000000000..52c89b9cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ValidationResultPb { + @JsonProperty("message") + private String message; + + @JsonProperty("operation") + private ValidationResultOperation operation; + + @JsonProperty("result") + private ValidationResultResult result; + + public ValidationResultPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public ValidationResultPb setOperation(ValidationResultOperation operation) { + this.operation = operation; + return this; + } + + public ValidationResultOperation getOperation() { + return operation; + } + + public ValidationResultPb setResult(ValidationResultResult result) { + this.result = result; + return this; + } + + public ValidationResultResult getResult() { + return result; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidationResultPb that = (ValidationResultPb) o; + return Objects.equals(message, that.message) + && Objects.equals(operation, that.operation) + && Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(message, operation, result); + } + + @Override + public String toString() { + return new ToStringer(ValidationResultPb.class) + .add("message", message) + .add("operation", operation) + .add("result", result) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java index ea18910d4..47c16d866 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java @@ -4,76 +4,71 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = VolumeInfo.VolumeInfoSerializer.class) +@JsonDeserialize(using = VolumeInfo.VolumeInfoDeserializer.class) public class VolumeInfo { /** The AWS access point to use when accesing s3 for this external location. */ - @JsonProperty("access_point") private String accessPoint; /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. */ - @JsonProperty("browse_only") private Boolean browseOnly; /** The name of the catalog where the schema and the volume are */ - @JsonProperty("catalog_name") private String catalogName; /** The comment attached to the volume */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("created_at") private Long createdAt; /** The identifier of the user who created the volume */ - @JsonProperty("created_by") private String createdBy; /** Encryption options that apply to clients connecting to cloud storage. 
*/ - @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; /** The three-level (fully qualified) name of the volume */ - @JsonProperty("full_name") private String fullName; /** The unique identifier of the metastore */ - @JsonProperty("metastore_id") private String metastoreId; /** The name of the volume */ - @JsonProperty("name") private String name; /** The identifier of the user who owns the volume */ - @JsonProperty("owner") private String owner; /** The name of the schema where the volume is */ - @JsonProperty("schema_name") private String schemaName; /** The storage location on the cloud */ - @JsonProperty("storage_location") private String storageLocation; /** */ - @JsonProperty("updated_at") private Long updatedAt; /** The identifier of the user who updated the volume last time */ - @JsonProperty("updated_by") private String updatedBy; /** The unique identifier of the volume */ - @JsonProperty("volume_id") private String volumeId; /** @@ -83,7 +78,6 @@ public class VolumeInfo { * *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external */ - @JsonProperty("volume_type") private VolumeType volumeType; public VolumeInfo setAccessPoint(String accessPoint) { @@ -307,4 +301,69 @@ public String toString() { .add("volumeType", volumeType) .toString(); } + + VolumeInfoPb toPb() { + VolumeInfoPb pb = new VolumeInfoPb(); + pb.setAccessPoint(accessPoint); + pb.setBrowseOnly(browseOnly); + pb.setCatalogName(catalogName); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEncryptionDetails(encryptionDetails); + pb.setFullName(fullName); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setSchemaName(schemaName); + pb.setStorageLocation(storageLocation); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setVolumeId(volumeId); + pb.setVolumeType(volumeType); + + return pb; + } + + static VolumeInfo fromPb(VolumeInfoPb pb) { + VolumeInfo model = new VolumeInfo(); + model.setAccessPoint(pb.getAccessPoint()); + model.setBrowseOnly(pb.getBrowseOnly()); + model.setCatalogName(pb.getCatalogName()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEncryptionDetails(pb.getEncryptionDetails()); + model.setFullName(pb.getFullName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setSchemaName(pb.getSchemaName()); + model.setStorageLocation(pb.getStorageLocation()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setVolumeId(pb.getVolumeId()); + model.setVolumeType(pb.getVolumeType()); + + return model; + } + + public static class VolumeInfoSerializer extends JsonSerializer { + @Override + public void serialize(VolumeInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VolumeInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, 
gen); + } + } + + public static class VolumeInfoDeserializer extends JsonDeserializer { + @Override + public VolumeInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VolumeInfoPb pb = mapper.readValue(p, VolumeInfoPb.class); + return VolumeInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfoPb.java new file mode 100755 index 000000000..4e7db19a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfoPb.java @@ -0,0 +1,284 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class VolumeInfoPb { + @JsonProperty("access_point") + private String accessPoint; + + @JsonProperty("browse_only") + private Boolean browseOnly; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("encryption_details") + private EncryptionDetails encryptionDetails; + + @JsonProperty("full_name") + private String fullName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("updated_at") + private Long updatedAt; + + 
@JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("volume_id") + private String volumeId; + + @JsonProperty("volume_type") + private VolumeType volumeType; + + public VolumeInfoPb setAccessPoint(String accessPoint) { + this.accessPoint = accessPoint; + return this; + } + + public String getAccessPoint() { + return accessPoint; + } + + public VolumeInfoPb setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public VolumeInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public VolumeInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public VolumeInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public VolumeInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public VolumeInfoPb setEncryptionDetails(EncryptionDetails encryptionDetails) { + this.encryptionDetails = encryptionDetails; + return this; + } + + public EncryptionDetails getEncryptionDetails() { + return encryptionDetails; + } + + public VolumeInfoPb setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public VolumeInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public VolumeInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public VolumeInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String 
getOwner() { + return owner; + } + + public VolumeInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public VolumeInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public VolumeInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public VolumeInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public VolumeInfoPb setVolumeId(String volumeId) { + this.volumeId = volumeId; + return this; + } + + public String getVolumeId() { + return volumeId; + } + + public VolumeInfoPb setVolumeType(VolumeType volumeType) { + this.volumeType = volumeType; + return this; + } + + public VolumeType getVolumeType() { + return volumeType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VolumeInfoPb that = (VolumeInfoPb) o; + return Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(encryptionDetails, that.encryptionDetails) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && 
Objects.equals(volumeId, that.volumeId) + && Objects.equals(volumeType, that.volumeType); + } + + @Override + public int hashCode() { + return Objects.hash( + accessPoint, + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + encryptionDetails, + fullName, + metastoreId, + name, + owner, + schemaName, + storageLocation, + updatedAt, + updatedBy, + volumeId, + volumeType); + } + + @Override + public String toString() { + return new ToStringer(VolumeInfoPb.class) + .add("accessPoint", accessPoint) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("encryptionDetails", encryptionDetails) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("schemaName", schemaName) + .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("volumeId", volumeId) + .add("volumeType", volumeType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java index 00fe08a18..245c6d5c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java @@ -21,7 +21,7 @@ public VolumeInfo create(CreateVolumeRequestContent request) { String path = "/api/2.1/unity-catalog/volumes"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, VolumeInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteVolumeRequest request) { String path = 
String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public ListVolumesResponseContent list(ListVolumesRequest request) { String path = "/api/2.1/unity-catalog/volumes"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListVolumesResponseContent.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public VolumeInfo read(ReadVolumeRequest request) { String path = String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, VolumeInfo.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public VolumeInfo update(UpdateVolumeRequestContent request) { String path = String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, VolumeInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java index 0d0c8653a..3ee4b9519 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = WorkspaceBinding.WorkspaceBindingSerializer.class) +@JsonDeserialize(using = WorkspaceBinding.WorkspaceBindingDeserializer.class) public class WorkspaceBinding { /** One of READ_WRITE/READ_ONLY. Default is READ_WRITE. 
*/ - @JsonProperty("binding_type") private WorkspaceBindingBindingType bindingType; /** Required */ - @JsonProperty("workspace_id") private Long workspaceId; public WorkspaceBinding setBindingType(WorkspaceBindingBindingType bindingType) { @@ -56,4 +65,40 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + WorkspaceBindingPb toPb() { + WorkspaceBindingPb pb = new WorkspaceBindingPb(); + pb.setBindingType(bindingType); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static WorkspaceBinding fromPb(WorkspaceBindingPb pb) { + WorkspaceBinding model = new WorkspaceBinding(); + model.setBindingType(pb.getBindingType()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class WorkspaceBindingSerializer extends JsonSerializer { + @Override + public void serialize(WorkspaceBinding value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceBindingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceBindingDeserializer extends JsonDeserializer { + @Override + public WorkspaceBinding deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceBindingPb pb = mapper.readValue(p, WorkspaceBindingPb.class); + return WorkspaceBinding.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingPb.java new file mode 100755 index 000000000..95f07b63a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WorkspaceBindingPb { + @JsonProperty("binding_type") + private WorkspaceBindingBindingType bindingType; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public WorkspaceBindingPb setBindingType(WorkspaceBindingBindingType bindingType) { + this.bindingType = bindingType; + return this; + } + + public WorkspaceBindingBindingType getBindingType() { + return bindingType; + } + + public WorkspaceBindingPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceBindingPb that = (WorkspaceBindingPb) o; + return Objects.equals(bindingType, that.bindingType) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(bindingType, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceBindingPb.class) + .add("bindingType", bindingType) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java index ab6b07cbd..beec26722 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java @@ -22,7 +22,7 @@ public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest reques 
String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetCatalogWorkspaceBindingsResponse.class); } catch (IOException e) { @@ -38,7 +38,7 @@ public GetWorkspaceBindingsResponse getBindings(GetBindingsRequest request) { request.getSecurableType(), request.getSecurableName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetWorkspaceBindingsResponse.class); } catch (IOException e) { @@ -52,7 +52,7 @@ public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings req String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateCatalogWorkspaceBindingsResponse.class); @@ -69,7 +69,7 @@ public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsPar request.getSecurableType(), request.getSecurableName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateWorkspaceBindingsResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java index 08962f135..233da66cd 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java @@ -4,29 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoom.CleanRoomSerializer.class) +@JsonDeserialize(using = CleanRoom.CleanRoomDeserializer.class) public class CleanRoom { /** * Whether clean room access is restricted due to [CSP] * *

[CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html */ - @JsonProperty("access_restricted") private CleanRoomAccessRestricted accessRestricted; /** */ - @JsonProperty("comment") private String comment; /** When the clean room was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** The alias of the collaborator tied to the local clean room. */ - @JsonProperty("local_collaborator_alias") private String localCollaboratorAlias; /** @@ -35,36 +42,30 @@ public class CleanRoom { *

[UC securable naming requirements]: * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements */ - @JsonProperty("name") private String name; /** * Output catalog of the clean room. It is an output only field. Output catalog is manipulated * using the separate CreateCleanRoomOutputCatalog API. */ - @JsonProperty("output_catalog") private CleanRoomOutputCatalog outputCatalog; /** * This is Databricks username of the owner of the local clean room securable for permission * management. */ - @JsonProperty("owner") private String owner; /** * Central clean room details. During creation, users need to specify cloud_vendor, region, and * collaborators.global_metastore_id. This field will not be filled in the ListCleanRooms call. */ - @JsonProperty("remote_detailed_info") private CleanRoomRemoteDetail remoteDetailedInfo; /** Clean room status. */ - @JsonProperty("status") private CleanRoomStatusEnum status; /** When the clean room was last updated, in epoch milliseconds. 
*/ - @JsonProperty("updated_at") private Long updatedAt; public CleanRoom setAccessRestricted(CleanRoomAccessRestricted accessRestricted) { @@ -204,4 +205,55 @@ public String toString() { .add("updatedAt", updatedAt) .toString(); } + + CleanRoomPb toPb() { + CleanRoomPb pb = new CleanRoomPb(); + pb.setAccessRestricted(accessRestricted); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setLocalCollaboratorAlias(localCollaboratorAlias); + pb.setName(name); + pb.setOutputCatalog(outputCatalog); + pb.setOwner(owner); + pb.setRemoteDetailedInfo(remoteDetailedInfo); + pb.setStatus(status); + pb.setUpdatedAt(updatedAt); + + return pb; + } + + static CleanRoom fromPb(CleanRoomPb pb) { + CleanRoom model = new CleanRoom(); + model.setAccessRestricted(pb.getAccessRestricted()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setLocalCollaboratorAlias(pb.getLocalCollaboratorAlias()); + model.setName(pb.getName()); + model.setOutputCatalog(pb.getOutputCatalog()); + model.setOwner(pb.getOwner()); + model.setRemoteDetailedInfo(pb.getRemoteDetailedInfo()); + model.setStatus(pb.getStatus()); + model.setUpdatedAt(pb.getUpdatedAt()); + + return model; + } + + public static class CleanRoomSerializer extends JsonSerializer { + @Override + public void serialize(CleanRoom value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomDeserializer extends JsonDeserializer { + @Override + public CleanRoom deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomPb pb = mapper.readValue(p, CleanRoomPb.class); + return CleanRoom.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java index 367a7360c..f48fa1d13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java @@ -4,32 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Metadata of the clean room asset */ @Generated +@JsonSerialize(using = CleanRoomAsset.CleanRoomAssetSerializer.class) +@JsonDeserialize(using = CleanRoomAsset.CleanRoomAssetDeserializer.class) public class CleanRoomAsset { /** When the asset is added to the clean room, in epoch milliseconds. */ - @JsonProperty("added_at") private Long addedAt; /** The type of the asset. */ - @JsonProperty("asset_type") private CleanRoomAssetAssetType assetType; /** * Foreign table details available to all collaborators of the clean room. 
Present if and only if * **asset_type** is **FOREIGN_TABLE** */ - @JsonProperty("foreign_table") private CleanRoomAssetForeignTable foreignTable; /** * Local details for a foreign that are only available to its owner. Present if and only if * **asset_type** is **FOREIGN_TABLE** */ - @JsonProperty("foreign_table_local_details") private CleanRoomAssetForeignTableLocalDetails foreignTableLocalDetails; /** @@ -41,57 +48,48 @@ public class CleanRoomAsset { * *

For notebooks, the name is the notebook file name. */ - @JsonProperty("name") private String name; /** * Notebook details available to all collaborators of the clean room. Present if and only if * **asset_type** is **NOTEBOOK_FILE** */ - @JsonProperty("notebook") private CleanRoomAssetNotebook notebook; /** The alias of the collaborator who owns this asset */ - @JsonProperty("owner_collaborator_alias") private String ownerCollaboratorAlias; /** Status of the asset */ - @JsonProperty("status") private CleanRoomAssetStatusEnum status; /** * Table details available to all collaborators of the clean room. Present if and only if * **asset_type** is **TABLE** */ - @JsonProperty("table") private CleanRoomAssetTable table; /** * Local details for a table that are only available to its owner. Present if and only if * **asset_type** is **TABLE** */ - @JsonProperty("table_local_details") private CleanRoomAssetTableLocalDetails tableLocalDetails; /** * View details available to all collaborators of the clean room. Present if and only if * **asset_type** is **VIEW** */ - @JsonProperty("view") private CleanRoomAssetView view; /** * Local details for a view that are only available to its owner. Present if and only if * **asset_type** is **VIEW** */ - @JsonProperty("view_local_details") private CleanRoomAssetViewLocalDetails viewLocalDetails; /** * Local details for a volume that are only available to its owner. 
Present if and only if * **asset_type** is **VOLUME** */ - @JsonProperty("volume_local_details") private CleanRoomAssetVolumeLocalDetails volumeLocalDetails; public CleanRoomAsset setAddedAt(Long addedAt) { @@ -268,4 +266,62 @@ public String toString() { .add("volumeLocalDetails", volumeLocalDetails) .toString(); } + + CleanRoomAssetPb toPb() { + CleanRoomAssetPb pb = new CleanRoomAssetPb(); + pb.setAddedAt(addedAt); + pb.setAssetType(assetType); + pb.setForeignTable(foreignTable); + pb.setForeignTableLocalDetails(foreignTableLocalDetails); + pb.setName(name); + pb.setNotebook(notebook); + pb.setOwnerCollaboratorAlias(ownerCollaboratorAlias); + pb.setStatus(status); + pb.setTable(table); + pb.setTableLocalDetails(tableLocalDetails); + pb.setView(view); + pb.setViewLocalDetails(viewLocalDetails); + pb.setVolumeLocalDetails(volumeLocalDetails); + + return pb; + } + + static CleanRoomAsset fromPb(CleanRoomAssetPb pb) { + CleanRoomAsset model = new CleanRoomAsset(); + model.setAddedAt(pb.getAddedAt()); + model.setAssetType(pb.getAssetType()); + model.setForeignTable(pb.getForeignTable()); + model.setForeignTableLocalDetails(pb.getForeignTableLocalDetails()); + model.setName(pb.getName()); + model.setNotebook(pb.getNotebook()); + model.setOwnerCollaboratorAlias(pb.getOwnerCollaboratorAlias()); + model.setStatus(pb.getStatus()); + model.setTable(pb.getTable()); + model.setTableLocalDetails(pb.getTableLocalDetails()); + model.setView(pb.getView()); + model.setViewLocalDetails(pb.getViewLocalDetails()); + model.setVolumeLocalDetails(pb.getVolumeLocalDetails()); + + return model; + } + + public static class CleanRoomAssetSerializer extends JsonSerializer { + @Override + public void serialize(CleanRoomAsset value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetDeserializer extends JsonDeserializer { + @Override + public 
CleanRoomAsset deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetPb pb = mapper.readValue(p, CleanRoomAssetPb.class); + return CleanRoomAsset.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java index 0e21a2f9f..892819e09 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomAssetForeignTable.CleanRoomAssetForeignTableSerializer.class) +@JsonDeserialize(using = CleanRoomAssetForeignTable.CleanRoomAssetForeignTableDeserializer.class) public class CleanRoomAssetForeignTable { /** The metadata information of the columns in the foreign table */ - @JsonProperty("columns") private Collection columns; public CleanRoomAssetForeignTable setColumns( @@ 
-41,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(CleanRoomAssetForeignTable.class).add("columns", columns).toString(); } + + CleanRoomAssetForeignTablePb toPb() { + CleanRoomAssetForeignTablePb pb = new CleanRoomAssetForeignTablePb(); + pb.setColumns(columns); + + return pb; + } + + static CleanRoomAssetForeignTable fromPb(CleanRoomAssetForeignTablePb pb) { + CleanRoomAssetForeignTable model = new CleanRoomAssetForeignTable(); + model.setColumns(pb.getColumns()); + + return model; + } + + public static class CleanRoomAssetForeignTableSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetForeignTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetForeignTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetForeignTableDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetForeignTable deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetForeignTablePb pb = mapper.readValue(p, CleanRoomAssetForeignTablePb.class); + return CleanRoomAssetForeignTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetails.java index 04e0234e0..99aa82db6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetails.java @@ -4,16 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CleanRoomAssetForeignTableLocalDetails.CleanRoomAssetForeignTableLocalDetailsSerializer + .class) +@JsonDeserialize( + using = + CleanRoomAssetForeignTableLocalDetails.CleanRoomAssetForeignTableLocalDetailsDeserializer + .class) public class CleanRoomAssetForeignTableLocalDetails { /** * The fully qualified name of the foreign table in its owner's local metastore, in the format of * *catalog*.*schema*.*foreign_table_name* */ - @JsonProperty("local_name") private String localName; public 
CleanRoomAssetForeignTableLocalDetails setLocalName(String localName) { @@ -44,4 +60,45 @@ public String toString() { .add("localName", localName) .toString(); } + + CleanRoomAssetForeignTableLocalDetailsPb toPb() { + CleanRoomAssetForeignTableLocalDetailsPb pb = new CleanRoomAssetForeignTableLocalDetailsPb(); + pb.setLocalName(localName); + + return pb; + } + + static CleanRoomAssetForeignTableLocalDetails fromPb( + CleanRoomAssetForeignTableLocalDetailsPb pb) { + CleanRoomAssetForeignTableLocalDetails model = new CleanRoomAssetForeignTableLocalDetails(); + model.setLocalName(pb.getLocalName()); + + return model; + } + + public static class CleanRoomAssetForeignTableLocalDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetForeignTableLocalDetails value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CleanRoomAssetForeignTableLocalDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetForeignTableLocalDetailsDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetForeignTableLocalDetails deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetForeignTableLocalDetailsPb pb = + mapper.readValue(p, CleanRoomAssetForeignTableLocalDetailsPb.class); + return CleanRoomAssetForeignTableLocalDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetailsPb.java new file mode 100755 index 000000000..34a32127d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetailsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomAssetForeignTableLocalDetailsPb { + @JsonProperty("local_name") + private String localName; + + public CleanRoomAssetForeignTableLocalDetailsPb setLocalName(String localName) { + this.localName = localName; + return this; + } + + public String getLocalName() { + return localName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetForeignTableLocalDetailsPb that = (CleanRoomAssetForeignTableLocalDetailsPb) o; + return Objects.equals(localName, that.localName); + } + + @Override + public int hashCode() { + return Objects.hash(localName); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetForeignTableLocalDetailsPb.class) + .add("localName", localName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTablePb.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTablePb.java new file mode 100755 index 000000000..6862dd381 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTablePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CleanRoomAssetForeignTablePb { + @JsonProperty("columns") + private Collection columns; + + public CleanRoomAssetForeignTablePb setColumns( + Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetForeignTablePb that = (CleanRoomAssetForeignTablePb) o; + return Objects.equals(columns, that.columns); + } + + @Override + public int hashCode() { + return Objects.hash(columns); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetForeignTablePb.class).add("columns", columns).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java index 299fdd186..6bde7a56a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomAssetNotebook.CleanRoomAssetNotebookSerializer.class) +@JsonDeserialize(using = CleanRoomAssetNotebook.CleanRoomAssetNotebookDeserializer.class) public class CleanRoomAssetNotebook { /** Server generated etag that represents the notebook version. */ - @JsonProperty("etag") private String etag; /** * Base 64 representation of the notebook contents. This is the same format as returned by * :method:workspace/export with the format of **HTML**. 
*/ - @JsonProperty("notebook_content") private String notebookContent; /** top-level status derived from all reviews */ - @JsonProperty("review_state") private CleanRoomNotebookReviewNotebookReviewState reviewState; /** All existing approvals or rejections */ - @JsonProperty("reviews") private Collection reviews; /** collaborators that can run the notebook */ - @JsonProperty("runner_collaborator_aliases") private Collection runnerCollaboratorAliases; public CleanRoomAssetNotebook setEtag(String etag) { @@ -107,4 +113,49 @@ public String toString() { .add("runnerCollaboratorAliases", runnerCollaboratorAliases) .toString(); } + + CleanRoomAssetNotebookPb toPb() { + CleanRoomAssetNotebookPb pb = new CleanRoomAssetNotebookPb(); + pb.setEtag(etag); + pb.setNotebookContent(notebookContent); + pb.setReviewState(reviewState); + pb.setReviews(reviews); + pb.setRunnerCollaboratorAliases(runnerCollaboratorAliases); + + return pb; + } + + static CleanRoomAssetNotebook fromPb(CleanRoomAssetNotebookPb pb) { + CleanRoomAssetNotebook model = new CleanRoomAssetNotebook(); + model.setEtag(pb.getEtag()); + model.setNotebookContent(pb.getNotebookContent()); + model.setReviewState(pb.getReviewState()); + model.setReviews(pb.getReviews()); + model.setRunnerCollaboratorAliases(pb.getRunnerCollaboratorAliases()); + + return model; + } + + public static class CleanRoomAssetNotebookSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetNotebook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetNotebookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetNotebookDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetNotebook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetNotebookPb pb = mapper.readValue(p, CleanRoomAssetNotebookPb.class); + return CleanRoomAssetNotebook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebookPb.java new file mode 100755 index 000000000..c15c6c740 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebookPb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CleanRoomAssetNotebookPb { + @JsonProperty("etag") + private String etag; + + @JsonProperty("notebook_content") + private String notebookContent; + + @JsonProperty("review_state") + private CleanRoomNotebookReviewNotebookReviewState reviewState; + + @JsonProperty("reviews") + private Collection reviews; + + @JsonProperty("runner_collaborator_aliases") + private Collection runnerCollaboratorAliases; + + public CleanRoomAssetNotebookPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public CleanRoomAssetNotebookPb setNotebookContent(String notebookContent) { + this.notebookContent = notebookContent; + return this; + } + + public String getNotebookContent() { + return notebookContent; + } + + public CleanRoomAssetNotebookPb setReviewState( + CleanRoomNotebookReviewNotebookReviewState reviewState) { + this.reviewState = reviewState; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewState getReviewState() { + return reviewState; + } + + public 
CleanRoomAssetNotebookPb setReviews(Collection reviews) { + this.reviews = reviews; + return this; + } + + public Collection getReviews() { + return reviews; + } + + public CleanRoomAssetNotebookPb setRunnerCollaboratorAliases( + Collection runnerCollaboratorAliases) { + this.runnerCollaboratorAliases = runnerCollaboratorAliases; + return this; + } + + public Collection getRunnerCollaboratorAliases() { + return runnerCollaboratorAliases; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetNotebookPb that = (CleanRoomAssetNotebookPb) o; + return Objects.equals(etag, that.etag) + && Objects.equals(notebookContent, that.notebookContent) + && Objects.equals(reviewState, that.reviewState) + && Objects.equals(reviews, that.reviews) + && Objects.equals(runnerCollaboratorAliases, that.runnerCollaboratorAliases); + } + + @Override + public int hashCode() { + return Objects.hash(etag, notebookContent, reviewState, reviews, runnerCollaboratorAliases); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetNotebookPb.class) + .add("etag", etag) + .add("notebookContent", notebookContent) + .add("reviewState", reviewState) + .add("reviews", reviews) + .add("runnerCollaboratorAliases", runnerCollaboratorAliases) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetPb.java new file mode 100755 index 000000000..0c7b30592 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetPb.java @@ -0,0 +1,227 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metadata of the clean room asset */ +@Generated +class CleanRoomAssetPb { + @JsonProperty("added_at") + private Long addedAt; + + @JsonProperty("asset_type") + private CleanRoomAssetAssetType assetType; + + @JsonProperty("foreign_table") + private CleanRoomAssetForeignTable foreignTable; + + @JsonProperty("foreign_table_local_details") + private CleanRoomAssetForeignTableLocalDetails foreignTableLocalDetails; + + @JsonProperty("name") + private String name; + + @JsonProperty("notebook") + private CleanRoomAssetNotebook notebook; + + @JsonProperty("owner_collaborator_alias") + private String ownerCollaboratorAlias; + + @JsonProperty("status") + private CleanRoomAssetStatusEnum status; + + @JsonProperty("table") + private CleanRoomAssetTable table; + + @JsonProperty("table_local_details") + private CleanRoomAssetTableLocalDetails tableLocalDetails; + + @JsonProperty("view") + private CleanRoomAssetView view; + + @JsonProperty("view_local_details") + private CleanRoomAssetViewLocalDetails viewLocalDetails; + + @JsonProperty("volume_local_details") + private CleanRoomAssetVolumeLocalDetails volumeLocalDetails; + + public CleanRoomAssetPb setAddedAt(Long addedAt) { + this.addedAt = addedAt; + return this; + } + + public Long getAddedAt() { + return addedAt; + } + + public CleanRoomAssetPb setAssetType(CleanRoomAssetAssetType assetType) { + this.assetType = assetType; + return this; + } + + public CleanRoomAssetAssetType getAssetType() { + return assetType; + } + + public CleanRoomAssetPb setForeignTable(CleanRoomAssetForeignTable foreignTable) { + this.foreignTable = foreignTable; + return this; + } + + public CleanRoomAssetForeignTable getForeignTable() { + return foreignTable; + } + + public CleanRoomAssetPb setForeignTableLocalDetails( + 
CleanRoomAssetForeignTableLocalDetails foreignTableLocalDetails) { + this.foreignTableLocalDetails = foreignTableLocalDetails; + return this; + } + + public CleanRoomAssetForeignTableLocalDetails getForeignTableLocalDetails() { + return foreignTableLocalDetails; + } + + public CleanRoomAssetPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CleanRoomAssetPb setNotebook(CleanRoomAssetNotebook notebook) { + this.notebook = notebook; + return this; + } + + public CleanRoomAssetNotebook getNotebook() { + return notebook; + } + + public CleanRoomAssetPb setOwnerCollaboratorAlias(String ownerCollaboratorAlias) { + this.ownerCollaboratorAlias = ownerCollaboratorAlias; + return this; + } + + public String getOwnerCollaboratorAlias() { + return ownerCollaboratorAlias; + } + + public CleanRoomAssetPb setStatus(CleanRoomAssetStatusEnum status) { + this.status = status; + return this; + } + + public CleanRoomAssetStatusEnum getStatus() { + return status; + } + + public CleanRoomAssetPb setTable(CleanRoomAssetTable table) { + this.table = table; + return this; + } + + public CleanRoomAssetTable getTable() { + return table; + } + + public CleanRoomAssetPb setTableLocalDetails(CleanRoomAssetTableLocalDetails tableLocalDetails) { + this.tableLocalDetails = tableLocalDetails; + return this; + } + + public CleanRoomAssetTableLocalDetails getTableLocalDetails() { + return tableLocalDetails; + } + + public CleanRoomAssetPb setView(CleanRoomAssetView view) { + this.view = view; + return this; + } + + public CleanRoomAssetView getView() { + return view; + } + + public CleanRoomAssetPb setViewLocalDetails(CleanRoomAssetViewLocalDetails viewLocalDetails) { + this.viewLocalDetails = viewLocalDetails; + return this; + } + + public CleanRoomAssetViewLocalDetails getViewLocalDetails() { + return viewLocalDetails; + } + + public CleanRoomAssetPb setVolumeLocalDetails( + CleanRoomAssetVolumeLocalDetails 
volumeLocalDetails) { + this.volumeLocalDetails = volumeLocalDetails; + return this; + } + + public CleanRoomAssetVolumeLocalDetails getVolumeLocalDetails() { + return volumeLocalDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetPb that = (CleanRoomAssetPb) o; + return Objects.equals(addedAt, that.addedAt) + && Objects.equals(assetType, that.assetType) + && Objects.equals(foreignTable, that.foreignTable) + && Objects.equals(foreignTableLocalDetails, that.foreignTableLocalDetails) + && Objects.equals(name, that.name) + && Objects.equals(notebook, that.notebook) + && Objects.equals(ownerCollaboratorAlias, that.ownerCollaboratorAlias) + && Objects.equals(status, that.status) + && Objects.equals(table, that.table) + && Objects.equals(tableLocalDetails, that.tableLocalDetails) + && Objects.equals(view, that.view) + && Objects.equals(viewLocalDetails, that.viewLocalDetails) + && Objects.equals(volumeLocalDetails, that.volumeLocalDetails); + } + + @Override + public int hashCode() { + return Objects.hash( + addedAt, + assetType, + foreignTable, + foreignTableLocalDetails, + name, + notebook, + ownerCollaboratorAlias, + status, + table, + tableLocalDetails, + view, + viewLocalDetails, + volumeLocalDetails); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetPb.class) + .add("addedAt", addedAt) + .add("assetType", assetType) + .add("foreignTable", foreignTable) + .add("foreignTableLocalDetails", foreignTableLocalDetails) + .add("name", name) + .add("notebook", notebook) + .add("ownerCollaboratorAlias", ownerCollaboratorAlias) + .add("status", status) + .add("table", table) + .add("tableLocalDetails", tableLocalDetails) + .add("view", view) + .add("viewLocalDetails", viewLocalDetails) + .add("volumeLocalDetails", volumeLocalDetails) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTable.java index 887931ea2..e38f40037 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTable.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomAssetTable.CleanRoomAssetTableSerializer.class) +@JsonDeserialize(using = CleanRoomAssetTable.CleanRoomAssetTableDeserializer.class) public class CleanRoomAssetTable { /** The metadata information of the columns in the table */ - @JsonProperty("columns") private Collection columns; public CleanRoomAssetTable setColumns( @@ -41,4 +51,39 @@ public int hashCode() { public String toString() { return new ToStringer(CleanRoomAssetTable.class).add("columns", columns).toString(); } + + CleanRoomAssetTablePb toPb() { + CleanRoomAssetTablePb pb = new CleanRoomAssetTablePb(); + pb.setColumns(columns); + + return pb; + } + + static CleanRoomAssetTable fromPb(CleanRoomAssetTablePb pb) { + CleanRoomAssetTable model = new CleanRoomAssetTable(); + 
model.setColumns(pb.getColumns()); + + return model; + } + + public static class CleanRoomAssetTableSerializer extends JsonSerializer { + @Override + public void serialize(CleanRoomAssetTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetTableDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetTable deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetTablePb pb = mapper.readValue(p, CleanRoomAssetTablePb.class); + return CleanRoomAssetTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java index 308f3b99c..911f0d813 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = CleanRoomAssetTableLocalDetails.CleanRoomAssetTableLocalDetailsSerializer.class) +@JsonDeserialize( + using = CleanRoomAssetTableLocalDetails.CleanRoomAssetTableLocalDetailsDeserializer.class) public class CleanRoomAssetTableLocalDetails { /** * The fully qualified name of the table in its owner's local metastore, in the format of * *catalog*.*schema*.*table_name* */ - @JsonProperty("local_name") private String localName; /** Partition filtering specification for a shared table. */ - @JsonProperty("partitions") private Collection partitions; public CleanRoomAssetTableLocalDetails setLocalName(String localName) { @@ -60,4 +71,44 @@ public String toString() { .add("partitions", partitions) .toString(); } + + CleanRoomAssetTableLocalDetailsPb toPb() { + CleanRoomAssetTableLocalDetailsPb pb = new CleanRoomAssetTableLocalDetailsPb(); + pb.setLocalName(localName); + pb.setPartitions(partitions); + + return pb; + } + + static CleanRoomAssetTableLocalDetails fromPb(CleanRoomAssetTableLocalDetailsPb pb) { + CleanRoomAssetTableLocalDetails model = new CleanRoomAssetTableLocalDetails(); + model.setLocalName(pb.getLocalName()); + model.setPartitions(pb.getPartitions()); + + return model; + } + + public static class CleanRoomAssetTableLocalDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetTableLocalDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetTableLocalDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetTableLocalDetailsDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetTableLocalDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetTableLocalDetailsPb pb = + mapper.readValue(p, CleanRoomAssetTableLocalDetailsPb.class); + return CleanRoomAssetTableLocalDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetailsPb.java new file mode 100755 index 000000000..057b75bb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetailsPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CleanRoomAssetTableLocalDetailsPb { + @JsonProperty("local_name") + private String localName; + + @JsonProperty("partitions") + private Collection partitions; + + public CleanRoomAssetTableLocalDetailsPb setLocalName(String localName) { + this.localName = localName; + return this; + } + + public String getLocalName() { + return localName; + } + + public CleanRoomAssetTableLocalDetailsPb setPartitions( + Collection partitions) { + this.partitions = partitions; + return this; + } + + public Collection getPartitions() { + return partitions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetTableLocalDetailsPb that = (CleanRoomAssetTableLocalDetailsPb) o; + return Objects.equals(localName, that.localName) && Objects.equals(partitions, that.partitions); + } + + @Override + public int hashCode() { + return 
Objects.hash(localName, partitions); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetTableLocalDetailsPb.class) + .add("localName", localName) + .add("partitions", partitions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTablePb.java new file mode 100755 index 000000000..6952378da --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTablePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CleanRoomAssetTablePb { + @JsonProperty("columns") + private Collection columns; + + public CleanRoomAssetTablePb setColumns( + Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetTablePb that = (CleanRoomAssetTablePb) o; + return Objects.equals(columns, that.columns); + } + + @Override + public int hashCode() { + return Objects.hash(columns); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetTablePb.class).add("columns", columns).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetView.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetView.java index 2856e7dca..090fdc9e3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetView.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetView.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomAssetView.CleanRoomAssetViewSerializer.class) +@JsonDeserialize(using = CleanRoomAssetView.CleanRoomAssetViewDeserializer.class) public class CleanRoomAssetView { /** The metadata information of the columns in the view */ - @JsonProperty("columns") private Collection columns; public CleanRoomAssetView setColumns( @@ -41,4 +51,38 @@ public int hashCode() { public String toString() { return new ToStringer(CleanRoomAssetView.class).add("columns", columns).toString(); } + + CleanRoomAssetViewPb toPb() { + CleanRoomAssetViewPb pb = new CleanRoomAssetViewPb(); + pb.setColumns(columns); + + return pb; + } + + static CleanRoomAssetView fromPb(CleanRoomAssetViewPb pb) { + CleanRoomAssetView model = new CleanRoomAssetView(); + model.setColumns(pb.getColumns()); + + return model; + } + + public static class CleanRoomAssetViewSerializer extends JsonSerializer { + @Override + public void serialize(CleanRoomAssetView value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + CleanRoomAssetViewPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetViewDeserializer extends JsonDeserializer { + @Override + public CleanRoomAssetView deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetViewPb pb = mapper.readValue(p, CleanRoomAssetViewPb.class); + return CleanRoomAssetView.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetails.java index 8727c5ab7..7903a4b4a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetails.java @@ -4,16 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CleanRoomAssetViewLocalDetails.CleanRoomAssetViewLocalDetailsSerializer.class) +@JsonDeserialize( + using = 
CleanRoomAssetViewLocalDetails.CleanRoomAssetViewLocalDetailsDeserializer.class) public class CleanRoomAssetViewLocalDetails { /** * The fully qualified name of the view in its owner's local metastore, in the format of * *catalog*.*schema*.*view_name* */ - @JsonProperty("local_name") private String localName; public CleanRoomAssetViewLocalDetails setLocalName(String localName) { @@ -44,4 +56,42 @@ public String toString() { .add("localName", localName) .toString(); } + + CleanRoomAssetViewLocalDetailsPb toPb() { + CleanRoomAssetViewLocalDetailsPb pb = new CleanRoomAssetViewLocalDetailsPb(); + pb.setLocalName(localName); + + return pb; + } + + static CleanRoomAssetViewLocalDetails fromPb(CleanRoomAssetViewLocalDetailsPb pb) { + CleanRoomAssetViewLocalDetails model = new CleanRoomAssetViewLocalDetails(); + model.setLocalName(pb.getLocalName()); + + return model; + } + + public static class CleanRoomAssetViewLocalDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetViewLocalDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetViewLocalDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetViewLocalDetailsDeserializer + extends JsonDeserializer { + @Override + public CleanRoomAssetViewLocalDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetViewLocalDetailsPb pb = + mapper.readValue(p, CleanRoomAssetViewLocalDetailsPb.class); + return CleanRoomAssetViewLocalDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetailsPb.java new file mode 100755 index 000000000..0ed07a596 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetailsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomAssetViewLocalDetailsPb { + @JsonProperty("local_name") + private String localName; + + public CleanRoomAssetViewLocalDetailsPb setLocalName(String localName) { + this.localName = localName; + return this; + } + + public String getLocalName() { + return localName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetViewLocalDetailsPb that = (CleanRoomAssetViewLocalDetailsPb) o; + return Objects.equals(localName, that.localName); + } + + @Override + public int hashCode() { + return Objects.hash(localName); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetViewLocalDetailsPb.class) + .add("localName", localName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewPb.java new file mode 
100755 index 000000000..fadc6c94e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CleanRoomAssetViewPb { + @JsonProperty("columns") + private Collection columns; + + public CleanRoomAssetViewPb setColumns( + Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetViewPb that = (CleanRoomAssetViewPb) o; + return Objects.equals(columns, that.columns); + } + + @Override + public int hashCode() { + return Objects.hash(columns); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetViewPb.class).add("columns", columns).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetails.java index 8899663c9..53889c05b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetails.java @@ -4,16 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CleanRoomAssetVolumeLocalDetails.CleanRoomAssetVolumeLocalDetailsSerializer.class) +@JsonDeserialize( + using = CleanRoomAssetVolumeLocalDetails.CleanRoomAssetVolumeLocalDetailsDeserializer.class) public class CleanRoomAssetVolumeLocalDetails { /** * The fully qualified name of the volume in its owner's local metastore, in the format of * *catalog*.*schema*.*volume_name* */ - @JsonProperty("local_name") private String localName; public CleanRoomAssetVolumeLocalDetails setLocalName(String localName) { @@ -44,4 +56,42 @@ public String toString() { .add("localName", localName) .toString(); } + + CleanRoomAssetVolumeLocalDetailsPb toPb() { + CleanRoomAssetVolumeLocalDetailsPb pb = new CleanRoomAssetVolumeLocalDetailsPb(); + pb.setLocalName(localName); + + return pb; + } + + static CleanRoomAssetVolumeLocalDetails fromPb(CleanRoomAssetVolumeLocalDetailsPb pb) { + CleanRoomAssetVolumeLocalDetails model = new CleanRoomAssetVolumeLocalDetails(); + model.setLocalName(pb.getLocalName()); + + return model; + } + + public static class CleanRoomAssetVolumeLocalDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomAssetVolumeLocalDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomAssetVolumeLocalDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomAssetVolumeLocalDetailsDeserializer + extends 
JsonDeserializer { + @Override + public CleanRoomAssetVolumeLocalDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomAssetVolumeLocalDetailsPb pb = + mapper.readValue(p, CleanRoomAssetVolumeLocalDetailsPb.class); + return CleanRoomAssetVolumeLocalDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetailsPb.java new file mode 100755 index 000000000..1c3b64f9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetailsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomAssetVolumeLocalDetailsPb { + @JsonProperty("local_name") + private String localName; + + public CleanRoomAssetVolumeLocalDetailsPb setLocalName(String localName) { + this.localName = localName; + return this; + } + + public String getLocalName() { + return localName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomAssetVolumeLocalDetailsPb that = (CleanRoomAssetVolumeLocalDetailsPb) o; + return Objects.equals(localName, that.localName); + } + + @Override + public int hashCode() { + return Objects.hash(localName); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomAssetVolumeLocalDetailsPb.class) + .add("localName", localName) 
+ .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java index 5bd9d49f6..cdb59e7a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java @@ -45,13 +45,12 @@ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) { return impl.create(request); } - public void delete( - String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) { + public void delete(String cleanRoomName, CleanRoomAssetAssetType assetType, String name) { delete( new DeleteCleanRoomAssetRequest() .setCleanRoomName(cleanRoomName) .setAssetType(assetType) - .setAssetFullName(assetFullName)); + .setName(name)); } /** @@ -63,13 +62,12 @@ public void delete(DeleteCleanRoomAssetRequest request) { impl.delete(request); } - public CleanRoomAsset get( - String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) { + public CleanRoomAsset get(String cleanRoomName, CleanRoomAssetAssetType assetType, String name) { return get( new GetCleanRoomAssetRequest() .setCleanRoomName(cleanRoomName) .setAssetType(assetType) - .setAssetFullName(assetFullName)); + .setName(name)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java index 444feb55d..69927caad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java @@ -21,7 +21,7 @@ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) { String path = 
String.format("/api/2.0/clean-rooms/%s/assets", request.getCleanRoomName()); try { Request req = new Request("POST", path, apiClient.serialize(request.getAsset())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CleanRoomAsset.class); @@ -35,10 +35,10 @@ public void delete(DeleteCleanRoomAssetRequest request) { String path = String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", - request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); + request.getCleanRoomName(), request.getAssetType(), request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteCleanRoomAssetResponse.class); } catch (IOException e) { @@ -51,10 +51,10 @@ public CleanRoomAsset get(GetCleanRoomAssetRequest request) { String path = String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", - request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); + request.getCleanRoomName(), request.getAssetType(), request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CleanRoomAsset.class); } catch (IOException e) { @@ -67,7 +67,7 @@ public ListCleanRoomAssetsResponse list(ListCleanRoomAssetsRequest request) { String path = String.format("/api/2.0/clean-rooms/%s/assets", request.getCleanRoomName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListCleanRoomAssetsResponse.class); } catch (IOException e) { @@ -83,7 +83,7 @@ public 
CleanRoomAsset update(UpdateCleanRoomAssetRequest request) { request.getCleanRoomName(), request.getAssetType(), request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getAsset())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CleanRoomAsset.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java index 88dae5502..6ac4b5082 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Publicly visible clean room collaborator. */ @Generated +@JsonSerialize(using = CleanRoomCollaborator.CleanRoomCollaboratorSerializer.class) +@JsonDeserialize(using = CleanRoomCollaborator.CleanRoomCollaboratorDeserializer.class) public class CleanRoomCollaborator { /** * Collaborator alias specified by the clean room creator. 
It is unique across all collaborators @@ -19,7 +30,6 @@ public class CleanRoomCollaborator { *

[UC securable naming requirements]: * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements */ - @JsonProperty("collaborator_alias") private String collaboratorAlias; /** @@ -27,14 +37,12 @@ public class CleanRoomCollaborator { * is the clean room name. For x-metastore clean rooms, it is the organization name of the * metastore. It is not restricted to these values and could change in the future */ - @JsonProperty("display_name") private String displayName; /** * The global Unity Catalog metastore id of the collaborator. The identifier is of format * cloud:region:metastore-uuid. */ - @JsonProperty("global_metastore_id") private String globalMetastoreId; /** @@ -42,7 +50,6 @@ public class CleanRoomCollaborator { * creator of the clean room, and non-empty for the invitees of the clean room. It is only * returned in the output when clean room creator calls GET */ - @JsonProperty("invite_recipient_email") private String inviteRecipientEmail; /** @@ -50,14 +57,12 @@ public class CleanRoomCollaborator { * invite_recipient_email is specified. It should be empty when the collaborator is the creator of * the clean room. 
*/ - @JsonProperty("invite_recipient_workspace_id") private Long inviteRecipientWorkspaceId; /** * [Organization name](:method:metastores/list#metastores-delta_sharing_organization_name) * configured in the metastore */ - @JsonProperty("organization_name") private String organizationName; public CleanRoomCollaborator setCollaboratorAlias(String collaboratorAlias) { @@ -149,4 +154,51 @@ public String toString() { .add("organizationName", organizationName) .toString(); } + + CleanRoomCollaboratorPb toPb() { + CleanRoomCollaboratorPb pb = new CleanRoomCollaboratorPb(); + pb.setCollaboratorAlias(collaboratorAlias); + pb.setDisplayName(displayName); + pb.setGlobalMetastoreId(globalMetastoreId); + pb.setInviteRecipientEmail(inviteRecipientEmail); + pb.setInviteRecipientWorkspaceId(inviteRecipientWorkspaceId); + pb.setOrganizationName(organizationName); + + return pb; + } + + static CleanRoomCollaborator fromPb(CleanRoomCollaboratorPb pb) { + CleanRoomCollaborator model = new CleanRoomCollaborator(); + model.setCollaboratorAlias(pb.getCollaboratorAlias()); + model.setDisplayName(pb.getDisplayName()); + model.setGlobalMetastoreId(pb.getGlobalMetastoreId()); + model.setInviteRecipientEmail(pb.getInviteRecipientEmail()); + model.setInviteRecipientWorkspaceId(pb.getInviteRecipientWorkspaceId()); + model.setOrganizationName(pb.getOrganizationName()); + + return model; + } + + public static class CleanRoomCollaboratorSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomCollaborator value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomCollaboratorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomCollaboratorDeserializer + extends JsonDeserializer { + @Override + public CleanRoomCollaborator deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomCollaboratorPb pb = mapper.readValue(p, CleanRoomCollaboratorPb.class); + return CleanRoomCollaborator.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaboratorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaboratorPb.java new file mode 100755 index 000000000..1e5328ebd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaboratorPb.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Publicly visible clean room collaborator. */ +@Generated +class CleanRoomCollaboratorPb { + @JsonProperty("collaborator_alias") + private String collaboratorAlias; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("global_metastore_id") + private String globalMetastoreId; + + @JsonProperty("invite_recipient_email") + private String inviteRecipientEmail; + + @JsonProperty("invite_recipient_workspace_id") + private Long inviteRecipientWorkspaceId; + + @JsonProperty("organization_name") + private String organizationName; + + public CleanRoomCollaboratorPb setCollaboratorAlias(String collaboratorAlias) { + this.collaboratorAlias = collaboratorAlias; + return this; + } + + public String getCollaboratorAlias() { + return collaboratorAlias; + } + + public CleanRoomCollaboratorPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CleanRoomCollaboratorPb setGlobalMetastoreId(String globalMetastoreId) { + this.globalMetastoreId = 
globalMetastoreId; + return this; + } + + public String getGlobalMetastoreId() { + return globalMetastoreId; + } + + public CleanRoomCollaboratorPb setInviteRecipientEmail(String inviteRecipientEmail) { + this.inviteRecipientEmail = inviteRecipientEmail; + return this; + } + + public String getInviteRecipientEmail() { + return inviteRecipientEmail; + } + + public CleanRoomCollaboratorPb setInviteRecipientWorkspaceId(Long inviteRecipientWorkspaceId) { + this.inviteRecipientWorkspaceId = inviteRecipientWorkspaceId; + return this; + } + + public Long getInviteRecipientWorkspaceId() { + return inviteRecipientWorkspaceId; + } + + public CleanRoomCollaboratorPb setOrganizationName(String organizationName) { + this.organizationName = organizationName; + return this; + } + + public String getOrganizationName() { + return organizationName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomCollaboratorPb that = (CleanRoomCollaboratorPb) o; + return Objects.equals(collaboratorAlias, that.collaboratorAlias) + && Objects.equals(displayName, that.displayName) + && Objects.equals(globalMetastoreId, that.globalMetastoreId) + && Objects.equals(inviteRecipientEmail, that.inviteRecipientEmail) + && Objects.equals(inviteRecipientWorkspaceId, that.inviteRecipientWorkspaceId) + && Objects.equals(organizationName, that.organizationName); + } + + @Override + public int hashCode() { + return Objects.hash( + collaboratorAlias, + displayName, + globalMetastoreId, + inviteRecipientEmail, + inviteRecipientWorkspaceId, + organizationName); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomCollaboratorPb.class) + .add("collaboratorAlias", collaboratorAlias) + .add("displayName", displayName) + .add("globalMetastoreId", globalMetastoreId) + .add("inviteRecipientEmail", inviteRecipientEmail) + .add("inviteRecipientWorkspaceId", inviteRecipientWorkspaceId) + 
.add("organizationName", organizationName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java index 035bd6b57..6a627c2ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomNotebookReview.CleanRoomNotebookReviewSerializer.class) +@JsonDeserialize(using = CleanRoomNotebookReview.CleanRoomNotebookReviewDeserializer.class) public class CleanRoomNotebookReview { /** review comment */ - @JsonProperty("comment") private String comment; /** timestamp of when the review was submitted */ - @JsonProperty("created_at_millis") private Long createdAtMillis; /** review outcome */ - @JsonProperty("review_state") private CleanRoomNotebookReviewNotebookReviewState reviewState; /** specified when the review was not explicitly made by a user */ - @JsonProperty("review_sub_reason") private CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason; /** collaborator 
alias of the reviewer */ - @JsonProperty("reviewer_collaborator_alias") private String reviewerCollaboratorAlias; public CleanRoomNotebookReview setComment(String comment) { @@ -104,4 +110,49 @@ public String toString() { .add("reviewerCollaboratorAlias", reviewerCollaboratorAlias) .toString(); } + + CleanRoomNotebookReviewPb toPb() { + CleanRoomNotebookReviewPb pb = new CleanRoomNotebookReviewPb(); + pb.setComment(comment); + pb.setCreatedAtMillis(createdAtMillis); + pb.setReviewState(reviewState); + pb.setReviewSubReason(reviewSubReason); + pb.setReviewerCollaboratorAlias(reviewerCollaboratorAlias); + + return pb; + } + + static CleanRoomNotebookReview fromPb(CleanRoomNotebookReviewPb pb) { + CleanRoomNotebookReview model = new CleanRoomNotebookReview(); + model.setComment(pb.getComment()); + model.setCreatedAtMillis(pb.getCreatedAtMillis()); + model.setReviewState(pb.getReviewState()); + model.setReviewSubReason(pb.getReviewSubReason()); + model.setReviewerCollaboratorAlias(pb.getReviewerCollaboratorAlias()); + + return model; + } + + public static class CleanRoomNotebookReviewSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomNotebookReview value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomNotebookReviewPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomNotebookReviewDeserializer + extends JsonDeserializer { + @Override + public CleanRoomNotebookReview deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomNotebookReviewPb pb = mapper.readValue(p, CleanRoomNotebookReviewPb.class); + return CleanRoomNotebookReview.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewPb.java new file mode 100755 index 000000000..a5c1654c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewPb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomNotebookReviewPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at_millis") + private Long createdAtMillis; + + @JsonProperty("review_state") + private CleanRoomNotebookReviewNotebookReviewState reviewState; + + @JsonProperty("review_sub_reason") + private CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason; + + @JsonProperty("reviewer_collaborator_alias") + private String reviewerCollaboratorAlias; + + public CleanRoomNotebookReviewPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CleanRoomNotebookReviewPb setCreatedAtMillis(Long createdAtMillis) { + this.createdAtMillis = createdAtMillis; + return this; + } + + public Long getCreatedAtMillis() { + return createdAtMillis; + } + + public CleanRoomNotebookReviewPb setReviewState( + CleanRoomNotebookReviewNotebookReviewState reviewState) { + this.reviewState = reviewState; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewState 
getReviewState() { + return reviewState; + } + + public CleanRoomNotebookReviewPb setReviewSubReason( + CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason) { + this.reviewSubReason = reviewSubReason; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewSubReason getReviewSubReason() { + return reviewSubReason; + } + + public CleanRoomNotebookReviewPb setReviewerCollaboratorAlias(String reviewerCollaboratorAlias) { + this.reviewerCollaboratorAlias = reviewerCollaboratorAlias; + return this; + } + + public String getReviewerCollaboratorAlias() { + return reviewerCollaboratorAlias; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomNotebookReviewPb that = (CleanRoomNotebookReviewPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdAtMillis, that.createdAtMillis) + && Objects.equals(reviewState, that.reviewState) + && Objects.equals(reviewSubReason, that.reviewSubReason) + && Objects.equals(reviewerCollaboratorAlias, that.reviewerCollaboratorAlias); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, createdAtMillis, reviewState, reviewSubReason, reviewerCollaboratorAlias); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomNotebookReviewPb.class) + .add("comment", comment) + .add("createdAtMillis", createdAtMillis) + .add("reviewState", reviewState) + .add("reviewSubReason", reviewSubReason) + .add("reviewerCollaboratorAlias", reviewerCollaboratorAlias) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java index 93cc03a88..9c6755e76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java @@ -4,50 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Stores information about a single task run. */ @Generated +@JsonSerialize(using = CleanRoomNotebookTaskRun.CleanRoomNotebookTaskRunSerializer.class) +@JsonDeserialize(using = CleanRoomNotebookTaskRun.CleanRoomNotebookTaskRunDeserializer.class) public class CleanRoomNotebookTaskRun { /** * Job run info of the task in the runner's local workspace. This field is only included in the * LIST API. if the task was run within the same workspace the API is being called. If the task * run was in a different workspace under the same metastore, only the workspace_id is included. */ - @JsonProperty("collaborator_job_run_info") private CollaboratorJobRunInfo collaboratorJobRunInfo; /** Etag of the notebook executed in this task run, used to identify the notebook version. */ - @JsonProperty("notebook_etag") private String notebookEtag; /** State of the task run. */ - @JsonProperty("notebook_job_run_state") private com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState; /** Asset name of the notebook executed in this task run. 
*/ - @JsonProperty("notebook_name") private String notebookName; /** The timestamp of when the notebook was last updated. */ - @JsonProperty("notebook_updated_at") private Long notebookUpdatedAt; /** Expiration time of the output schema of the task run (if any), in epoch milliseconds. */ - @JsonProperty("output_schema_expiration_time") private Long outputSchemaExpirationTime; /** Name of the output schema associated with the clean rooms notebook task run. */ - @JsonProperty("output_schema_name") private String outputSchemaName; /** Duration of the task run, in milliseconds. */ - @JsonProperty("run_duration") private Long runDuration; /** When the task run started, in epoch milliseconds. */ - @JsonProperty("start_time") private Long startTime; public CleanRoomNotebookTaskRun setCollaboratorJobRunInfo( @@ -177,4 +179,57 @@ public String toString() { .add("startTime", startTime) .toString(); } + + CleanRoomNotebookTaskRunPb toPb() { + CleanRoomNotebookTaskRunPb pb = new CleanRoomNotebookTaskRunPb(); + pb.setCollaboratorJobRunInfo(collaboratorJobRunInfo); + pb.setNotebookEtag(notebookEtag); + pb.setNotebookJobRunState(notebookJobRunState); + pb.setNotebookName(notebookName); + pb.setNotebookUpdatedAt(notebookUpdatedAt); + pb.setOutputSchemaExpirationTime(outputSchemaExpirationTime); + pb.setOutputSchemaName(outputSchemaName); + pb.setRunDuration(runDuration); + pb.setStartTime(startTime); + + return pb; + } + + static CleanRoomNotebookTaskRun fromPb(CleanRoomNotebookTaskRunPb pb) { + CleanRoomNotebookTaskRun model = new CleanRoomNotebookTaskRun(); + model.setCollaboratorJobRunInfo(pb.getCollaboratorJobRunInfo()); + model.setNotebookEtag(pb.getNotebookEtag()); + model.setNotebookJobRunState(pb.getNotebookJobRunState()); + model.setNotebookName(pb.getNotebookName()); + model.setNotebookUpdatedAt(pb.getNotebookUpdatedAt()); + model.setOutputSchemaExpirationTime(pb.getOutputSchemaExpirationTime()); + model.setOutputSchemaName(pb.getOutputSchemaName()); + 
model.setRunDuration(pb.getRunDuration()); + model.setStartTime(pb.getStartTime()); + + return model; + } + + public static class CleanRoomNotebookTaskRunSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomNotebookTaskRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomNotebookTaskRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomNotebookTaskRunDeserializer + extends JsonDeserializer { + @Override + public CleanRoomNotebookTaskRun deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomNotebookTaskRunPb pb = mapper.readValue(p, CleanRoomNotebookTaskRunPb.class); + return CleanRoomNotebookTaskRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRunPb.java new file mode 100755 index 000000000..816cd47a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRunPb.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Stores information about a single task run. 
*/ +@Generated +class CleanRoomNotebookTaskRunPb { + @JsonProperty("collaborator_job_run_info") + private CollaboratorJobRunInfo collaboratorJobRunInfo; + + @JsonProperty("notebook_etag") + private String notebookEtag; + + @JsonProperty("notebook_job_run_state") + private com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState; + + @JsonProperty("notebook_name") + private String notebookName; + + @JsonProperty("notebook_updated_at") + private Long notebookUpdatedAt; + + @JsonProperty("output_schema_expiration_time") + private Long outputSchemaExpirationTime; + + @JsonProperty("output_schema_name") + private String outputSchemaName; + + @JsonProperty("run_duration") + private Long runDuration; + + @JsonProperty("start_time") + private Long startTime; + + public CleanRoomNotebookTaskRunPb setCollaboratorJobRunInfo( + CollaboratorJobRunInfo collaboratorJobRunInfo) { + this.collaboratorJobRunInfo = collaboratorJobRunInfo; + return this; + } + + public CollaboratorJobRunInfo getCollaboratorJobRunInfo() { + return collaboratorJobRunInfo; + } + + public CleanRoomNotebookTaskRunPb setNotebookEtag(String notebookEtag) { + this.notebookEtag = notebookEtag; + return this; + } + + public String getNotebookEtag() { + return notebookEtag; + } + + public CleanRoomNotebookTaskRunPb setNotebookJobRunState( + com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState) { + this.notebookJobRunState = notebookJobRunState; + return this; + } + + public com.databricks.sdk.service.jobs.CleanRoomTaskRunState getNotebookJobRunState() { + return notebookJobRunState; + } + + public CleanRoomNotebookTaskRunPb setNotebookName(String notebookName) { + this.notebookName = notebookName; + return this; + } + + public String getNotebookName() { + return notebookName; + } + + public CleanRoomNotebookTaskRunPb setNotebookUpdatedAt(Long notebookUpdatedAt) { + this.notebookUpdatedAt = notebookUpdatedAt; + return this; + } + + public Long getNotebookUpdatedAt() { + return 
notebookUpdatedAt; + } + + public CleanRoomNotebookTaskRunPb setOutputSchemaExpirationTime(Long outputSchemaExpirationTime) { + this.outputSchemaExpirationTime = outputSchemaExpirationTime; + return this; + } + + public Long getOutputSchemaExpirationTime() { + return outputSchemaExpirationTime; + } + + public CleanRoomNotebookTaskRunPb setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public CleanRoomNotebookTaskRunPb setRunDuration(Long runDuration) { + this.runDuration = runDuration; + return this; + } + + public Long getRunDuration() { + return runDuration; + } + + public CleanRoomNotebookTaskRunPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomNotebookTaskRunPb that = (CleanRoomNotebookTaskRunPb) o; + return Objects.equals(collaboratorJobRunInfo, that.collaboratorJobRunInfo) + && Objects.equals(notebookEtag, that.notebookEtag) + && Objects.equals(notebookJobRunState, that.notebookJobRunState) + && Objects.equals(notebookName, that.notebookName) + && Objects.equals(notebookUpdatedAt, that.notebookUpdatedAt) + && Objects.equals(outputSchemaExpirationTime, that.outputSchemaExpirationTime) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(runDuration, that.runDuration) + && Objects.equals(startTime, that.startTime); + } + + @Override + public int hashCode() { + return Objects.hash( + collaboratorJobRunInfo, + notebookEtag, + notebookJobRunState, + notebookName, + notebookUpdatedAt, + outputSchemaExpirationTime, + outputSchemaName, + runDuration, + startTime); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomNotebookTaskRunPb.class) 
+ .add("collaboratorJobRunInfo", collaboratorJobRunInfo) + .add("notebookEtag", notebookEtag) + .add("notebookJobRunState", notebookJobRunState) + .add("notebookName", notebookName) + .add("notebookUpdatedAt", notebookUpdatedAt) + .add("outputSchemaExpirationTime", outputSchemaExpirationTime) + .add("outputSchemaName", outputSchemaName) + .add("runDuration", runDuration) + .add("startTime", startTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java index eb864a9ee..ab38cb5dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomOutputCatalog.CleanRoomOutputCatalogSerializer.class) +@JsonDeserialize(using = CleanRoomOutputCatalog.CleanRoomOutputCatalogDeserializer.class) public class CleanRoomOutputCatalog { /** * The name of the output catalog in UC. It should follow [UC securable naming requirements]. 
The @@ -16,11 +27,9 @@ public class CleanRoomOutputCatalog { *

[UC securable naming requirements]: * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements */ - @JsonProperty("catalog_name") private String catalogName; /** */ - @JsonProperty("status") private CleanRoomOutputCatalogOutputCatalogStatus status; public CleanRoomOutputCatalog setCatalogName(String catalogName) { @@ -61,4 +70,43 @@ public String toString() { .add("status", status) .toString(); } + + CleanRoomOutputCatalogPb toPb() { + CleanRoomOutputCatalogPb pb = new CleanRoomOutputCatalogPb(); + pb.setCatalogName(catalogName); + pb.setStatus(status); + + return pb; + } + + static CleanRoomOutputCatalog fromPb(CleanRoomOutputCatalogPb pb) { + CleanRoomOutputCatalog model = new CleanRoomOutputCatalog(); + model.setCatalogName(pb.getCatalogName()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class CleanRoomOutputCatalogSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomOutputCatalog value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomOutputCatalogPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomOutputCatalogDeserializer + extends JsonDeserializer { + @Override + public CleanRoomOutputCatalog deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomOutputCatalogPb pb = mapper.readValue(p, CleanRoomOutputCatalogPb.class); + return CleanRoomOutputCatalog.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogPb.java new file mode 100755 index 000000000..c08d98705 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomOutputCatalogPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("status") + private CleanRoomOutputCatalogOutputCatalogStatus status; + + public CleanRoomOutputCatalogPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CleanRoomOutputCatalogPb setStatus(CleanRoomOutputCatalogOutputCatalogStatus status) { + this.status = status; + return this; + } + + public CleanRoomOutputCatalogOutputCatalogStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomOutputCatalogPb that = (CleanRoomOutputCatalogPb) o; + return Objects.equals(catalogName, that.catalogName) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, status); + } + + @Override + public String toString() { + return new 
ToStringer(CleanRoomOutputCatalogPb.class) + .add("catalogName", catalogName) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomPb.java new file mode 100755 index 000000000..fc03de458 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomPb { + @JsonProperty("access_restricted") + private CleanRoomAccessRestricted accessRestricted; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("local_collaborator_alias") + private String localCollaboratorAlias; + + @JsonProperty("name") + private String name; + + @JsonProperty("output_catalog") + private CleanRoomOutputCatalog outputCatalog; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("remote_detailed_info") + private CleanRoomRemoteDetail remoteDetailedInfo; + + @JsonProperty("status") + private CleanRoomStatusEnum status; + + @JsonProperty("updated_at") + private Long updatedAt; + + public CleanRoomPb setAccessRestricted(CleanRoomAccessRestricted accessRestricted) { + this.accessRestricted = accessRestricted; + return this; + } + + public CleanRoomAccessRestricted getAccessRestricted() { + return accessRestricted; + } + + public CleanRoomPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CleanRoomPb setCreatedAt(Long createdAt) { + this.createdAt = 
createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CleanRoomPb setLocalCollaboratorAlias(String localCollaboratorAlias) { + this.localCollaboratorAlias = localCollaboratorAlias; + return this; + } + + public String getLocalCollaboratorAlias() { + return localCollaboratorAlias; + } + + public CleanRoomPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CleanRoomPb setOutputCatalog(CleanRoomOutputCatalog outputCatalog) { + this.outputCatalog = outputCatalog; + return this; + } + + public CleanRoomOutputCatalog getOutputCatalog() { + return outputCatalog; + } + + public CleanRoomPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public CleanRoomPb setRemoteDetailedInfo(CleanRoomRemoteDetail remoteDetailedInfo) { + this.remoteDetailedInfo = remoteDetailedInfo; + return this; + } + + public CleanRoomRemoteDetail getRemoteDetailedInfo() { + return remoteDetailedInfo; + } + + public CleanRoomPb setStatus(CleanRoomStatusEnum status) { + this.status = status; + return this; + } + + public CleanRoomStatusEnum getStatus() { + return status; + } + + public CleanRoomPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomPb that = (CleanRoomPb) o; + return Objects.equals(accessRestricted, that.accessRestricted) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(localCollaboratorAlias, that.localCollaboratorAlias) + && Objects.equals(name, that.name) + && Objects.equals(outputCatalog, that.outputCatalog) + && Objects.equals(owner, that.owner) + && Objects.equals(remoteDetailedInfo, 
that.remoteDetailedInfo) + && Objects.equals(status, that.status) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash( + accessRestricted, + comment, + createdAt, + localCollaboratorAlias, + name, + outputCatalog, + owner, + remoteDetailedInfo, + status, + updatedAt); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomPb.class) + .add("accessRestricted", accessRestricted) + .add("comment", comment) + .add("createdAt", createdAt) + .add("localCollaboratorAlias", localCollaboratorAlias) + .add("name", name) + .add("outputCatalog", outputCatalog) + .add("owner", owner) + .add("remoteDetailedInfo", remoteDetailedInfo) + .add("status", status) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java index afb1ee357..57800d8d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; /** Publicly visible central clean room details. */ @Generated +@JsonSerialize(using = CleanRoomRemoteDetail.CleanRoomRemoteDetailSerializer.class) +@JsonDeserialize(using = CleanRoomRemoteDetail.CleanRoomRemoteDetailDeserializer.class) public class CleanRoomRemoteDetail { /** Central clean room ID. */ - @JsonProperty("central_clean_room_id") private String centralCleanRoomId; /** Cloud vendor (aws,azure,gcp) of the central clean room. */ - @JsonProperty("cloud_vendor") private String cloudVendor; /** @@ -27,25 +36,20 @@ public class CleanRoomRemoteDetail { * *

2. Its invite_recipient_email is empty. */ - @JsonProperty("collaborators") private Collection collaborators; /** * The compliance security profile used to process regulated data following compliance standards. */ - @JsonProperty("compliance_security_profile") private ComplianceSecurityProfile complianceSecurityProfile; /** Collaborator who creates the clean room. */ - @JsonProperty("creator") private CleanRoomCollaborator creator; /** Egress network policy to apply to the central clean room workspace. */ - @JsonProperty("egress_network_policy") private com.databricks.sdk.service.settings.EgressNetworkPolicy egressNetworkPolicy; /** Region of the central clean room. */ - @JsonProperty("region") private String region; public CleanRoomRemoteDetail setCentralCleanRoomId(String centralCleanRoomId) { @@ -151,4 +155,53 @@ public String toString() { .add("region", region) .toString(); } + + CleanRoomRemoteDetailPb toPb() { + CleanRoomRemoteDetailPb pb = new CleanRoomRemoteDetailPb(); + pb.setCentralCleanRoomId(centralCleanRoomId); + pb.setCloudVendor(cloudVendor); + pb.setCollaborators(collaborators); + pb.setComplianceSecurityProfile(complianceSecurityProfile); + pb.setCreator(creator); + pb.setEgressNetworkPolicy(egressNetworkPolicy); + pb.setRegion(region); + + return pb; + } + + static CleanRoomRemoteDetail fromPb(CleanRoomRemoteDetailPb pb) { + CleanRoomRemoteDetail model = new CleanRoomRemoteDetail(); + model.setCentralCleanRoomId(pb.getCentralCleanRoomId()); + model.setCloudVendor(pb.getCloudVendor()); + model.setCollaborators(pb.getCollaborators()); + model.setComplianceSecurityProfile(pb.getComplianceSecurityProfile()); + model.setCreator(pb.getCreator()); + model.setEgressNetworkPolicy(pb.getEgressNetworkPolicy()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class CleanRoomRemoteDetailSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomRemoteDetail value, JsonGenerator gen, SerializerProvider 
provider) + throws IOException { + CleanRoomRemoteDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomRemoteDetailDeserializer + extends JsonDeserializer { + @Override + public CleanRoomRemoteDetail deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomRemoteDetailPb pb = mapper.readValue(p, CleanRoomRemoteDetailPb.class); + return CleanRoomRemoteDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetailPb.java new file mode 100755 index 000000000..b5e8f50dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetailPb.java @@ -0,0 +1,138 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Publicly visible central clean room details. 
*/ +@Generated +class CleanRoomRemoteDetailPb { + @JsonProperty("central_clean_room_id") + private String centralCleanRoomId; + + @JsonProperty("cloud_vendor") + private String cloudVendor; + + @JsonProperty("collaborators") + private Collection collaborators; + + @JsonProperty("compliance_security_profile") + private ComplianceSecurityProfile complianceSecurityProfile; + + @JsonProperty("creator") + private CleanRoomCollaborator creator; + + @JsonProperty("egress_network_policy") + private com.databricks.sdk.service.settings.EgressNetworkPolicy egressNetworkPolicy; + + @JsonProperty("region") + private String region; + + public CleanRoomRemoteDetailPb setCentralCleanRoomId(String centralCleanRoomId) { + this.centralCleanRoomId = centralCleanRoomId; + return this; + } + + public String getCentralCleanRoomId() { + return centralCleanRoomId; + } + + public CleanRoomRemoteDetailPb setCloudVendor(String cloudVendor) { + this.cloudVendor = cloudVendor; + return this; + } + + public String getCloudVendor() { + return cloudVendor; + } + + public CleanRoomRemoteDetailPb setCollaborators(Collection collaborators) { + this.collaborators = collaborators; + return this; + } + + public Collection getCollaborators() { + return collaborators; + } + + public CleanRoomRemoteDetailPb setComplianceSecurityProfile( + ComplianceSecurityProfile complianceSecurityProfile) { + this.complianceSecurityProfile = complianceSecurityProfile; + return this; + } + + public ComplianceSecurityProfile getComplianceSecurityProfile() { + return complianceSecurityProfile; + } + + public CleanRoomRemoteDetailPb setCreator(CleanRoomCollaborator creator) { + this.creator = creator; + return this; + } + + public CleanRoomCollaborator getCreator() { + return creator; + } + + public CleanRoomRemoteDetailPb setEgressNetworkPolicy( + com.databricks.sdk.service.settings.EgressNetworkPolicy egressNetworkPolicy) { + this.egressNetworkPolicy = egressNetworkPolicy; + return this; + } + + public 
com.databricks.sdk.service.settings.EgressNetworkPolicy getEgressNetworkPolicy() { + return egressNetworkPolicy; + } + + public CleanRoomRemoteDetailPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomRemoteDetailPb that = (CleanRoomRemoteDetailPb) o; + return Objects.equals(centralCleanRoomId, that.centralCleanRoomId) + && Objects.equals(cloudVendor, that.cloudVendor) + && Objects.equals(collaborators, that.collaborators) + && Objects.equals(complianceSecurityProfile, that.complianceSecurityProfile) + && Objects.equals(creator, that.creator) + && Objects.equals(egressNetworkPolicy, that.egressNetworkPolicy) + && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash( + centralCleanRoomId, + cloudVendor, + collaborators, + complianceSecurityProfile, + creator, + egressNetworkPolicy, + region); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomRemoteDetailPb.class) + .add("centralCleanRoomId", centralCleanRoomId) + .add("cloudVendor", cloudVendor) + .add("collaborators", collaborators) + .add("complianceSecurityProfile", complianceSecurityProfile) + .add("creator", creator) + .add("egressNetworkPolicy", egressNetworkPolicy) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java index e406e6a03..001e05c28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java @@ -21,7 +21,7 @@ public 
ListCleanRoomNotebookTaskRunsResponse list(ListCleanRoomNotebookTaskRunsR String path = String.format("/api/2.0/clean-rooms/%s/runs", request.getCleanRoomName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListCleanRoomNotebookTaskRunsResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java index b0bacf5d0..e5d948b3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java @@ -21,7 +21,7 @@ public CleanRoom create(CreateCleanRoomRequest request) { String path = "/api/2.0/clean-rooms"; try { Request req = new Request("POST", path, apiClient.serialize(request.getCleanRoom())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CleanRoom.class); @@ -37,7 +37,7 @@ public CreateCleanRoomOutputCatalogResponse createOutputCatalog( String.format("/api/2.0/clean-rooms/%s/output-catalogs", request.getCleanRoomName()); try { Request req = new Request("POST", path, apiClient.serialize(request.getOutputCatalog())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateCleanRoomOutputCatalogResponse.class); @@ -51,7 +51,7 @@ public void delete(DeleteCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); try { Request req = new 
Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -64,7 +64,7 @@ public CleanRoom get(GetCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CleanRoom.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public ListCleanRoomsResponse list(ListCleanRoomsRequest request) { String path = "/api/2.0/clean-rooms"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListCleanRoomsResponse.class); } catch (IOException e) { @@ -90,7 +90,7 @@ public CleanRoom update(UpdateCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CleanRoom.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java index ba03b0120..9c9e5c834 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CollaboratorJobRunInfo.CollaboratorJobRunInfoSerializer.class) +@JsonDeserialize(using = CollaboratorJobRunInfo.CollaboratorJobRunInfoDeserializer.class) public class CollaboratorJobRunInfo { /** Alias of the collaborator that triggered the task run. */ - @JsonProperty("collaborator_alias") private String collaboratorAlias; /** Job ID of the task run in the collaborator's workspace. */ - @JsonProperty("collaborator_job_id") private Long collaboratorJobId; /** Job run ID of the task run in the collaborator's workspace. */ - @JsonProperty("collaborator_job_run_id") private Long collaboratorJobRunId; /** Task run ID of the task run in the collaborator's workspace. */ - @JsonProperty("collaborator_task_run_id") private Long collaboratorTaskRunId; /** ID of the collaborator's workspace that triggered the task run. 
*/ - @JsonProperty("collaborator_workspace_id") private Long collaboratorWorkspaceId; public CollaboratorJobRunInfo setCollaboratorAlias(String collaboratorAlias) { @@ -106,4 +112,49 @@ public String toString() { .add("collaboratorWorkspaceId", collaboratorWorkspaceId) .toString(); } + + CollaboratorJobRunInfoPb toPb() { + CollaboratorJobRunInfoPb pb = new CollaboratorJobRunInfoPb(); + pb.setCollaboratorAlias(collaboratorAlias); + pb.setCollaboratorJobId(collaboratorJobId); + pb.setCollaboratorJobRunId(collaboratorJobRunId); + pb.setCollaboratorTaskRunId(collaboratorTaskRunId); + pb.setCollaboratorWorkspaceId(collaboratorWorkspaceId); + + return pb; + } + + static CollaboratorJobRunInfo fromPb(CollaboratorJobRunInfoPb pb) { + CollaboratorJobRunInfo model = new CollaboratorJobRunInfo(); + model.setCollaboratorAlias(pb.getCollaboratorAlias()); + model.setCollaboratorJobId(pb.getCollaboratorJobId()); + model.setCollaboratorJobRunId(pb.getCollaboratorJobRunId()); + model.setCollaboratorTaskRunId(pb.getCollaboratorTaskRunId()); + model.setCollaboratorWorkspaceId(pb.getCollaboratorWorkspaceId()); + + return model; + } + + public static class CollaboratorJobRunInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + CollaboratorJobRunInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CollaboratorJobRunInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CollaboratorJobRunInfoDeserializer + extends JsonDeserializer { + @Override + public CollaboratorJobRunInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CollaboratorJobRunInfoPb pb = mapper.readValue(p, CollaboratorJobRunInfoPb.class); + return CollaboratorJobRunInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfoPb.java new file mode 100755 index 000000000..c1d0fa2ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfoPb.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CollaboratorJobRunInfoPb { + @JsonProperty("collaborator_alias") + private String collaboratorAlias; + + @JsonProperty("collaborator_job_id") + private Long collaboratorJobId; + + @JsonProperty("collaborator_job_run_id") + private Long collaboratorJobRunId; + + @JsonProperty("collaborator_task_run_id") + private Long collaboratorTaskRunId; + + @JsonProperty("collaborator_workspace_id") + private Long collaboratorWorkspaceId; + + public CollaboratorJobRunInfoPb setCollaboratorAlias(String collaboratorAlias) { + this.collaboratorAlias = collaboratorAlias; + return this; + } + + public String getCollaboratorAlias() { + return collaboratorAlias; + } + + public CollaboratorJobRunInfoPb setCollaboratorJobId(Long collaboratorJobId) { + this.collaboratorJobId = collaboratorJobId; + return this; + } + + public Long getCollaboratorJobId() { + return collaboratorJobId; + } + + public CollaboratorJobRunInfoPb setCollaboratorJobRunId(Long collaboratorJobRunId) { + this.collaboratorJobRunId = collaboratorJobRunId; + return this; + } + + public Long 
getCollaboratorJobRunId() { + return collaboratorJobRunId; + } + + public CollaboratorJobRunInfoPb setCollaboratorTaskRunId(Long collaboratorTaskRunId) { + this.collaboratorTaskRunId = collaboratorTaskRunId; + return this; + } + + public Long getCollaboratorTaskRunId() { + return collaboratorTaskRunId; + } + + public CollaboratorJobRunInfoPb setCollaboratorWorkspaceId(Long collaboratorWorkspaceId) { + this.collaboratorWorkspaceId = collaboratorWorkspaceId; + return this; + } + + public Long getCollaboratorWorkspaceId() { + return collaboratorWorkspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CollaboratorJobRunInfoPb that = (CollaboratorJobRunInfoPb) o; + return Objects.equals(collaboratorAlias, that.collaboratorAlias) + && Objects.equals(collaboratorJobId, that.collaboratorJobId) + && Objects.equals(collaboratorJobRunId, that.collaboratorJobRunId) + && Objects.equals(collaboratorTaskRunId, that.collaboratorTaskRunId) + && Objects.equals(collaboratorWorkspaceId, that.collaboratorWorkspaceId); + } + + @Override + public int hashCode() { + return Objects.hash( + collaboratorAlias, + collaboratorJobId, + collaboratorJobRunId, + collaboratorTaskRunId, + collaboratorWorkspaceId); + } + + @Override + public String toString() { + return new ToStringer(CollaboratorJobRunInfoPb.class) + .add("collaboratorAlias", collaboratorAlias) + .add("collaboratorJobId", collaboratorJobId) + .add("collaboratorJobRunId", collaboratorJobRunId) + .add("collaboratorTaskRunId", collaboratorTaskRunId) + .add("collaboratorWorkspaceId", collaboratorWorkspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java index 813e6bf57..4a0b06f70 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -12,15 +21,15 @@ * The compliance security profile used to process regulated data following compliance standards. */ @Generated +@JsonSerialize(using = ComplianceSecurityProfile.ComplianceSecurityProfileSerializer.class) +@JsonDeserialize(using = ComplianceSecurityProfile.ComplianceSecurityProfileDeserializer.class) public class ComplianceSecurityProfile { /** * The list of compliance standards that the compliance security profile is configured to enforce. */ - @JsonProperty("compliance_standards") private Collection complianceStandards; /** Whether the compliance security profile is enabled. 
*/ - @JsonProperty("is_enabled") private Boolean isEnabled; public ComplianceSecurityProfile setComplianceStandards( @@ -64,4 +73,43 @@ public String toString() { .add("isEnabled", isEnabled) .toString(); } + + ComplianceSecurityProfilePb toPb() { + ComplianceSecurityProfilePb pb = new ComplianceSecurityProfilePb(); + pb.setComplianceStandards(complianceStandards); + pb.setIsEnabled(isEnabled); + + return pb; + } + + static ComplianceSecurityProfile fromPb(ComplianceSecurityProfilePb pb) { + ComplianceSecurityProfile model = new ComplianceSecurityProfile(); + model.setComplianceStandards(pb.getComplianceStandards()); + model.setIsEnabled(pb.getIsEnabled()); + + return model; + } + + public static class ComplianceSecurityProfileSerializer + extends JsonSerializer { + @Override + public void serialize( + ComplianceSecurityProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComplianceSecurityProfilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComplianceSecurityProfileDeserializer + extends JsonDeserializer { + @Override + public ComplianceSecurityProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComplianceSecurityProfilePb pb = mapper.readValue(p, ComplianceSecurityProfilePb.class); + return ComplianceSecurityProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfilePb.java new file mode 100755 index 000000000..d076896ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfilePb.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The compliance security profile used to process regulated data following compliance standards. + */ +@Generated +class ComplianceSecurityProfilePb { + @JsonProperty("compliance_standards") + private Collection complianceStandards; + + @JsonProperty("is_enabled") + private Boolean isEnabled; + + public ComplianceSecurityProfilePb setComplianceStandards( + Collection complianceStandards) { + this.complianceStandards = complianceStandards; + return this; + } + + public Collection + getComplianceStandards() { + return complianceStandards; + } + + public ComplianceSecurityProfilePb setIsEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled; + return this; + } + + public Boolean getIsEnabled() { + return isEnabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComplianceSecurityProfilePb that = (ComplianceSecurityProfilePb) o; + return Objects.equals(complianceStandards, that.complianceStandards) + && Objects.equals(isEnabled, that.isEnabled); + } + + @Override + public int hashCode() { + return Objects.hash(complianceStandards, isEnabled); + } + + @Override + public String toString() { + return new ToStringer(ComplianceSecurityProfilePb.class) + .add("complianceStandards", complianceStandards) + .add("isEnabled", isEnabled) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/Converters.java new file mode 100755 index 000000000..6582539bd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/Converters.java @@ 
-0,0 +1,59 @@ +package com.databricks.sdk.service.cleanrooms; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List 
fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java index 5a36d4906..ad1a7550e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an asset */ @Generated +@JsonSerialize(using = CreateCleanRoomAssetRequest.CreateCleanRoomAssetRequestSerializer.class) +@JsonDeserialize(using = CreateCleanRoomAssetRequest.CreateCleanRoomAssetRequestDeserializer.class) public class CreateCleanRoomAssetRequest { /** Metadata of the clean room asset */ - @JsonProperty("asset") private CleanRoomAsset asset; /** Name of the clean room. 
*/ - @JsonIgnore private String cleanRoomName; + private String cleanRoomName; public CreateCleanRoomAssetRequest setAsset(CleanRoomAsset asset) { this.asset = asset; @@ -56,4 +65,43 @@ public String toString() { .add("cleanRoomName", cleanRoomName) .toString(); } + + CreateCleanRoomAssetRequestPb toPb() { + CreateCleanRoomAssetRequestPb pb = new CreateCleanRoomAssetRequestPb(); + pb.setAsset(asset); + pb.setCleanRoomName(cleanRoomName); + + return pb; + } + + static CreateCleanRoomAssetRequest fromPb(CreateCleanRoomAssetRequestPb pb) { + CreateCleanRoomAssetRequest model = new CreateCleanRoomAssetRequest(); + model.setAsset(pb.getAsset()); + model.setCleanRoomName(pb.getCleanRoomName()); + + return model; + } + + public static class CreateCleanRoomAssetRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCleanRoomAssetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCleanRoomAssetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCleanRoomAssetRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCleanRoomAssetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCleanRoomAssetRequestPb pb = mapper.readValue(p, CreateCleanRoomAssetRequestPb.class); + return CreateCleanRoomAssetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequestPb.java new file mode 100755 index 000000000..6ede5507a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an asset */ +@Generated +class CreateCleanRoomAssetRequestPb { + @JsonProperty("asset") + private CleanRoomAsset asset; + + @JsonIgnore private String cleanRoomName; + + public CreateCleanRoomAssetRequestPb setAsset(CleanRoomAsset asset) { + this.asset = asset; + return this; + } + + public CleanRoomAsset getAsset() { + return asset; + } + + public CreateCleanRoomAssetRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCleanRoomAssetRequestPb that = (CreateCleanRoomAssetRequestPb) o; + return Objects.equals(asset, that.asset) && Objects.equals(cleanRoomName, that.cleanRoomName); + } + + @Override + public int hashCode() { + return Objects.hash(asset, cleanRoomName); + } + + @Override + public String toString() { + 
return new ToStringer(CreateCleanRoomAssetRequestPb.class) + .add("asset", asset) + .add("cleanRoomName", cleanRoomName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java index 230ec81f3..0ba08e4c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java @@ -4,18 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an output catalog */ @Generated +@JsonSerialize( + using = CreateCleanRoomOutputCatalogRequest.CreateCleanRoomOutputCatalogRequestSerializer.class) +@JsonDeserialize( + using = + CreateCleanRoomOutputCatalogRequest.CreateCleanRoomOutputCatalogRequestDeserializer.class) public class CreateCleanRoomOutputCatalogRequest { /** Name of the clean room. 
*/ - @JsonIgnore private String cleanRoomName; + private String cleanRoomName; /** */ - @JsonProperty("output_catalog") private CleanRoomOutputCatalog outputCatalog; public CreateCleanRoomOutputCatalogRequest setCleanRoomName(String cleanRoomName) { @@ -58,4 +70,44 @@ public String toString() { .add("outputCatalog", outputCatalog) .toString(); } + + CreateCleanRoomOutputCatalogRequestPb toPb() { + CreateCleanRoomOutputCatalogRequestPb pb = new CreateCleanRoomOutputCatalogRequestPb(); + pb.setCleanRoomName(cleanRoomName); + pb.setOutputCatalog(outputCatalog); + + return pb; + } + + static CreateCleanRoomOutputCatalogRequest fromPb(CreateCleanRoomOutputCatalogRequestPb pb) { + CreateCleanRoomOutputCatalogRequest model = new CreateCleanRoomOutputCatalogRequest(); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setOutputCatalog(pb.getOutputCatalog()); + + return model; + } + + public static class CreateCleanRoomOutputCatalogRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCleanRoomOutputCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCleanRoomOutputCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCleanRoomOutputCatalogRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCleanRoomOutputCatalogRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCleanRoomOutputCatalogRequestPb pb = + mapper.readValue(p, CreateCleanRoomOutputCatalogRequestPb.class); + return CreateCleanRoomOutputCatalogRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequestPb.java new file mode 100755 index 000000000..adbe574fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an output catalog */ +@Generated +class CreateCleanRoomOutputCatalogRequestPb { + @JsonIgnore private String cleanRoomName; + + @JsonProperty("output_catalog") + private CleanRoomOutputCatalog outputCatalog; + + public CreateCleanRoomOutputCatalogRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public CreateCleanRoomOutputCatalogRequestPb setOutputCatalog( + CleanRoomOutputCatalog outputCatalog) { + this.outputCatalog = outputCatalog; + return this; + } + + public CleanRoomOutputCatalog getOutputCatalog() { + return outputCatalog; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCleanRoomOutputCatalogRequestPb that = (CreateCleanRoomOutputCatalogRequestPb) o; + return Objects.equals(cleanRoomName, 
that.cleanRoomName) + && Objects.equals(outputCatalog, that.outputCatalog); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoomName, outputCatalog); + } + + @Override + public String toString() { + return new ToStringer(CreateCleanRoomOutputCatalogRequestPb.class) + .add("cleanRoomName", cleanRoomName) + .add("outputCatalog", outputCatalog) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java index 05732e12f..b0f37fce3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java @@ -4,13 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateCleanRoomOutputCatalogResponse.CreateCleanRoomOutputCatalogResponseSerializer.class) +@JsonDeserialize( + using = + CreateCleanRoomOutputCatalogResponse.CreateCleanRoomOutputCatalogResponseDeserializer.class) public class CreateCleanRoomOutputCatalogResponse { /** */ - @JsonProperty("output_catalog") private 
CleanRoomOutputCatalog outputCatalog; public CreateCleanRoomOutputCatalogResponse setOutputCatalog( @@ -42,4 +56,42 @@ public String toString() { .add("outputCatalog", outputCatalog) .toString(); } + + CreateCleanRoomOutputCatalogResponsePb toPb() { + CreateCleanRoomOutputCatalogResponsePb pb = new CreateCleanRoomOutputCatalogResponsePb(); + pb.setOutputCatalog(outputCatalog); + + return pb; + } + + static CreateCleanRoomOutputCatalogResponse fromPb(CreateCleanRoomOutputCatalogResponsePb pb) { + CreateCleanRoomOutputCatalogResponse model = new CreateCleanRoomOutputCatalogResponse(); + model.setOutputCatalog(pb.getOutputCatalog()); + + return model; + } + + public static class CreateCleanRoomOutputCatalogResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCleanRoomOutputCatalogResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCleanRoomOutputCatalogResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCleanRoomOutputCatalogResponseDeserializer + extends JsonDeserializer { + @Override + public CreateCleanRoomOutputCatalogResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCleanRoomOutputCatalogResponsePb pb = + mapper.readValue(p, CreateCleanRoomOutputCatalogResponsePb.class); + return CreateCleanRoomOutputCatalogResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponsePb.java new file mode 100755 index 000000000..a77c05029 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCleanRoomOutputCatalogResponsePb { + @JsonProperty("output_catalog") + private CleanRoomOutputCatalog outputCatalog; + + public CreateCleanRoomOutputCatalogResponsePb setOutputCatalog( + CleanRoomOutputCatalog outputCatalog) { + this.outputCatalog = outputCatalog; + return this; + } + + public CleanRoomOutputCatalog getOutputCatalog() { + return outputCatalog; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCleanRoomOutputCatalogResponsePb that = (CreateCleanRoomOutputCatalogResponsePb) o; + return Objects.equals(outputCatalog, that.outputCatalog); + } + + @Override + public int hashCode() { + return Objects.hash(outputCatalog); + } + + @Override + public String toString() { + return new ToStringer(CreateCleanRoomOutputCatalogResponsePb.class) + .add("outputCatalog", outputCatalog) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java index 13930fe82..ce5f170e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create a clean room */ @Generated +@JsonSerialize(using = CreateCleanRoomRequest.CreateCleanRoomRequestSerializer.class) +@JsonDeserialize(using = CreateCleanRoomRequest.CreateCleanRoomRequestDeserializer.class) public class CreateCleanRoomRequest { /** */ - @JsonProperty("clean_room") private CleanRoom cleanRoom; public CreateCleanRoomRequest setCleanRoom(CleanRoom cleanRoom) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateCleanRoomRequest.class).add("cleanRoom", cleanRoom).toString(); } + + CreateCleanRoomRequestPb toPb() { + CreateCleanRoomRequestPb pb = new CreateCleanRoomRequestPb(); + pb.setCleanRoom(cleanRoom); + + return pb; + } + + static CreateCleanRoomRequest fromPb(CreateCleanRoomRequestPb pb) { + CreateCleanRoomRequest model = new 
CreateCleanRoomRequest(); + model.setCleanRoom(pb.getCleanRoom()); + + return model; + } + + public static class CreateCleanRoomRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCleanRoomRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCleanRoomRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCleanRoomRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCleanRoomRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCleanRoomRequestPb pb = mapper.readValue(p, CreateCleanRoomRequestPb.class); + return CreateCleanRoomRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequestPb.java new file mode 100755 index 000000000..957efe3a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequestPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a clean room */ +@Generated +class CreateCleanRoomRequestPb { + @JsonProperty("clean_room") + private CleanRoom cleanRoom; + + public CreateCleanRoomRequestPb setCleanRoom(CleanRoom cleanRoom) { + this.cleanRoom = cleanRoom; + return this; + } + + public CleanRoom getCleanRoom() { + return cleanRoom; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCleanRoomRequestPb that = (CreateCleanRoomRequestPb) o; + return Objects.equals(cleanRoom, that.cleanRoom); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoom); + } + + @Override + public String toString() { + return new ToStringer(CreateCleanRoomRequestPb.class).add("cleanRoom", cleanRoom).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java index c75e90f31..e51518019 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java @@ -4,29 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an asset */ @Generated +@JsonSerialize(using = DeleteCleanRoomAssetRequest.DeleteCleanRoomAssetRequestSerializer.class) +@JsonDeserialize(using = DeleteCleanRoomAssetRequest.DeleteCleanRoomAssetRequestDeserializer.class) public class DeleteCleanRoomAssetRequest { - /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. */ - @JsonIgnore private String assetFullName; - /** The type of the asset. */ - @JsonIgnore private CleanRoomAssetAssetType assetType; + private CleanRoomAssetAssetType assetType; /** Name of the clean room. */ - @JsonIgnore private String cleanRoomName; - - public DeleteCleanRoomAssetRequest setAssetFullName(String assetFullName) { - this.assetFullName = assetFullName; - return this; - } + private String cleanRoomName; - public String getAssetFullName() { - return assetFullName; - } + /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. 
*/ + private String name; public DeleteCleanRoomAssetRequest setAssetType(CleanRoomAssetAssetType assetType) { this.assetType = assetType; @@ -46,27 +48,77 @@ public String getCleanRoomName() { return cleanRoomName; } + public DeleteCleanRoomAssetRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteCleanRoomAssetRequest that = (DeleteCleanRoomAssetRequest) o; - return Objects.equals(assetFullName, that.assetFullName) - && Objects.equals(assetType, that.assetType) - && Objects.equals(cleanRoomName, that.cleanRoomName); + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(assetFullName, assetType, cleanRoomName); + return Objects.hash(assetType, cleanRoomName, name); } @Override public String toString() { return new ToStringer(DeleteCleanRoomAssetRequest.class) - .add("assetFullName", assetFullName) .add("assetType", assetType) .add("cleanRoomName", cleanRoomName) + .add("name", name) .toString(); } + + DeleteCleanRoomAssetRequestPb toPb() { + DeleteCleanRoomAssetRequestPb pb = new DeleteCleanRoomAssetRequestPb(); + pb.setAssetType(assetType); + pb.setCleanRoomName(cleanRoomName); + pb.setName(name); + + return pb; + } + + static DeleteCleanRoomAssetRequest fromPb(DeleteCleanRoomAssetRequestPb pb) { + DeleteCleanRoomAssetRequest model = new DeleteCleanRoomAssetRequest(); + model.setAssetType(pb.getAssetType()); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteCleanRoomAssetRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCleanRoomAssetRequest value, JsonGenerator gen, SerializerProvider provider) + 
throws IOException { + DeleteCleanRoomAssetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCleanRoomAssetRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCleanRoomAssetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCleanRoomAssetRequestPb pb = mapper.readValue(p, DeleteCleanRoomAssetRequestPb.class); + return DeleteCleanRoomAssetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequestPb.java new file mode 100755 index 000000000..d291dba19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an asset */ +@Generated +class DeleteCleanRoomAssetRequestPb { + @JsonIgnore private CleanRoomAssetAssetType assetType; + + @JsonIgnore private String cleanRoomName; + + @JsonIgnore private String name; + + public DeleteCleanRoomAssetRequestPb setAssetType(CleanRoomAssetAssetType assetType) { + this.assetType = assetType; + return this; + } + + public CleanRoomAssetAssetType getAssetType() { + return assetType; + } + + public DeleteCleanRoomAssetRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public DeleteCleanRoomAssetRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCleanRoomAssetRequestPb that = (DeleteCleanRoomAssetRequestPb) o; + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(assetType, cleanRoomName, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteCleanRoomAssetRequestPb.class) + .add("assetType", assetType) + .add("cleanRoomName", cleanRoomName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java index 4efe5848d..fb2a47672 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java @@ -4,6 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -11,6 +21,9 @@ * not externd UnshadedMessageMarker. */ @Generated +@JsonSerialize(using = DeleteCleanRoomAssetResponse.DeleteCleanRoomAssetResponseSerializer.class) +@JsonDeserialize( + using = DeleteCleanRoomAssetResponse.DeleteCleanRoomAssetResponseDeserializer.class) public class DeleteCleanRoomAssetResponse { @Override @@ -29,4 +42,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCleanRoomAssetResponse.class).toString(); } + + DeleteCleanRoomAssetResponsePb toPb() { + DeleteCleanRoomAssetResponsePb pb = new DeleteCleanRoomAssetResponsePb(); + + return pb; + } + + static DeleteCleanRoomAssetResponse fromPb(DeleteCleanRoomAssetResponsePb pb) { + DeleteCleanRoomAssetResponse model = new DeleteCleanRoomAssetResponse(); + + return model; + } + + public static class DeleteCleanRoomAssetResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCleanRoomAssetResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCleanRoomAssetResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCleanRoomAssetResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteCleanRoomAssetResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCleanRoomAssetResponsePb pb = mapper.readValue(p, DeleteCleanRoomAssetResponsePb.class); + return DeleteCleanRoomAssetResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponsePb.java new file mode 100755 index 000000000..537af1d20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponsePb.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; + +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** + * Response for delete clean room request. Using an empty message since the generic Empty proto does + * not extend UnshadedMessageMarker. 
+ */ +@Generated +class DeleteCleanRoomAssetResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCleanRoomAssetResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java index 7681b1fc5..242715ba6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a clean room */ @Generated +@JsonSerialize(using = DeleteCleanRoomRequest.DeleteCleanRoomRequestSerializer.class) +@JsonDeserialize(using = DeleteCleanRoomRequest.DeleteCleanRoomRequestDeserializer.class) public class DeleteCleanRoomRequest { /** Name of the clean room. 
*/ - @JsonIgnore private String name; + private String name; public DeleteCleanRoomRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCleanRoomRequest.class).add("name", name).toString(); } + + DeleteCleanRoomRequestPb toPb() { + DeleteCleanRoomRequestPb pb = new DeleteCleanRoomRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteCleanRoomRequest fromPb(DeleteCleanRoomRequestPb pb) { + DeleteCleanRoomRequest model = new DeleteCleanRoomRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteCleanRoomRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCleanRoomRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCleanRoomRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCleanRoomRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCleanRoomRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCleanRoomRequestPb pb = mapper.readValue(p, DeleteCleanRoomRequestPb.class); + return DeleteCleanRoomRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequestPb.java new file mode 100755 index 000000000..120091ff1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a clean room */ +@Generated +class DeleteCleanRoomRequestPb { + @JsonIgnore private String name; + + public DeleteCleanRoomRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCleanRoomRequestPb that = (DeleteCleanRoomRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteCleanRoomRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java index 3bb766a47..8ac504fd1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponsePb.java new file mode 100755 index 000000000..f80c43075 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java index 9bc47aca3..bb06acdf0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java @@ -4,29 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an asset */ @Generated +@JsonSerialize(using = GetCleanRoomAssetRequest.GetCleanRoomAssetRequestSerializer.class) +@JsonDeserialize(using = GetCleanRoomAssetRequest.GetCleanRoomAssetRequestDeserializer.class) public class 
GetCleanRoomAssetRequest { - /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. */ - @JsonIgnore private String assetFullName; - /** The type of the asset. */ - @JsonIgnore private CleanRoomAssetAssetType assetType; + private CleanRoomAssetAssetType assetType; /** Name of the clean room. */ - @JsonIgnore private String cleanRoomName; - - public GetCleanRoomAssetRequest setAssetFullName(String assetFullName) { - this.assetFullName = assetFullName; - return this; - } + private String cleanRoomName; - public String getAssetFullName() { - return assetFullName; - } + /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. */ + private String name; public GetCleanRoomAssetRequest setAssetType(CleanRoomAssetAssetType assetType) { this.assetType = assetType; @@ -46,27 +48,77 @@ public String getCleanRoomName() { return cleanRoomName; } + public GetCleanRoomAssetRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetCleanRoomAssetRequest that = (GetCleanRoomAssetRequest) o; - return Objects.equals(assetFullName, that.assetFullName) - && Objects.equals(assetType, that.assetType) - && Objects.equals(cleanRoomName, that.cleanRoomName); + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(assetFullName, assetType, cleanRoomName); + return Objects.hash(assetType, cleanRoomName, name); } @Override public String toString() { return new ToStringer(GetCleanRoomAssetRequest.class) - .add("assetFullName", assetFullName) .add("assetType", assetType) .add("cleanRoomName", cleanRoomName) + .add("name", name) .toString(); } + + GetCleanRoomAssetRequestPb toPb() { + 
GetCleanRoomAssetRequestPb pb = new GetCleanRoomAssetRequestPb(); + pb.setAssetType(assetType); + pb.setCleanRoomName(cleanRoomName); + pb.setName(name); + + return pb; + } + + static GetCleanRoomAssetRequest fromPb(GetCleanRoomAssetRequestPb pb) { + GetCleanRoomAssetRequest model = new GetCleanRoomAssetRequest(); + model.setAssetType(pb.getAssetType()); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setName(pb.getName()); + + return model; + } + + public static class GetCleanRoomAssetRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCleanRoomAssetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCleanRoomAssetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCleanRoomAssetRequestDeserializer + extends JsonDeserializer { + @Override + public GetCleanRoomAssetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCleanRoomAssetRequestPb pb = mapper.readValue(p, GetCleanRoomAssetRequestPb.class); + return GetCleanRoomAssetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequestPb.java new file mode 100755 index 000000000..444312469 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an asset */ +@Generated +class GetCleanRoomAssetRequestPb { + @JsonIgnore private CleanRoomAssetAssetType assetType; + + @JsonIgnore private String cleanRoomName; + + @JsonIgnore private String name; + + public GetCleanRoomAssetRequestPb setAssetType(CleanRoomAssetAssetType assetType) { + this.assetType = assetType; + return this; + } + + public CleanRoomAssetAssetType getAssetType() { + return assetType; + } + + public GetCleanRoomAssetRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public GetCleanRoomAssetRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCleanRoomAssetRequestPb that = (GetCleanRoomAssetRequestPb) o; + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(assetType, cleanRoomName, name); + } + + @Override + public String toString() { + return new ToStringer(GetCleanRoomAssetRequestPb.class) + .add("assetType", assetType) + .add("cleanRoomName", cleanRoomName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java index 6103c2c8f..510699c69 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a clean room */ @Generated +@JsonSerialize(using = GetCleanRoomRequest.GetCleanRoomRequestSerializer.class) +@JsonDeserialize(using = GetCleanRoomRequest.GetCleanRoomRequestDeserializer.class) public class GetCleanRoomRequest { /** */ - @JsonIgnore private String name; + private String name; public GetCleanRoomRequest setName(String name) { this.name = name; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetCleanRoomRequest.class).add("name", name).toString(); } + + GetCleanRoomRequestPb toPb() { + GetCleanRoomRequestPb pb = new GetCleanRoomRequestPb(); + pb.setName(name); + + return pb; + } + + static GetCleanRoomRequest fromPb(GetCleanRoomRequestPb pb) { + GetCleanRoomRequest model = new GetCleanRoomRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetCleanRoomRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetCleanRoomRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCleanRoomRequestPb 
pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCleanRoomRequestDeserializer + extends JsonDeserializer { + @Override + public GetCleanRoomRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCleanRoomRequestPb pb = mapper.readValue(p, GetCleanRoomRequestPb.class); + return GetCleanRoomRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequestPb.java new file mode 100755 index 000000000..f88242279 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a clean room */ +@Generated +class GetCleanRoomRequestPb { + @JsonIgnore private String name; + + public GetCleanRoomRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCleanRoomRequestPb that = (GetCleanRoomRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetCleanRoomRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java index 40bb4fef0..acfc58507 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.cleanrooms; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List assets */ @Generated +@JsonSerialize(using = ListCleanRoomAssetsRequest.ListCleanRoomAssetsRequestSerializer.class) +@JsonDeserialize(using = ListCleanRoomAssetsRequest.ListCleanRoomAssetsRequestDeserializer.class) public class ListCleanRoomAssetsRequest { /** Name of the clean room. */ - @JsonIgnore private String cleanRoomName; + private String cleanRoomName; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListCleanRoomAssetsRequest setCleanRoomName(String cleanRoomName) { @@ -58,4 +66,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListCleanRoomAssetsRequestPb toPb() { + ListCleanRoomAssetsRequestPb pb = new ListCleanRoomAssetsRequestPb(); + pb.setCleanRoomName(cleanRoomName); + pb.setPageToken(pageToken); + + return pb; + } + + static ListCleanRoomAssetsRequest fromPb(ListCleanRoomAssetsRequestPb pb) { + ListCleanRoomAssetsRequest model = new ListCleanRoomAssetsRequest(); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListCleanRoomAssetsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomAssetsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomAssetsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomAssetsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomAssetsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomAssetsRequestPb pb = mapper.readValue(p, ListCleanRoomAssetsRequestPb.class); + return ListCleanRoomAssetsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequestPb.java new file mode 100755 index 000000000..c37c8707e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List assets */ +@Generated +class ListCleanRoomAssetsRequestPb { + @JsonIgnore private String cleanRoomName; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListCleanRoomAssetsRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public ListCleanRoomAssetsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomAssetsRequestPb that = (ListCleanRoomAssetsRequestPb) o; + return Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoomName, pageToken); + } + + @Override + 
public String toString() { + return new ToStringer(ListCleanRoomAssetsRequestPb.class) + .add("cleanRoomName", cleanRoomName) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponse.java index 528f067b1..6df8c18c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListCleanRoomAssetsResponse.ListCleanRoomAssetsResponseSerializer.class) +@JsonDeserialize(using = ListCleanRoomAssetsResponse.ListCleanRoomAssetsResponseDeserializer.class) public class ListCleanRoomAssetsResponse { /** Assets in the clean room. */ - @JsonProperty("assets") private Collection assets; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * page_token should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListCleanRoomAssetsResponse setAssets(Collection assets) { @@ -59,4 +68,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListCleanRoomAssetsResponsePb toPb() { + ListCleanRoomAssetsResponsePb pb = new ListCleanRoomAssetsResponsePb(); + pb.setAssets(assets); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListCleanRoomAssetsResponse fromPb(ListCleanRoomAssetsResponsePb pb) { + ListCleanRoomAssetsResponse model = new ListCleanRoomAssetsResponse(); + model.setAssets(pb.getAssets()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListCleanRoomAssetsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomAssetsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomAssetsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomAssetsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomAssetsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomAssetsResponsePb pb = mapper.readValue(p, ListCleanRoomAssetsResponsePb.class); + return ListCleanRoomAssetsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponsePb.java new file mode 100755 index 000000000..593dd556b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCleanRoomAssetsResponsePb { + @JsonProperty("assets") + private Collection assets; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListCleanRoomAssetsResponsePb setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public ListCleanRoomAssetsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomAssetsResponsePb that = (ListCleanRoomAssetsResponsePb) o; + return Objects.equals(assets, that.assets) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(assets, nextPageToken); + } + + @Override + public String toString() { + return new 
ToStringer(ListCleanRoomAssetsResponsePb.class) + .add("assets", assets) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java index 5b64fdb82..d078fc4f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java @@ -3,30 +3,38 @@ package com.databricks.sdk.service.cleanrooms; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List notebook task runs */ @Generated +@JsonSerialize( + using = + ListCleanRoomNotebookTaskRunsRequest.ListCleanRoomNotebookTaskRunsRequestSerializer.class) +@JsonDeserialize( + using = + ListCleanRoomNotebookTaskRunsRequest.ListCleanRoomNotebookTaskRunsRequestDeserializer.class) public class ListCleanRoomNotebookTaskRunsRequest { /** Name of the clean room. 
*/ - @JsonIgnore private String cleanRoomName; + private String cleanRoomName; /** Notebook name */ - @JsonIgnore - @QueryParam("notebook_name") private String notebookName; /** The maximum number of task runs to return. Currently ignored - all runs will be returned. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListCleanRoomNotebookTaskRunsRequest setCleanRoomName(String cleanRoomName) { @@ -90,4 +98,48 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListCleanRoomNotebookTaskRunsRequestPb toPb() { + ListCleanRoomNotebookTaskRunsRequestPb pb = new ListCleanRoomNotebookTaskRunsRequestPb(); + pb.setCleanRoomName(cleanRoomName); + pb.setNotebookName(notebookName); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListCleanRoomNotebookTaskRunsRequest fromPb(ListCleanRoomNotebookTaskRunsRequestPb pb) { + ListCleanRoomNotebookTaskRunsRequest model = new ListCleanRoomNotebookTaskRunsRequest(); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setNotebookName(pb.getNotebookName()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListCleanRoomNotebookTaskRunsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomNotebookTaskRunsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomNotebookTaskRunsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomNotebookTaskRunsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomNotebookTaskRunsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it 
is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomNotebookTaskRunsRequestPb pb = + mapper.readValue(p, ListCleanRoomNotebookTaskRunsRequestPb.class); + return ListCleanRoomNotebookTaskRunsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequestPb.java new file mode 100755 index 000000000..ad3e1f3eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequestPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List notebook task runs */ +@Generated +class ListCleanRoomNotebookTaskRunsRequestPb { + @JsonIgnore private String cleanRoomName; + + @JsonIgnore + @QueryParam("notebook_name") + private String notebookName; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListCleanRoomNotebookTaskRunsRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public ListCleanRoomNotebookTaskRunsRequestPb setNotebookName(String notebookName) { + this.notebookName = notebookName; + return this; + } + + public String getNotebookName() { + return notebookName; + } + + public ListCleanRoomNotebookTaskRunsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + 
return pageSize; + } + + public ListCleanRoomNotebookTaskRunsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomNotebookTaskRunsRequestPb that = (ListCleanRoomNotebookTaskRunsRequestPb) o; + return Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(notebookName, that.notebookName) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoomName, notebookName, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCleanRoomNotebookTaskRunsRequestPb.class) + .add("cleanRoomName", cleanRoomName) + .add("notebookName", notebookName) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponse.java index 9226f6a4e..ee83061ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponse.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListCleanRoomNotebookTaskRunsResponse.ListCleanRoomNotebookTaskRunsResponseSerializer.class) +@JsonDeserialize( + using = + ListCleanRoomNotebookTaskRunsResponse.ListCleanRoomNotebookTaskRunsResponseDeserializer + .class) public class ListCleanRoomNotebookTaskRunsResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * page_token should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** Name of the clean room. */ - @JsonProperty("runs") private Collection runs; public ListCleanRoomNotebookTaskRunsResponse setNextPageToken(String nextPageToken) { @@ -59,4 +73,44 @@ public String toString() { .add("runs", runs) .toString(); } + + ListCleanRoomNotebookTaskRunsResponsePb toPb() { + ListCleanRoomNotebookTaskRunsResponsePb pb = new ListCleanRoomNotebookTaskRunsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRuns(runs); + + return pb; + } + + static ListCleanRoomNotebookTaskRunsResponse fromPb(ListCleanRoomNotebookTaskRunsResponsePb pb) { + ListCleanRoomNotebookTaskRunsResponse model = new ListCleanRoomNotebookTaskRunsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRuns(pb.getRuns()); + + return model; + } + + public static class ListCleanRoomNotebookTaskRunsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomNotebookTaskRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomNotebookTaskRunsResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomNotebookTaskRunsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomNotebookTaskRunsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomNotebookTaskRunsResponsePb pb = + mapper.readValue(p, ListCleanRoomNotebookTaskRunsResponsePb.class); + return ListCleanRoomNotebookTaskRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponsePb.java new file mode 100755 index 000000000..847ca90fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCleanRoomNotebookTaskRunsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("runs") + private Collection runs; + + public ListCleanRoomNotebookTaskRunsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListCleanRoomNotebookTaskRunsResponsePb setRuns( + Collection runs) { + this.runs = runs; + return this; + } + + public Collection getRuns() { + return runs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomNotebookTaskRunsResponsePb that = (ListCleanRoomNotebookTaskRunsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(runs, that.runs); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, runs); + } + + @Override + public String toString() { + return new ToStringer(ListCleanRoomNotebookTaskRunsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("runs", runs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java index 45845b80a..992ad7507 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.cleanrooms; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List clean rooms */ @Generated +@JsonSerialize(using = ListCleanRoomsRequest.ListCleanRoomsRequestSerializer.class) +@JsonDeserialize(using = ListCleanRoomsRequest.ListCleanRoomsRequestDeserializer.class) public class ListCleanRoomsRequest { /** Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Opaque pagination token to go to next page based on previous query. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListCleanRoomsRequest setPageSize(Long pageSize) { @@ -59,4 +65,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListCleanRoomsRequestPb toPb() { + ListCleanRoomsRequestPb pb = new ListCleanRoomsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListCleanRoomsRequest fromPb(ListCleanRoomsRequestPb pb) { + ListCleanRoomsRequest model = new ListCleanRoomsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListCleanRoomsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomsRequestPb pb = mapper.readValue(p, ListCleanRoomsRequestPb.class); + return ListCleanRoomsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequestPb.java new file mode 100755 index 000000000..2a19651e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List clean rooms */ +@Generated +class ListCleanRoomsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListCleanRoomsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListCleanRoomsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomsRequestPb that = (ListCleanRoomsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCleanRoomsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponse.java index 0956a8a22..2ad6ba48b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListCleanRoomsResponse.ListCleanRoomsResponseSerializer.class) +@JsonDeserialize(using = ListCleanRoomsResponse.ListCleanRoomsResponseDeserializer.class) public class ListCleanRoomsResponse { /** */ - @JsonProperty("clean_rooms") private Collection cleanRooms; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * page_token should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListCleanRoomsResponse setCleanRooms(Collection cleanRooms) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListCleanRoomsResponsePb toPb() { + ListCleanRoomsResponsePb pb = new ListCleanRoomsResponsePb(); + pb.setCleanRooms(cleanRooms); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListCleanRoomsResponse fromPb(ListCleanRoomsResponsePb pb) { + ListCleanRoomsResponse model = new ListCleanRoomsResponse(); + model.setCleanRooms(pb.getCleanRooms()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListCleanRoomsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCleanRoomsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCleanRoomsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCleanRoomsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCleanRoomsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCleanRoomsResponsePb pb = mapper.readValue(p, ListCleanRoomsResponsePb.class); + return ListCleanRoomsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponsePb.java new file mode 100755 index 000000000..ad9c9fb2d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCleanRoomsResponsePb { + @JsonProperty("clean_rooms") + private Collection cleanRooms; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListCleanRoomsResponsePb setCleanRooms(Collection cleanRooms) { + this.cleanRooms = cleanRooms; + return this; + } + + public Collection getCleanRooms() { + return cleanRooms; + } + + public ListCleanRoomsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCleanRoomsResponsePb that = (ListCleanRoomsResponsePb) o; + return Objects.equals(cleanRooms, that.cleanRooms) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRooms, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCleanRoomsResponsePb.class) + .add("cleanRooms", cleanRooms) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java index f1d57be5b..85e8298b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update an asset */ @Generated +@JsonSerialize(using = UpdateCleanRoomAssetRequest.UpdateCleanRoomAssetRequestSerializer.class) +@JsonDeserialize(using = UpdateCleanRoomAssetRequest.UpdateCleanRoomAssetRequestDeserializer.class) public class UpdateCleanRoomAssetRequest { /** Metadata of the clean room asset */ - @JsonProperty("asset") private CleanRoomAsset asset; /** The type of the asset. */ - @JsonIgnore private CleanRoomAssetAssetType assetType; + private CleanRoomAssetAssetType assetType; /** Name of the clean room. */ - @JsonIgnore private String cleanRoomName; + private String cleanRoomName; /** * A fully qualified name that uniquely identifies the asset within the clean room. This is also @@ -30,7 +39,7 @@ public class UpdateCleanRoomAssetRequest { * *

For notebooks, the name is the notebook file name. */ - @JsonIgnore private String name; + private String name; public UpdateCleanRoomAssetRequest setAsset(CleanRoomAsset asset) { this.asset = asset; @@ -93,4 +102,47 @@ public String toString() { .add("name", name) .toString(); } + + UpdateCleanRoomAssetRequestPb toPb() { + UpdateCleanRoomAssetRequestPb pb = new UpdateCleanRoomAssetRequestPb(); + pb.setAsset(asset); + pb.setAssetType(assetType); + pb.setCleanRoomName(cleanRoomName); + pb.setName(name); + + return pb; + } + + static UpdateCleanRoomAssetRequest fromPb(UpdateCleanRoomAssetRequestPb pb) { + UpdateCleanRoomAssetRequest model = new UpdateCleanRoomAssetRequest(); + model.setAsset(pb.getAsset()); + model.setAssetType(pb.getAssetType()); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateCleanRoomAssetRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCleanRoomAssetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCleanRoomAssetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCleanRoomAssetRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateCleanRoomAssetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCleanRoomAssetRequestPb pb = mapper.readValue(p, UpdateCleanRoomAssetRequestPb.class); + return UpdateCleanRoomAssetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequestPb.java new file mode 100755 index 000000000..71c4a9d94 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequestPb.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update an asset */ +@Generated +class UpdateCleanRoomAssetRequestPb { + @JsonProperty("asset") + private CleanRoomAsset asset; + + @JsonIgnore private CleanRoomAssetAssetType assetType; + + @JsonIgnore private String cleanRoomName; + + @JsonIgnore private String name; + + public UpdateCleanRoomAssetRequestPb setAsset(CleanRoomAsset asset) { + this.asset = asset; + return this; + } + + public CleanRoomAsset getAsset() { + return asset; + } + + public UpdateCleanRoomAssetRequestPb setAssetType(CleanRoomAssetAssetType assetType) { + this.assetType = assetType; + return this; + } + + public CleanRoomAssetAssetType getAssetType() { + return assetType; + } + + public UpdateCleanRoomAssetRequestPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public UpdateCleanRoomAssetRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + 
return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCleanRoomAssetRequestPb that = (UpdateCleanRoomAssetRequestPb) o; + return Objects.equals(asset, that.asset) + && Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(asset, assetType, cleanRoomName, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateCleanRoomAssetRequestPb.class) + .add("asset", asset) + .add("assetType", assetType) + .add("cleanRoomName", cleanRoomName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java index 32097e13a..6c47888bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = UpdateCleanRoomRequest.UpdateCleanRoomRequestSerializer.class) +@JsonDeserialize(using = UpdateCleanRoomRequest.UpdateCleanRoomRequestDeserializer.class) public class UpdateCleanRoomRequest { /** */ - @JsonProperty("clean_room") private CleanRoom cleanRoom; /** Name of the clean room. */ - @JsonIgnore private String name; + private String name; public UpdateCleanRoomRequest setCleanRoom(CleanRoom cleanRoom) { this.cleanRoom = cleanRoom; @@ -55,4 +64,43 @@ public String toString() { .add("name", name) .toString(); } + + UpdateCleanRoomRequestPb toPb() { + UpdateCleanRoomRequestPb pb = new UpdateCleanRoomRequestPb(); + pb.setCleanRoom(cleanRoom); + pb.setName(name); + + return pb; + } + + static UpdateCleanRoomRequest fromPb(UpdateCleanRoomRequestPb pb) { + UpdateCleanRoomRequest model = new UpdateCleanRoomRequest(); + model.setCleanRoom(pb.getCleanRoom()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateCleanRoomRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCleanRoomRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCleanRoomRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCleanRoomRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateCleanRoomRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCleanRoomRequestPb pb = mapper.readValue(p, UpdateCleanRoomRequestPb.class); + return UpdateCleanRoomRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequestPb.java new file mode 100755 index 000000000..4b9fc231a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateCleanRoomRequestPb { + @JsonProperty("clean_room") + private CleanRoom cleanRoom; + + @JsonIgnore private String name; + + public UpdateCleanRoomRequestPb setCleanRoom(CleanRoom cleanRoom) { + this.cleanRoom = cleanRoom; + return this; + } + + public CleanRoom getCleanRoom() { + return cleanRoom; + } + + public UpdateCleanRoomRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCleanRoomRequestPb that = (UpdateCleanRoomRequestPb) o; + return Objects.equals(cleanRoom, that.cleanRoom) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoom, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateCleanRoomRequestPb.class) + .add("cleanRoom", cleanRoom) + .add("name", name) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java index 947d3bffa..9ec865212 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddInstanceProfile.AddInstanceProfileSerializer.class) +@JsonDeserialize(using = AddInstanceProfile.AddInstanceProfileDeserializer.class) public class AddInstanceProfile { /** * The AWS IAM role ARN of the role associated with the instance profile. This field is required @@ -18,11 +29,9 @@ public class AddInstanceProfile { * *

[Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html */ - @JsonProperty("iam_role_arn") private String iamRoleArn; /** The AWS ARN of the instance profile to register with Databricks. This field is required. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** @@ -31,7 +40,6 @@ public class AddInstanceProfile { * could assume a wide range of roles. Therefore it should always be used with authorization. This * field is optional, the default value is `false`. */ - @JsonProperty("is_meta_instance_profile") private Boolean isMetaInstanceProfile; /** @@ -41,7 +49,6 @@ public class AddInstanceProfile { * (e.g. “Your requested instance type is not supported in your requested availability zone”), you * can pass this flag to skip the validation and forcibly add the instance profile. */ - @JsonProperty("skip_validation") private Boolean skipValidation; public AddInstanceProfile setIamRoleArn(String iamRoleArn) { @@ -105,4 +112,44 @@ public String toString() { .add("skipValidation", skipValidation) .toString(); } + + AddInstanceProfilePb toPb() { + AddInstanceProfilePb pb = new AddInstanceProfilePb(); + pb.setIamRoleArn(iamRoleArn); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setIsMetaInstanceProfile(isMetaInstanceProfile); + pb.setSkipValidation(skipValidation); + + return pb; + } + + static AddInstanceProfile fromPb(AddInstanceProfilePb pb) { + AddInstanceProfile model = new AddInstanceProfile(); + model.setIamRoleArn(pb.getIamRoleArn()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setIsMetaInstanceProfile(pb.getIsMetaInstanceProfile()); + model.setSkipValidation(pb.getSkipValidation()); + + return model; + } + + public static class AddInstanceProfileSerializer extends JsonSerializer { + @Override + public void serialize(AddInstanceProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddInstanceProfilePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddInstanceProfileDeserializer extends JsonDeserializer { + @Override + public AddInstanceProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddInstanceProfilePb pb = mapper.readValue(p, AddInstanceProfilePb.class); + return AddInstanceProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfilePb.java new file mode 100755 index 000000000..bdb797838 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfilePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AddInstanceProfilePb { + @JsonProperty("iam_role_arn") + private String iamRoleArn; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("is_meta_instance_profile") + private Boolean isMetaInstanceProfile; + + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public AddInstanceProfilePb setIamRoleArn(String iamRoleArn) { + this.iamRoleArn = iamRoleArn; + return this; + } + + public String getIamRoleArn() { + return iamRoleArn; + } + + public AddInstanceProfilePb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public AddInstanceProfilePb 
setIsMetaInstanceProfile(Boolean isMetaInstanceProfile) { + this.isMetaInstanceProfile = isMetaInstanceProfile; + return this; + } + + public Boolean getIsMetaInstanceProfile() { + return isMetaInstanceProfile; + } + + public AddInstanceProfilePb setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddInstanceProfilePb that = (AddInstanceProfilePb) o; + return Objects.equals(iamRoleArn, that.iamRoleArn) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(isMetaInstanceProfile, that.isMetaInstanceProfile) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash(iamRoleArn, instanceProfileArn, isMetaInstanceProfile, skipValidation); + } + + @Override + public String toString() { + return new ToStringer(AddInstanceProfilePb.class) + .add("iamRoleArn", iamRoleArn) + .add("instanceProfileArn", instanceProfileArn) + .add("isMetaInstanceProfile", isMetaInstanceProfile) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java index 2c169a9d7..4588ab415 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddResponse.AddResponseSerializer.class) +@JsonDeserialize(using = AddResponse.AddResponseDeserializer.class) public class AddResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(AddResponse.class).toString(); } + + AddResponsePb toPb() { + AddResponsePb pb = new AddResponsePb(); + + return pb; + } + + static AddResponse fromPb(AddResponsePb pb) { + AddResponse model = new AddResponse(); + + return model; + } + + public static class AddResponseSerializer extends JsonSerializer { + @Override + public void serialize(AddResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddResponseDeserializer extends JsonDeserializer { + @Override + public AddResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddResponsePb pb = mapper.readValue(p, AddResponsePb.class); + return AddResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponsePb.java new file mode 100755 index 000000000..8e225beee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class AddResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AddResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java index 9aa66fa06..e9791e2e6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location in Adls Gen2 */ @Generated +@JsonSerialize(using = Adlsgen2Info.Adlsgen2InfoSerializer.class) +@JsonDeserialize(using = Adlsgen2Info.Adlsgen2InfoDeserializer.class) public class Adlsgen2Info { /** * abfss destination, e.g. * `abfss://@.dfs.core.windows.net/`. */ - @JsonProperty("destination") private String destination; public Adlsgen2Info setDestination(String destination) { @@ -43,4 +53,37 @@ public int hashCode() { public String toString() { return new ToStringer(Adlsgen2Info.class).add("destination", destination).toString(); } + + Adlsgen2InfoPb toPb() { + Adlsgen2InfoPb pb = new Adlsgen2InfoPb(); + pb.setDestination(destination); + + return pb; + } + + static Adlsgen2Info fromPb(Adlsgen2InfoPb pb) { + Adlsgen2Info model = new Adlsgen2Info(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class Adlsgen2InfoSerializer extends JsonSerializer { + @Override + public void serialize(Adlsgen2Info value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + Adlsgen2InfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class Adlsgen2InfoDeserializer extends JsonDeserializer { + @Override + public Adlsgen2Info deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + Adlsgen2InfoPb pb = mapper.readValue(p, Adlsgen2InfoPb.class); + return Adlsgen2Info.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2InfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2InfoPb.java new file mode 100755 index 000000000..19085551e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2InfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location in Adls Gen2 */ +@Generated +class Adlsgen2InfoPb { + @JsonProperty("destination") + private String destination; + + public Adlsgen2InfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Adlsgen2InfoPb that = (Adlsgen2InfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(Adlsgen2InfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java index 839464c6a..b33faed23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AutoScale.AutoScaleSerializer.class) +@JsonDeserialize(using = AutoScale.AutoScaleDeserializer.class) public class AutoScale { /** * The maximum number of workers to which the cluster can scale up when overloaded. Note that * `max_workers` must be strictly greater than `min_workers`. */ - @JsonProperty("max_workers") private Long maxWorkers; /** * The minimum number of workers to which the cluster can scale down when underutilized. It is * also the initial number of workers the cluster will have after creation. 
*/ - @JsonProperty("min_workers") private Long minWorkers; public AutoScale setMaxWorkers(Long maxWorkers) { @@ -62,4 +71,39 @@ public String toString() { .add("minWorkers", minWorkers) .toString(); } + + AutoScalePb toPb() { + AutoScalePb pb = new AutoScalePb(); + pb.setMaxWorkers(maxWorkers); + pb.setMinWorkers(minWorkers); + + return pb; + } + + static AutoScale fromPb(AutoScalePb pb) { + AutoScale model = new AutoScale(); + model.setMaxWorkers(pb.getMaxWorkers()); + model.setMinWorkers(pb.getMinWorkers()); + + return model; + } + + public static class AutoScaleSerializer extends JsonSerializer { + @Override + public void serialize(AutoScale value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AutoScalePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AutoScaleDeserializer extends JsonDeserializer { + @Override + public AutoScale deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AutoScalePb pb = mapper.readValue(p, AutoScalePb.class); + return AutoScale.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScalePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScalePb.java new file mode 100755 index 000000000..acc5a7c71 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScalePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AutoScalePb { + @JsonProperty("max_workers") + private Long maxWorkers; + + @JsonProperty("min_workers") + private Long minWorkers; + + public AutoScalePb setMaxWorkers(Long maxWorkers) { + this.maxWorkers = maxWorkers; + return this; + } + + public Long getMaxWorkers() { + return maxWorkers; + } + + public AutoScalePb setMinWorkers(Long minWorkers) { + this.minWorkers = minWorkers; + return this; + } + + public Long getMinWorkers() { + return minWorkers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoScalePb that = (AutoScalePb) o; + return Objects.equals(maxWorkers, that.maxWorkers) + && Objects.equals(minWorkers, that.minWorkers); + } + + @Override + public int hashCode() { + return Objects.hash(maxWorkers, minWorkers); + } + + @Override + public String toString() { + return new ToStringer(AutoScalePb.class) + .add("maxWorkers", maxWorkers) + .add("minWorkers", minWorkers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java index 825127bce..429b13f74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during cluster creation which are related to Amazon Web Services. */ @Generated +@JsonSerialize(using = AwsAttributes.AwsAttributesSerializer.class) +@JsonDeserialize(using = AwsAttributes.AwsAttributesDeserializer.class) public class AwsAttributes { /** * Availability type used for all subsequent nodes past the `first_on_demand` ones. @@ -16,7 +27,6 @@ public class AwsAttributes { *

Note: If `first_on_demand` is zero, this availability type will be used for the entire * cluster. */ - @JsonProperty("availability") private AwsAvailability availability; /** @@ -36,14 +46,12 @@ public class AwsAttributes { *

Please note that if EBS volumes are specified, then the Spark configuration * `spark.local.dir` will be overridden. */ - @JsonProperty("ebs_volume_count") private Long ebsVolumeCount; /** * If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum * performance of a gp2 volume with the same volume size will be used. */ - @JsonProperty("ebs_volume_iops") private Long ebsVolumeIops; /** @@ -51,18 +59,15 @@ public class AwsAttributes { * value must be within the range 100 - 4096. For throughput optimized HDD, this value must be * within the range 500 - 4096. */ - @JsonProperty("ebs_volume_size") private Long ebsVolumeSize; /** * If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum * performance of a gp2 volume with the same volume size will be used. */ - @JsonProperty("ebs_volume_throughput") private Long ebsVolumeThroughput; /** The type of EBS volumes that will be launched with this cluster. */ - @JsonProperty("ebs_volume_type") private EbsVolumeType ebsVolumeType; /** @@ -74,7 +79,6 @@ public class AwsAttributes { * on `availability` instances. Note that this value does not affect cluster size and cannot * currently be mutated over the lifetime of a cluster. */ - @JsonProperty("first_on_demand") private Long firstOnDemand; /** @@ -85,7 +89,6 @@ public class AwsAttributes { * *

This feature may only be available to certain customer plans. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** @@ -98,7 +101,6 @@ public class AwsAttributes { * matches this field will be considered. Note that, for safety, we enforce this field to be no * more than 10000. */ - @JsonProperty("spot_bid_price_percent") private Long spotBidPricePercent; /** @@ -113,7 +115,6 @@ public class AwsAttributes { *

The list of available zones as well as the default value can be found by using the `List * Zones` method. */ - @JsonProperty("zone_id") private String zoneId; public AwsAttributes setAvailability(AwsAvailability availability) { @@ -253,4 +254,55 @@ public String toString() { .add("zoneId", zoneId) .toString(); } + + AwsAttributesPb toPb() { + AwsAttributesPb pb = new AwsAttributesPb(); + pb.setAvailability(availability); + pb.setEbsVolumeCount(ebsVolumeCount); + pb.setEbsVolumeIops(ebsVolumeIops); + pb.setEbsVolumeSize(ebsVolumeSize); + pb.setEbsVolumeThroughput(ebsVolumeThroughput); + pb.setEbsVolumeType(ebsVolumeType); + pb.setFirstOnDemand(firstOnDemand); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setSpotBidPricePercent(spotBidPricePercent); + pb.setZoneId(zoneId); + + return pb; + } + + static AwsAttributes fromPb(AwsAttributesPb pb) { + AwsAttributes model = new AwsAttributes(); + model.setAvailability(pb.getAvailability()); + model.setEbsVolumeCount(pb.getEbsVolumeCount()); + model.setEbsVolumeIops(pb.getEbsVolumeIops()); + model.setEbsVolumeSize(pb.getEbsVolumeSize()); + model.setEbsVolumeThroughput(pb.getEbsVolumeThroughput()); + model.setEbsVolumeType(pb.getEbsVolumeType()); + model.setFirstOnDemand(pb.getFirstOnDemand()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setSpotBidPricePercent(pb.getSpotBidPricePercent()); + model.setZoneId(pb.getZoneId()); + + return model; + } + + public static class AwsAttributesSerializer extends JsonSerializer { + @Override + public void serialize(AwsAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsAttributesDeserializer extends JsonDeserializer { + @Override + public AwsAttributes deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsAttributesPb pb = mapper.readValue(p, AwsAttributesPb.class); + return AwsAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributesPb.java new file mode 100755 index 000000000..4f7a4236d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributesPb.java @@ -0,0 +1,180 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during cluster creation which are related to Amazon Web Services. */ +@Generated +class AwsAttributesPb { + @JsonProperty("availability") + private AwsAvailability availability; + + @JsonProperty("ebs_volume_count") + private Long ebsVolumeCount; + + @JsonProperty("ebs_volume_iops") + private Long ebsVolumeIops; + + @JsonProperty("ebs_volume_size") + private Long ebsVolumeSize; + + @JsonProperty("ebs_volume_throughput") + private Long ebsVolumeThroughput; + + @JsonProperty("ebs_volume_type") + private EbsVolumeType ebsVolumeType; + + @JsonProperty("first_on_demand") + private Long firstOnDemand; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("spot_bid_price_percent") + private Long spotBidPricePercent; + + @JsonProperty("zone_id") + private String zoneId; + + public AwsAttributesPb setAvailability(AwsAvailability availability) { + this.availability = availability; + return this; + } + + public AwsAvailability getAvailability() { + return availability; + } + + public AwsAttributesPb setEbsVolumeCount(Long ebsVolumeCount) { + this.ebsVolumeCount = ebsVolumeCount; + return 
this; + } + + public Long getEbsVolumeCount() { + return ebsVolumeCount; + } + + public AwsAttributesPb setEbsVolumeIops(Long ebsVolumeIops) { + this.ebsVolumeIops = ebsVolumeIops; + return this; + } + + public Long getEbsVolumeIops() { + return ebsVolumeIops; + } + + public AwsAttributesPb setEbsVolumeSize(Long ebsVolumeSize) { + this.ebsVolumeSize = ebsVolumeSize; + return this; + } + + public Long getEbsVolumeSize() { + return ebsVolumeSize; + } + + public AwsAttributesPb setEbsVolumeThroughput(Long ebsVolumeThroughput) { + this.ebsVolumeThroughput = ebsVolumeThroughput; + return this; + } + + public Long getEbsVolumeThroughput() { + return ebsVolumeThroughput; + } + + public AwsAttributesPb setEbsVolumeType(EbsVolumeType ebsVolumeType) { + this.ebsVolumeType = ebsVolumeType; + return this; + } + + public EbsVolumeType getEbsVolumeType() { + return ebsVolumeType; + } + + public AwsAttributesPb setFirstOnDemand(Long firstOnDemand) { + this.firstOnDemand = firstOnDemand; + return this; + } + + public Long getFirstOnDemand() { + return firstOnDemand; + } + + public AwsAttributesPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public AwsAttributesPb setSpotBidPricePercent(Long spotBidPricePercent) { + this.spotBidPricePercent = spotBidPricePercent; + return this; + } + + public Long getSpotBidPricePercent() { + return spotBidPricePercent; + } + + public AwsAttributesPb setZoneId(String zoneId) { + this.zoneId = zoneId; + return this; + } + + public String getZoneId() { + return zoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsAttributesPb that = (AwsAttributesPb) o; + return Objects.equals(availability, that.availability) + && Objects.equals(ebsVolumeCount, that.ebsVolumeCount) + && 
Objects.equals(ebsVolumeIops, that.ebsVolumeIops) + && Objects.equals(ebsVolumeSize, that.ebsVolumeSize) + && Objects.equals(ebsVolumeThroughput, that.ebsVolumeThroughput) + && Objects.equals(ebsVolumeType, that.ebsVolumeType) + && Objects.equals(firstOnDemand, that.firstOnDemand) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(spotBidPricePercent, that.spotBidPricePercent) + && Objects.equals(zoneId, that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash( + availability, + ebsVolumeCount, + ebsVolumeIops, + ebsVolumeSize, + ebsVolumeThroughput, + ebsVolumeType, + firstOnDemand, + instanceProfileArn, + spotBidPricePercent, + zoneId); + } + + @Override + public String toString() { + return new ToStringer(AwsAttributesPb.class) + .add("availability", availability) + .add("ebsVolumeCount", ebsVolumeCount) + .add("ebsVolumeIops", ebsVolumeIops) + .add("ebsVolumeSize", ebsVolumeSize) + .add("ebsVolumeThroughput", ebsVolumeThroughput) + .add("ebsVolumeType", ebsVolumeType) + .add("firstOnDemand", firstOnDemand) + .add("instanceProfileArn", instanceProfileArn) + .add("spotBidPricePercent", spotBidPricePercent) + .add("zoneId", zoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java index ec8f49702..860dfe3e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during cluster creation which are related to Microsoft Azure. */ @Generated +@JsonSerialize(using = AzureAttributes.AzureAttributesSerializer.class) +@JsonDeserialize(using = AzureAttributes.AzureAttributesDeserializer.class) public class AzureAttributes { /** * Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If * `first_on_demand` is zero, this availability type will be used for the entire cluster. */ - @JsonProperty("availability") private AzureAvailability availability; /** @@ -26,11 +36,9 @@ public class AzureAttributes { * on `availability` instances. Note that this value does not affect cluster size and cannot * currently be mutated over the lifetime of a cluster. */ - @JsonProperty("first_on_demand") private Long firstOnDemand; /** Defines values necessary to configure and run Azure Log Analytics agent */ - @JsonProperty("log_analytics_info") private LogAnalyticsInfo logAnalyticsInfo; /** @@ -39,7 +47,6 @@ public class AzureAttributes { * which specifies that the instance cannot be evicted on the basis of price, and only on the * basis of availability. Further, the value should > 0 or -1. 
*/ - @JsonProperty("spot_bid_max_price") private Double spotBidMaxPrice; public AzureAttributes setAvailability(AzureAvailability availability) { @@ -103,4 +110,44 @@ public String toString() { .add("spotBidMaxPrice", spotBidMaxPrice) .toString(); } + + AzureAttributesPb toPb() { + AzureAttributesPb pb = new AzureAttributesPb(); + pb.setAvailability(availability); + pb.setFirstOnDemand(firstOnDemand); + pb.setLogAnalyticsInfo(logAnalyticsInfo); + pb.setSpotBidMaxPrice(spotBidMaxPrice); + + return pb; + } + + static AzureAttributes fromPb(AzureAttributesPb pb) { + AzureAttributes model = new AzureAttributes(); + model.setAvailability(pb.getAvailability()); + model.setFirstOnDemand(pb.getFirstOnDemand()); + model.setLogAnalyticsInfo(pb.getLogAnalyticsInfo()); + model.setSpotBidMaxPrice(pb.getSpotBidMaxPrice()); + + return model; + } + + public static class AzureAttributesSerializer extends JsonSerializer { + @Override + public void serialize(AzureAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureAttributesDeserializer extends JsonDeserializer { + @Override + public AzureAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureAttributesPb pb = mapper.readValue(p, AzureAttributesPb.class); + return AzureAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributesPb.java new file mode 100755 index 000000000..b54f36cba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributesPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during cluster creation which are related to Microsoft Azure. */ +@Generated +class AzureAttributesPb { + @JsonProperty("availability") + private AzureAvailability availability; + + @JsonProperty("first_on_demand") + private Long firstOnDemand; + + @JsonProperty("log_analytics_info") + private LogAnalyticsInfo logAnalyticsInfo; + + @JsonProperty("spot_bid_max_price") + private Double spotBidMaxPrice; + + public AzureAttributesPb setAvailability(AzureAvailability availability) { + this.availability = availability; + return this; + } + + public AzureAvailability getAvailability() { + return availability; + } + + public AzureAttributesPb setFirstOnDemand(Long firstOnDemand) { + this.firstOnDemand = firstOnDemand; + return this; + } + + public Long getFirstOnDemand() { + return firstOnDemand; + } + + public AzureAttributesPb setLogAnalyticsInfo(LogAnalyticsInfo logAnalyticsInfo) { + this.logAnalyticsInfo = logAnalyticsInfo; + return this; + } + + public LogAnalyticsInfo getLogAnalyticsInfo() { + return logAnalyticsInfo; + } + + public AzureAttributesPb setSpotBidMaxPrice(Double spotBidMaxPrice) { + 
this.spotBidMaxPrice = spotBidMaxPrice; + return this; + } + + public Double getSpotBidMaxPrice() { + return spotBidMaxPrice; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureAttributesPb that = (AzureAttributesPb) o; + return Objects.equals(availability, that.availability) + && Objects.equals(firstOnDemand, that.firstOnDemand) + && Objects.equals(logAnalyticsInfo, that.logAnalyticsInfo) + && Objects.equals(spotBidMaxPrice, that.spotBidMaxPrice); + } + + @Override + public int hashCode() { + return Objects.hash(availability, firstOnDemand, logAnalyticsInfo, spotBidMaxPrice); + } + + @Override + public String toString() { + return new ToStringer(AzureAttributesPb.class) + .add("availability", availability) + .add("firstOnDemand", firstOnDemand) + .add("logAnalyticsInfo", logAnalyticsInfo) + .add("spotBidMaxPrice", spotBidMaxPrice) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java index 2ddfa2fb9..87599082d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelCommand.CancelCommandSerializer.class) +@JsonDeserialize(using = CancelCommand.CancelCommandDeserializer.class) public class CancelCommand { /** */ - @JsonProperty("clusterId") private String clusterId; /** */ - @JsonProperty("commandId") private String commandId; /** */ - @JsonProperty("contextId") private String contextId; public CancelCommand setClusterId(String clusterId) { @@ -71,4 +79,41 @@ public String toString() { .add("contextId", contextId) .toString(); } + + CancelCommandPb toPb() { + CancelCommandPb pb = new CancelCommandPb(); + pb.setClusterId(clusterId); + pb.setCommandId(commandId); + pb.setContextId(contextId); + + return pb; + } + + static CancelCommand fromPb(CancelCommandPb pb) { + CancelCommand model = new CancelCommand(); + model.setClusterId(pb.getClusterId()); + model.setCommandId(pb.getCommandId()); + model.setContextId(pb.getContextId()); + + return model; + } + + public static class CancelCommandSerializer extends JsonSerializer { + @Override + public void serialize(CancelCommand value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelCommandPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelCommandDeserializer extends JsonDeserializer { + @Override + public CancelCommand deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelCommandPb pb = mapper.readValue(p, CancelCommandPb.class); + return CancelCommand.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommandPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommandPb.java new file mode 100755 index 000000000..0de61a496 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommandPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CancelCommandPb { + @JsonProperty("clusterId") + private String clusterId; + + @JsonProperty("commandId") + private String commandId; + + @JsonProperty("contextId") + private String contextId; + + public CancelCommandPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public CancelCommandPb setCommandId(String commandId) { + this.commandId = commandId; + return this; + } + + public String getCommandId() { + return commandId; + } + + public CancelCommandPb setContextId(String contextId) { + this.contextId = contextId; + return this; + } + + public String getContextId() { + return contextId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelCommandPb that = (CancelCommandPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(commandId, that.commandId) + && Objects.equals(contextId, that.contextId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, commandId, contextId); + } + + 
@Override + public String toString() { + return new ToStringer(CancelCommandPb.class) + .add("clusterId", clusterId) + .add("commandId", commandId) + .add("contextId", contextId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java index 2109537fd..e431e4ffc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelResponse.CancelResponseSerializer.class) +@JsonDeserialize(using = CancelResponse.CancelResponseDeserializer.class) public class CancelResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(CancelResponse.class).toString(); } + + CancelResponsePb toPb() { + CancelResponsePb pb = new CancelResponsePb(); + + return pb; + } + + static CancelResponse fromPb(CancelResponsePb pb) { + CancelResponse model = new CancelResponse(); + + return model; + } + + public static class CancelResponseSerializer extends JsonSerializer { + @Override + public void serialize(CancelResponse value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + CancelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelResponseDeserializer extends JsonDeserializer { + @Override + public CancelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelResponsePb pb = mapper.readValue(p, CancelResponsePb.class); + return CancelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponsePb.java new file mode 100755 index 000000000..512fe1dfd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CancelResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CancelResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java index 2bd85e237..bc50a3420 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ChangeClusterOwner.ChangeClusterOwnerSerializer.class) +@JsonDeserialize(using = ChangeClusterOwner.ChangeClusterOwnerDeserializer.class) public class ChangeClusterOwner { /** */ - @JsonProperty("cluster_id") private String clusterId; /** New owner 
of the cluster_id after this RPC. */ - @JsonProperty("owner_username") private String ownerUsername; public ChangeClusterOwner setClusterId(String clusterId) { @@ -56,4 +65,40 @@ public String toString() { .add("ownerUsername", ownerUsername) .toString(); } + + ChangeClusterOwnerPb toPb() { + ChangeClusterOwnerPb pb = new ChangeClusterOwnerPb(); + pb.setClusterId(clusterId); + pb.setOwnerUsername(ownerUsername); + + return pb; + } + + static ChangeClusterOwner fromPb(ChangeClusterOwnerPb pb) { + ChangeClusterOwner model = new ChangeClusterOwner(); + model.setClusterId(pb.getClusterId()); + model.setOwnerUsername(pb.getOwnerUsername()); + + return model; + } + + public static class ChangeClusterOwnerSerializer extends JsonSerializer { + @Override + public void serialize(ChangeClusterOwner value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ChangeClusterOwnerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ChangeClusterOwnerDeserializer extends JsonDeserializer { + @Override + public ChangeClusterOwner deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ChangeClusterOwnerPb pb = mapper.readValue(p, ChangeClusterOwnerPb.class); + return ChangeClusterOwner.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerPb.java new file mode 100755 index 000000000..5282e0cc0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ChangeClusterOwnerPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("owner_username") + private String ownerUsername; + + public ChangeClusterOwnerPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ChangeClusterOwnerPb setOwnerUsername(String ownerUsername) { + this.ownerUsername = ownerUsername; + return this; + } + + public String getOwnerUsername() { + return ownerUsername; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ChangeClusterOwnerPb that = (ChangeClusterOwnerPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(ownerUsername, that.ownerUsername); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, ownerUsername); + } + + @Override + public String toString() { + return new ToStringer(ChangeClusterOwnerPb.class) + .add("clusterId", clusterId) + .add("ownerUsername", ownerUsername) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java index 36b86fe5f..d32e2d384 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ChangeClusterOwnerResponse.ChangeClusterOwnerResponseSerializer.class) +@JsonDeserialize(using = ChangeClusterOwnerResponse.ChangeClusterOwnerResponseDeserializer.class) public class ChangeClusterOwnerResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(ChangeClusterOwnerResponse.class).toString(); } + + ChangeClusterOwnerResponsePb toPb() { + ChangeClusterOwnerResponsePb pb = new ChangeClusterOwnerResponsePb(); + + return pb; + } + + static ChangeClusterOwnerResponse fromPb(ChangeClusterOwnerResponsePb pb) { + ChangeClusterOwnerResponse model = new ChangeClusterOwnerResponse(); + + return model; + } + + public static class ChangeClusterOwnerResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ChangeClusterOwnerResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ChangeClusterOwnerResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ChangeClusterOwnerResponseDeserializer + extends JsonDeserializer { + @Override + public ChangeClusterOwnerResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ChangeClusterOwnerResponsePb pb = mapper.readValue(p, ChangeClusterOwnerResponsePb.class); + return ChangeClusterOwnerResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponsePb.java new file mode 100755 index 000000000..04a7eb9bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ChangeClusterOwnerResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ChangeClusterOwnerResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java index e2004d8fc..bf7a788c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClientsTypes.ClientsTypesSerializer.class) +@JsonDeserialize(using = ClientsTypes.ClientsTypesDeserializer.class) public class ClientsTypes { /** With jobs set, the cluster can be used for jobs */ - @JsonProperty("jobs") private Boolean jobs; /** With notebooks set, this cluster can be used for notebooks */ - @JsonProperty("notebooks") private Boolean notebooks; public ClientsTypes setJobs(Boolean jobs) { @@ -55,4 +64,39 @@ public String toString() { .add("notebooks", notebooks) .toString(); } + + ClientsTypesPb toPb() { + ClientsTypesPb pb = new ClientsTypesPb(); + pb.setJobs(jobs); + pb.setNotebooks(notebooks); + + return pb; + } + + static ClientsTypes fromPb(ClientsTypesPb pb) { + ClientsTypes model = new ClientsTypes(); + model.setJobs(pb.getJobs()); + model.setNotebooks(pb.getNotebooks()); + + return model; + } + + public static class ClientsTypesSerializer extends JsonSerializer { + @Override + public void serialize(ClientsTypes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClientsTypesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClientsTypesDeserializer extends JsonDeserializer { + @Override + public ClientsTypes deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClientsTypesPb pb = mapper.readValue(p, ClientsTypesPb.class); + return ClientsTypes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypesPb.java new file mode 100755 index 000000000..99e242fb0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypesPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClientsTypesPb { + @JsonProperty("jobs") + private Boolean jobs; + + @JsonProperty("notebooks") + private Boolean notebooks; + + public ClientsTypesPb setJobs(Boolean jobs) { + this.jobs = jobs; + return this; + } + + public Boolean getJobs() { + return jobs; + } + + public ClientsTypesPb setNotebooks(Boolean notebooks) { + this.notebooks = notebooks; + return this; + } + + public Boolean getNotebooks() { + return notebooks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClientsTypesPb that = (ClientsTypesPb) o; + return Objects.equals(jobs, that.jobs) && Objects.equals(notebooks, that.notebooks); + } + + @Override + public int hashCode() { + return Objects.hash(jobs, notebooks); + } + + @Override + public String toString() { + return new ToStringer(ClientsTypesPb.class) + .add("jobs", jobs) + .add("notebooks", notebooks) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java 
index 8846cf8aa..128ac3a2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CloneCluster.CloneClusterSerializer.class) +@JsonDeserialize(using = CloneCluster.CloneClusterDeserializer.class) public class CloneCluster { /** The cluster that is being cloned. 
*/ - @JsonProperty("source_cluster_id") private String sourceClusterId; public CloneCluster setSourceClusterId(String sourceClusterId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(CloneCluster.class).add("sourceClusterId", sourceClusterId).toString(); } + + CloneClusterPb toPb() { + CloneClusterPb pb = new CloneClusterPb(); + pb.setSourceClusterId(sourceClusterId); + + return pb; + } + + static CloneCluster fromPb(CloneClusterPb pb) { + CloneCluster model = new CloneCluster(); + model.setSourceClusterId(pb.getSourceClusterId()); + + return model; + } + + public static class CloneClusterSerializer extends JsonSerializer { + @Override + public void serialize(CloneCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CloneClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloneClusterDeserializer extends JsonDeserializer { + @Override + public CloneCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CloneClusterPb pb = mapper.readValue(p, CloneClusterPb.class); + return CloneCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneClusterPb.java new file mode 100755 index 000000000..3cc9548b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CloneClusterPb { + @JsonProperty("source_cluster_id") + private String sourceClusterId; + + public CloneClusterPb setSourceClusterId(String sourceClusterId) { + this.sourceClusterId = sourceClusterId; + return this; + } + + public String getSourceClusterId() { + return sourceClusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CloneClusterPb that = (CloneClusterPb) o; + return Objects.equals(sourceClusterId, that.sourceClusterId); + } + + @Override + public int hashCode() { + return Objects.hash(sourceClusterId); + } + + @Override + public String toString() { + return new ToStringer(CloneClusterPb.class).add("sourceClusterId", sourceClusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java index ca72a882b..b35550d6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CloudProviderNodeInfo.CloudProviderNodeInfoSerializer.class) +@JsonDeserialize(using = CloudProviderNodeInfo.CloudProviderNodeInfoDeserializer.class) public class CloudProviderNodeInfo { /** Status as reported by the cloud provider */ - @JsonProperty("status") private Collection status; public CloudProviderNodeInfo setStatus(Collection status) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CloudProviderNodeInfo.class).add("status", status).toString(); } + + CloudProviderNodeInfoPb toPb() { + CloudProviderNodeInfoPb pb = new CloudProviderNodeInfoPb(); + pb.setStatus(status); + + return pb; + } + + static CloudProviderNodeInfo fromPb(CloudProviderNodeInfoPb pb) { + CloudProviderNodeInfo model = new CloudProviderNodeInfo(); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class CloudProviderNodeInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + CloudProviderNodeInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CloudProviderNodeInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloudProviderNodeInfoDeserializer + extends JsonDeserializer { + @Override + public CloudProviderNodeInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CloudProviderNodeInfoPb pb = mapper.readValue(p, CloudProviderNodeInfoPb.class); + return CloudProviderNodeInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfoPb.java similarity index 59% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfoPb.java index 9de9b2743..39908ce45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfoPb.java @@ -1,24 +1,24 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated -public class PollQueryStatusResponseData { - /** */ +class CloudProviderNodeInfoPb { @JsonProperty("status") - private QueryResponseStatus status; + private Collection status; - public PollQueryStatusResponseData setStatus(QueryResponseStatus status) { + public CloudProviderNodeInfoPb setStatus(Collection status) { this.status = status; return this; } - public QueryResponseStatus getStatus() { + public Collection getStatus() { return status; } @@ -26,7 +26,7 @@ public QueryResponseStatus getStatus() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - PollQueryStatusResponseData that = (PollQueryStatusResponseData) 
o; + CloudProviderNodeInfoPb that = (CloudProviderNodeInfoPb) o; return Objects.equals(status, that.status); } @@ -37,6 +37,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString(); + return new ToStringer(CloudProviderNodeInfoPb.class).add("status", status).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java index c16994224..4ee34f717 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterAccessControlRequest.ClusterAccessControlRequestSerializer.class) +@JsonDeserialize(using = ClusterAccessControlRequest.ClusterAccessControlRequestDeserializer.class) public class ClusterAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private 
ClusterPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ClusterAccessControlRequest setGroupName(String groupName) { @@ -86,4 +93,47 @@ public String toString() { .add("userName", userName) .toString(); } + + ClusterAccessControlRequestPb toPb() { + ClusterAccessControlRequestPb pb = new ClusterAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ClusterAccessControlRequest fromPb(ClusterAccessControlRequestPb pb) { + ClusterAccessControlRequest model = new ClusterAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ClusterAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public ClusterAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAccessControlRequestPb pb = mapper.readValue(p, ClusterAccessControlRequestPb.class); + return ClusterAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequestPb.java new file mode 100755 index 000000000..e0d4b0aef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private ClusterPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ClusterAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ClusterAccessControlRequestPb setPermissionLevel(ClusterPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ClusterPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ClusterAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ClusterAccessControlRequestPb 
setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAccessControlRequestPb that = (ClusterAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ClusterAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponse.java index c120dec4f..38022f2e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponse.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterAccessControlResponse.ClusterAccessControlResponseSerializer.class) +@JsonDeserialize( + using = ClusterAccessControlResponse.ClusterAccessControlResponseDeserializer.class) public class ClusterAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ClusterAccessControlResponse setAllPermissions( @@ -103,4 +110,49 @@ public String toString() { .add("userName", userName) .toString(); } + + ClusterAccessControlResponsePb toPb() { + ClusterAccessControlResponsePb pb = new ClusterAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ClusterAccessControlResponse fromPb(ClusterAccessControlResponsePb pb) { + ClusterAccessControlResponse model = new ClusterAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ClusterAccessControlResponseSerializer + extends JsonSerializer { + 
@Override + public void serialize( + ClusterAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public ClusterAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAccessControlResponsePb pb = mapper.readValue(p, ClusterAccessControlResponsePb.class); + return ClusterAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponsePb.java new file mode 100755 index 000000000..9f2be8896 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ClusterAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public ClusterAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ClusterAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ClusterAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ClusterAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAccessControlResponsePb that = (ClusterAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, 
that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ClusterAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index c359b9f8a..eb9354123 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -14,6 +23,8 @@ * lifetime of a cluster. 
*/ @Generated +@JsonSerialize(using = ClusterAttributes.ClusterAttributesSerializer.class) +@JsonDeserialize(using = ClusterAttributes.ClusterAttributesDeserializer.class) public class ClusterAttributes { /** * Automatically terminates the cluster after it is inactive for this time in minutes. If not set, @@ -21,21 +32,18 @@ public class ClusterAttributes { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** @@ -45,7 +53,6 @@ public class ClusterAttributes { * every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** @@ -53,7 +60,6 @@ public class ClusterAttributes { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. */ - @JsonProperty("cluster_name") private String clusterName; /** @@ -65,7 +71,6 @@ public class ClusterAttributes { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -95,18 +100,15 @@ public class ClusterAttributes { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -117,7 +119,6 @@ public class ClusterAttributes { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -125,18 +126,15 @@ public class ClusterAttributes { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -144,11 +142,9 @@ public class ClusterAttributes { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -157,7 +153,6 @@ public class ClusterAttributes { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** @@ -175,7 +170,6 @@ public class ClusterAttributes { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** @@ -184,21 +178,11 @@ public class ClusterAttributes { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -208,11 +192,9 @@ public class ClusterAttributes { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -220,7 +202,6 @@ public class ClusterAttributes { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -236,14 +217,12 @@ public class ClusterAttributes { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** @@ -251,24 +230,17 @@ public class ClusterAttributes { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public ClusterAttributes setAutoterminationMinutes(Long autoterminationMinutes) { @@ -442,24 +414,6 @@ public String getPolicyId() { return policyId; } - public ClusterAttributes setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterAttributes setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterAttributes setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -514,16 +468,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public ClusterAttributes setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterAttributes setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -566,15 +510,12 @@ public boolean equals(Object o) { && Objects.equals(kind, that.kind) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && 
Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -601,15 +542,12 @@ public int hashCode() { kind, nodeTypeId, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -636,17 +574,100 @@ public String toString() { .add("kind", kind) .add("nodeTypeId", nodeTypeId) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + ClusterAttributesPb toPb() { + ClusterAttributesPb pb = new ClusterAttributesPb(); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterName(clusterName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDockerImage(dockerImage); + pb.setDriverInstancePoolId(driverInstancePoolId); + pb.setDriverNodeTypeId(driverNodeTypeId); + pb.setEnableElasticDisk(enableElasticDisk); + 
pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setKind(kind); + pb.setNodeTypeId(nodeTypeId); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSshPublicKeys(sshPublicKeys); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ClusterAttributes fromPb(ClusterAttributesPb pb) { + ClusterAttributes model = new ClusterAttributes(); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterName(pb.getClusterName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDockerImage(pb.getDockerImage()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setKind(pb.getKind()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setPolicyId(pb.getPolicyId()); + model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + model.setSparkConf(pb.getSparkConf()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + 
model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ClusterAttributesSerializer extends JsonSerializer { + @Override + public void serialize(ClusterAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAttributesDeserializer extends JsonDeserializer { + @Override + public ClusterAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAttributesPb pb = mapper.readValue(p, ClusterAttributesPb.class); + return ClusterAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributesPb.java new file mode 100755 index 000000000..d3bffb7f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributesPb.java @@ -0,0 +1,440 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +/** + * Common set of attributes set during cluster creation. These attributes cannot be changed over the + * lifetime of a cluster. 
+ */ +@Generated +class ClusterAttributesPb { + @JsonProperty("autotermination_minutes") + private Long autoterminationMinutes; + + @JsonProperty("aws_attributes") + private AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private AzureAttributes azureAttributes; + + @JsonProperty("cluster_log_conf") + private ClusterLogConf clusterLogConf; + + @JsonProperty("cluster_name") + private String clusterName; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("data_security_mode") + private DataSecurityMode dataSecurityMode; + + @JsonProperty("docker_image") + private DockerImage dockerImage; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("gcp_attributes") + private GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("is_single_node") + private Boolean isSingleNode; + + @JsonProperty("kind") + private Kind kind; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("runtime_engine") + private RuntimeEngine runtimeEngine; + + @JsonProperty("single_user_name") + private String singleUserName; + + @JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("spark_version") + private String sparkVersion; + + @JsonProperty("ssh_public_keys") + private Collection sshPublicKeys; + + @JsonProperty("use_ml_runtime") + private Boolean useMlRuntime; + + @JsonProperty("workload_type") + private WorkloadType workloadType; + + public ClusterAttributesPb 
setAutoterminationMinutes(Long autoterminationMinutes) { + this.autoterminationMinutes = autoterminationMinutes; + return this; + } + + public Long getAutoterminationMinutes() { + return autoterminationMinutes; + } + + public ClusterAttributesPb setAwsAttributes(AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public AwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public ClusterAttributesPb setAzureAttributes(AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public ClusterAttributesPb setClusterLogConf(ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public ClusterLogConf getClusterLogConf() { + return clusterLogConf; + } + + public ClusterAttributesPb setClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public String getClusterName() { + return clusterName; + } + + public ClusterAttributesPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public ClusterAttributesPb setDataSecurityMode(DataSecurityMode dataSecurityMode) { + this.dataSecurityMode = dataSecurityMode; + return this; + } + + public DataSecurityMode getDataSecurityMode() { + return dataSecurityMode; + } + + public ClusterAttributesPb setDockerImage(DockerImage dockerImage) { + this.dockerImage = dockerImage; + return this; + } + + public DockerImage getDockerImage() { + return dockerImage; + } + + public ClusterAttributesPb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public ClusterAttributesPb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = 
driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public ClusterAttributesPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public ClusterAttributesPb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public ClusterAttributesPb setGcpAttributes(GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public ClusterAttributesPb setInitScripts(Collection initScripts) { + this.initScripts = initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public ClusterAttributesPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public ClusterAttributesPb setIsSingleNode(Boolean isSingleNode) { + this.isSingleNode = isSingleNode; + return this; + } + + public Boolean getIsSingleNode() { + return isSingleNode; + } + + public ClusterAttributesPb setKind(Kind kind) { + this.kind = kind; + return this; + } + + public Kind getKind() { + return kind; + } + + public ClusterAttributesPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public ClusterAttributesPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public ClusterAttributesPb setRuntimeEngine(RuntimeEngine runtimeEngine) { + this.runtimeEngine = runtimeEngine; + 
return this; + } + + public RuntimeEngine getRuntimeEngine() { + return runtimeEngine; + } + + public ClusterAttributesPb setSingleUserName(String singleUserName) { + this.singleUserName = singleUserName; + return this; + } + + public String getSingleUserName() { + return singleUserName; + } + + public ClusterAttributesPb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public ClusterAttributesPb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public ClusterAttributesPb setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + return this; + } + + public String getSparkVersion() { + return sparkVersion; + } + + public ClusterAttributesPb setSshPublicKeys(Collection sshPublicKeys) { + this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + public ClusterAttributesPb setUseMlRuntime(Boolean useMlRuntime) { + this.useMlRuntime = useMlRuntime; + return this; + } + + public Boolean getUseMlRuntime() { + return useMlRuntime; + } + + public ClusterAttributesPb setWorkloadType(WorkloadType workloadType) { + this.workloadType = workloadType; + return this; + } + + public WorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAttributesPb that = (ClusterAttributesPb) o; + return Objects.equals(autoterminationMinutes, that.autoterminationMinutes) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(clusterName, that.clusterName) + && Objects.equals(customTags, that.customTags) + && 
Objects.equals(dataSecurityMode, that.dataSecurityMode) + && Objects.equals(dockerImage, that.dockerImage) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(isSingleNode, that.isSingleNode) + && Objects.equals(kind, that.kind) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(policyId, that.policyId) + && Objects.equals(runtimeEngine, that.runtimeEngine) + && Objects.equals(singleUserName, that.singleUserName) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sparkVersion, that.sparkVersion) + && Objects.equals(sshPublicKeys, that.sshPublicKeys) + && Objects.equals(useMlRuntime, that.useMlRuntime) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoterminationMinutes, + awsAttributes, + azureAttributes, + clusterLogConf, + clusterName, + customTags, + dataSecurityMode, + dockerImage, + driverInstancePoolId, + driverNodeTypeId, + enableElasticDisk, + enableLocalDiskEncryption, + gcpAttributes, + initScripts, + instancePoolId, + isSingleNode, + kind, + nodeTypeId, + policyId, + runtimeEngine, + singleUserName, + sparkConf, + sparkEnvVars, + sparkVersion, + sshPublicKeys, + useMlRuntime, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(ClusterAttributesPb.class) + .add("autoterminationMinutes", autoterminationMinutes) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("clusterLogConf", clusterLogConf) + .add("clusterName", 
clusterName) + .add("customTags", customTags) + .add("dataSecurityMode", dataSecurityMode) + .add("dockerImage", dockerImage) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableElasticDisk", enableElasticDisk) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", instancePoolId) + .add("isSingleNode", isSingleNode) + .add("kind", kind) + .add("nodeTypeId", nodeTypeId) + .add("policyId", policyId) + .add("runtimeEngine", runtimeEngine) + .add("singleUserName", singleUserName) + .add("sparkConf", sparkConf) + .add("sparkEnvVars", sparkEnvVars) + .add("sparkVersion", sparkVersion) + .add("sshPublicKeys", sshPublicKeys) + .add("useMlRuntime", useMlRuntime) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliance.java index 35489f994..512d3ab51 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliance.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterCompliance.ClusterComplianceSerializer.class) +@JsonDeserialize(using = ClusterCompliance.ClusterComplianceDeserializer.class) public class ClusterCompliance { /** Canonical unique identifier for a cluster. */ - @JsonProperty("cluster_id") private String clusterId; /** Whether this cluster is in compliance with the latest version of its policy. */ - @JsonProperty("is_compliant") private Boolean isCompliant; /** @@ -23,7 +32,6 @@ public class ClusterCompliance { * The keys indicate the path where the policy validation error is occurring. The values indicate * an error message describing the policy validation error. */ - @JsonProperty("violations") private Map violations; public ClusterCompliance setClusterId(String clusterId) { @@ -76,4 +84,42 @@ public String toString() { .add("violations", violations) .toString(); } + + ClusterCompliancePb toPb() { + ClusterCompliancePb pb = new ClusterCompliancePb(); + pb.setClusterId(clusterId); + pb.setIsCompliant(isCompliant); + pb.setViolations(violations); + + return pb; + } + + static ClusterCompliance fromPb(ClusterCompliancePb pb) { + ClusterCompliance model = new ClusterCompliance(); + model.setClusterId(pb.getClusterId()); + model.setIsCompliant(pb.getIsCompliant()); + model.setViolations(pb.getViolations()); + + return model; + } + + public static class ClusterComplianceSerializer extends JsonSerializer { + @Override + public void serialize(ClusterCompliance value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterCompliancePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterComplianceDeserializer extends JsonDeserializer { + @Override + public ClusterCompliance deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterCompliancePb pb = mapper.readValue(p, ClusterCompliancePb.class); + return ClusterCompliance.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliancePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliancePb.java new file mode 100755 index 000000000..b11f1c6c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterCompliancePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ClusterCompliancePb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("is_compliant") + private Boolean isCompliant; + + @JsonProperty("violations") + private Map violations; + + public ClusterCompliancePb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ClusterCompliancePb setIsCompliant(Boolean isCompliant) { + this.isCompliant = isCompliant; + return this; + } + + public Boolean getIsCompliant() { + return isCompliant; + } + + public ClusterCompliancePb setViolations(Map violations) { + this.violations = violations; + return this; + } + + public Map getViolations() { + return violations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterCompliancePb that = (ClusterCompliancePb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(isCompliant, that.isCompliant) + && Objects.equals(violations, that.violations); + } + + 
@Override + public int hashCode() { + return Objects.hash(clusterId, isCompliant, violations); + } + + @Override + public String toString() { + return new ToStringer(ClusterCompliancePb.class) + .add("clusterId", clusterId) + .add("isCompliant", isCompliant) + .add("violations", violations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index b2309f101..14389b1e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -4,19 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; /** Describes all of the metadata about a single Spark cluster in Databricks. */ @Generated +@JsonSerialize(using = ClusterDetails.ClusterDetailsSerializer.class) +@JsonDeserialize(using = ClusterDetails.ClusterDetailsDeserializer.class) public class ClusterDetails { /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. 
*/ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -25,21 +35,18 @@ public class ClusterDetails { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** @@ -47,14 +54,12 @@ public class ClusterDetails { * cores, since certain node types are configured to share cores between Spark nodes on the same * instance. */ - @JsonProperty("cluster_cores") private Double clusterCores; /** * Canonical identifier for the cluster. This id is retained during cluster restarts and resizes, * while each new cluster has a globally unique id. */ - @JsonProperty("cluster_id") private String clusterId; /** @@ -64,15 +69,12 @@ public class ClusterDetails { * every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** Cluster log delivery status. */ - @JsonProperty("cluster_log_status") private LogSyncStatus clusterLogStatus; /** Total amount of cluster memory, in megabytes */ - @JsonProperty("cluster_memory_mb") private Long clusterMemoryMb; /** @@ -80,21 +82,18 @@ public class ClusterDetails { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. 
*/ - @JsonProperty("cluster_name") private String clusterName; /** * Determines whether the cluster was created by a user through the UI, created by the Databricks * Jobs Scheduler, or through an API request. */ - @JsonProperty("cluster_source") private ClusterSource clusterSource; /** * Creator user name. The field won't be included in the response if the user has already been * deleted. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** @@ -106,7 +105,6 @@ public class ClusterDetails { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -136,7 +134,6 @@ public class ClusterDetails { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** @@ -152,25 +149,21 @@ public class ClusterDetails { * *

- Name: */ - @JsonProperty("default_tags") private Map defaultTags; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * Node on which the Spark driver resides. The driver node contains the Spark master and the * Databricks application that manages the per-notebook Spark REPLs. */ - @JsonProperty("driver") private SparkNode driver; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -181,7 +174,6 @@ public class ClusterDetails { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -189,22 +181,18 @@ public class ClusterDetails { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** Nodes on which the Spark executors reside. */ - @JsonProperty("executors") private Collection executors; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -212,11 +200,9 @@ public class ClusterDetails { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -225,14 +211,12 @@ public class ClusterDetails { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** * Port on which Spark JDBC server is listening, in the driver nod. No service will be listeningon * on this port in executor nodes. */ - @JsonProperty("jdbc_port") private Long jdbcPort; /** @@ -250,15 +234,12 @@ public class ClusterDetails { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** the timestamp that the cluster was started/restarted */ - @JsonProperty("last_restarted_time") private Long lastRestartedTime; /** Time when the cluster driver last lost its state (due to a restart or driver failure). */ - @JsonProperty("last_state_loss_time") private Long lastStateLossTime; /** @@ -267,7 +248,6 @@ public class ClusterDetails { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -280,21 +260,11 @@ public class ClusterDetails { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -304,11 +274,9 @@ public class ClusterDetails { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -316,7 +284,6 @@ public class ClusterDetails { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -324,7 +291,6 @@ public class ClusterDetails { * The pair `(cluster_id, spark_context_id)` is a globally unique identifier over all Spark * contexts. */ - @JsonProperty("spark_context_id") private Long sparkContextId; /** @@ -340,21 +306,18 @@ public class ClusterDetails { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** * The spec contains a snapshot of the latest user specified settings that were used to * create/edit the cluster. Note: not included in the response of the ListClusters API. */ - @JsonProperty("spec") private ClusterSpec spec; /** @@ -362,53 +325,41 @@ public class ClusterDetails { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; /** * Time (in epoch milliseconds) when the cluster creation request was received (when the cluster * entered a `PENDING` state). 
*/ - @JsonProperty("start_time") private Long startTime; /** Current state of the cluster. */ - @JsonProperty("state") private State state; /** * A message associated with the most recent state transition (e.g., the reason why the cluster * entered a `TERMINATED` state). */ - @JsonProperty("state_message") private String stateMessage; /** Time (in epoch milliseconds) when the cluster was terminated, if applicable. */ - @JsonProperty("terminated_time") private Long terminatedTime; /** * Information about why the cluster was terminated. This field only appears when the cluster is * in a `TERMINATING` or `TERMINATED` state. */ - @JsonProperty("termination_reason") private TerminationReason terminationReason; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public ClusterDetails setAutoscale(AutoScale autoscale) { @@ -708,24 +659,6 @@ public String getPolicyId() { return policyId; } - public ClusterDetails setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterDetails setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterDetails setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -843,16 +776,6 @@ public TerminationReason getTerminationReason() { return terminationReason; } - public ClusterDetails setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterDetails setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -909,8 +832,6 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, 
that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) @@ -924,7 +845,6 @@ public boolean equals(Object o) { && Objects.equals(stateMessage, that.stateMessage) && Objects.equals(terminatedTime, that.terminatedTime) && Objects.equals(terminationReason, that.terminationReason) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -965,8 +885,6 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, @@ -980,7 +898,6 @@ public int hashCode() { stateMessage, terminatedTime, terminationReason, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -1021,8 +938,6 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) @@ -1036,9 +951,136 @@ public String toString() { .add("stateMessage", stateMessage) .add("terminatedTime", terminatedTime) .add("terminationReason", terminationReason) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + ClusterDetailsPb toPb() { + ClusterDetailsPb pb = new ClusterDetailsPb(); + pb.setAutoscale(autoscale); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterCores(clusterCores); + pb.setClusterId(clusterId); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterLogStatus(clusterLogStatus); + 
pb.setClusterMemoryMb(clusterMemoryMb); + pb.setClusterName(clusterName); + pb.setClusterSource(clusterSource); + pb.setCreatorUserName(creatorUserName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDefaultTags(defaultTags); + pb.setDockerImage(dockerImage); + pb.setDriver(driver); + pb.setDriverInstancePoolId(driverInstancePoolId); + pb.setDriverNodeTypeId(driverNodeTypeId); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setExecutors(executors); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setJdbcPort(jdbcPort); + pb.setKind(kind); + pb.setLastRestartedTime(lastRestartedTime); + pb.setLastStateLossTime(lastStateLossTime); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkContextId(sparkContextId); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSpec(spec); + pb.setSshPublicKeys(sshPublicKeys); + pb.setStartTime(startTime); + pb.setState(state); + pb.setStateMessage(stateMessage); + pb.setTerminatedTime(terminatedTime); + pb.setTerminationReason(terminationReason); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ClusterDetails fromPb(ClusterDetailsPb pb) { + ClusterDetails model = new ClusterDetails(); + model.setAutoscale(pb.getAutoscale()); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterCores(pb.getClusterCores()); + model.setClusterId(pb.getClusterId()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterLogStatus(pb.getClusterLogStatus()); + 
model.setClusterMemoryMb(pb.getClusterMemoryMb()); + model.setClusterName(pb.getClusterName()); + model.setClusterSource(pb.getClusterSource()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDefaultTags(pb.getDefaultTags()); + model.setDockerImage(pb.getDockerImage()); + model.setDriver(pb.getDriver()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setExecutors(pb.getExecutors()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setJdbcPort(pb.getJdbcPort()); + model.setKind(pb.getKind()); + model.setLastRestartedTime(pb.getLastRestartedTime()); + model.setLastStateLossTime(pb.getLastStateLossTime()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + model.setSparkConf(pb.getSparkConf()); + model.setSparkContextId(pb.getSparkContextId()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSpec(pb.getSpec()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + model.setStartTime(pb.getStartTime()); + model.setState(pb.getState()); + model.setStateMessage(pb.getStateMessage()); + model.setTerminatedTime(pb.getTerminatedTime()); + model.setTerminationReason(pb.getTerminationReason()); + model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ClusterDetailsSerializer extends JsonSerializer { + 
@Override + public void serialize(ClusterDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterDetailsDeserializer extends JsonDeserializer { + @Override + public ClusterDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterDetailsPb pb = mapper.readValue(p, ClusterDetailsPb.class); + return ClusterDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetailsPb.java new file mode 100755 index 000000000..741902fbf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetailsPb.java @@ -0,0 +1,752 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +/** Describes all of the metadata about a single Spark cluster in Databricks. 
*/ +@Generated +class ClusterDetailsPb { + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("autotermination_minutes") + private Long autoterminationMinutes; + + @JsonProperty("aws_attributes") + private AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private AzureAttributes azureAttributes; + + @JsonProperty("cluster_cores") + private Double clusterCores; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("cluster_log_conf") + private ClusterLogConf clusterLogConf; + + @JsonProperty("cluster_log_status") + private LogSyncStatus clusterLogStatus; + + @JsonProperty("cluster_memory_mb") + private Long clusterMemoryMb; + + @JsonProperty("cluster_name") + private String clusterName; + + @JsonProperty("cluster_source") + private ClusterSource clusterSource; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("data_security_mode") + private DataSecurityMode dataSecurityMode; + + @JsonProperty("default_tags") + private Map defaultTags; + + @JsonProperty("docker_image") + private DockerImage dockerImage; + + @JsonProperty("driver") + private SparkNode driver; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("executors") + private Collection executors; + + @JsonProperty("gcp_attributes") + private GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("is_single_node") + private Boolean isSingleNode; + + @JsonProperty("jdbc_port") + private Long jdbcPort; + + 
@JsonProperty("kind") + private Kind kind; + + @JsonProperty("last_restarted_time") + private Long lastRestartedTime; + + @JsonProperty("last_state_loss_time") + private Long lastStateLossTime; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_workers") + private Long numWorkers; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("runtime_engine") + private RuntimeEngine runtimeEngine; + + @JsonProperty("single_user_name") + private String singleUserName; + + @JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_context_id") + private Long sparkContextId; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("spark_version") + private String sparkVersion; + + @JsonProperty("spec") + private ClusterSpec spec; + + @JsonProperty("ssh_public_keys") + private Collection sshPublicKeys; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("state") + private State state; + + @JsonProperty("state_message") + private String stateMessage; + + @JsonProperty("terminated_time") + private Long terminatedTime; + + @JsonProperty("termination_reason") + private TerminationReason terminationReason; + + @JsonProperty("use_ml_runtime") + private Boolean useMlRuntime; + + @JsonProperty("workload_type") + private WorkloadType workloadType; + + public ClusterDetailsPb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public ClusterDetailsPb setAutoterminationMinutes(Long autoterminationMinutes) { + this.autoterminationMinutes = autoterminationMinutes; + return this; + } + + public Long getAutoterminationMinutes() { + return autoterminationMinutes; + } + + public ClusterDetailsPb setAwsAttributes(AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public AwsAttributes getAwsAttributes() { + return awsAttributes; + } + 
+ public ClusterDetailsPb setAzureAttributes(AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public ClusterDetailsPb setClusterCores(Double clusterCores) { + this.clusterCores = clusterCores; + return this; + } + + public Double getClusterCores() { + return clusterCores; + } + + public ClusterDetailsPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ClusterDetailsPb setClusterLogConf(ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public ClusterLogConf getClusterLogConf() { + return clusterLogConf; + } + + public ClusterDetailsPb setClusterLogStatus(LogSyncStatus clusterLogStatus) { + this.clusterLogStatus = clusterLogStatus; + return this; + } + + public LogSyncStatus getClusterLogStatus() { + return clusterLogStatus; + } + + public ClusterDetailsPb setClusterMemoryMb(Long clusterMemoryMb) { + this.clusterMemoryMb = clusterMemoryMb; + return this; + } + + public Long getClusterMemoryMb() { + return clusterMemoryMb; + } + + public ClusterDetailsPb setClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public String getClusterName() { + return clusterName; + } + + public ClusterDetailsPb setClusterSource(ClusterSource clusterSource) { + this.clusterSource = clusterSource; + return this; + } + + public ClusterSource getClusterSource() { + return clusterSource; + } + + public ClusterDetailsPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public ClusterDetailsPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public 
ClusterDetailsPb setDataSecurityMode(DataSecurityMode dataSecurityMode) { + this.dataSecurityMode = dataSecurityMode; + return this; + } + + public DataSecurityMode getDataSecurityMode() { + return dataSecurityMode; + } + + public ClusterDetailsPb setDefaultTags(Map defaultTags) { + this.defaultTags = defaultTags; + return this; + } + + public Map getDefaultTags() { + return defaultTags; + } + + public ClusterDetailsPb setDockerImage(DockerImage dockerImage) { + this.dockerImage = dockerImage; + return this; + } + + public DockerImage getDockerImage() { + return dockerImage; + } + + public ClusterDetailsPb setDriver(SparkNode driver) { + this.driver = driver; + return this; + } + + public SparkNode getDriver() { + return driver; + } + + public ClusterDetailsPb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public ClusterDetailsPb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public ClusterDetailsPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public ClusterDetailsPb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public ClusterDetailsPb setExecutors(Collection executors) { + this.executors = executors; + return this; + } + + public Collection getExecutors() { + return executors; + } + + public ClusterDetailsPb setGcpAttributes(GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public 
GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public ClusterDetailsPb setInitScripts(Collection initScripts) { + this.initScripts = initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public ClusterDetailsPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public ClusterDetailsPb setIsSingleNode(Boolean isSingleNode) { + this.isSingleNode = isSingleNode; + return this; + } + + public Boolean getIsSingleNode() { + return isSingleNode; + } + + public ClusterDetailsPb setJdbcPort(Long jdbcPort) { + this.jdbcPort = jdbcPort; + return this; + } + + public Long getJdbcPort() { + return jdbcPort; + } + + public ClusterDetailsPb setKind(Kind kind) { + this.kind = kind; + return this; + } + + public Kind getKind() { + return kind; + } + + public ClusterDetailsPb setLastRestartedTime(Long lastRestartedTime) { + this.lastRestartedTime = lastRestartedTime; + return this; + } + + public Long getLastRestartedTime() { + return lastRestartedTime; + } + + public ClusterDetailsPb setLastStateLossTime(Long lastStateLossTime) { + this.lastStateLossTime = lastStateLossTime; + return this; + } + + public Long getLastStateLossTime() { + return lastStateLossTime; + } + + public ClusterDetailsPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public ClusterDetailsPb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + public ClusterDetailsPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public ClusterDetailsPb setRuntimeEngine(RuntimeEngine runtimeEngine) { + this.runtimeEngine = runtimeEngine; + return this; + 
} + + public RuntimeEngine getRuntimeEngine() { + return runtimeEngine; + } + + public ClusterDetailsPb setSingleUserName(String singleUserName) { + this.singleUserName = singleUserName; + return this; + } + + public String getSingleUserName() { + return singleUserName; + } + + public ClusterDetailsPb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public ClusterDetailsPb setSparkContextId(Long sparkContextId) { + this.sparkContextId = sparkContextId; + return this; + } + + public Long getSparkContextId() { + return sparkContextId; + } + + public ClusterDetailsPb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public ClusterDetailsPb setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + return this; + } + + public String getSparkVersion() { + return sparkVersion; + } + + public ClusterDetailsPb setSpec(ClusterSpec spec) { + this.spec = spec; + return this; + } + + public ClusterSpec getSpec() { + return spec; + } + + public ClusterDetailsPb setSshPublicKeys(Collection sshPublicKeys) { + this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + public ClusterDetailsPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public ClusterDetailsPb setState(State state) { + this.state = state; + return this; + } + + public State getState() { + return state; + } + + public ClusterDetailsPb setStateMessage(String stateMessage) { + this.stateMessage = stateMessage; + return this; + } + + public String getStateMessage() { + return stateMessage; + } + + public ClusterDetailsPb setTerminatedTime(Long terminatedTime) { + this.terminatedTime = terminatedTime; + return this; + } + + public Long 
getTerminatedTime() { + return terminatedTime; + } + + public ClusterDetailsPb setTerminationReason(TerminationReason terminationReason) { + this.terminationReason = terminationReason; + return this; + } + + public TerminationReason getTerminationReason() { + return terminationReason; + } + + public ClusterDetailsPb setUseMlRuntime(Boolean useMlRuntime) { + this.useMlRuntime = useMlRuntime; + return this; + } + + public Boolean getUseMlRuntime() { + return useMlRuntime; + } + + public ClusterDetailsPb setWorkloadType(WorkloadType workloadType) { + this.workloadType = workloadType; + return this; + } + + public WorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterDetailsPb that = (ClusterDetailsPb) o; + return Objects.equals(autoscale, that.autoscale) + && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterCores, that.clusterCores) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(clusterLogStatus, that.clusterLogStatus) + && Objects.equals(clusterMemoryMb, that.clusterMemoryMb) + && Objects.equals(clusterName, that.clusterName) + && Objects.equals(clusterSource, that.clusterSource) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(customTags, that.customTags) + && Objects.equals(dataSecurityMode, that.dataSecurityMode) + && Objects.equals(defaultTags, that.defaultTags) + && Objects.equals(dockerImage, that.dockerImage) + && Objects.equals(driver, that.driver) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && 
Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(executors, that.executors) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(isSingleNode, that.isSingleNode) + && Objects.equals(jdbcPort, that.jdbcPort) + && Objects.equals(kind, that.kind) + && Objects.equals(lastRestartedTime, that.lastRestartedTime) + && Objects.equals(lastStateLossTime, that.lastStateLossTime) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(numWorkers, that.numWorkers) + && Objects.equals(policyId, that.policyId) + && Objects.equals(runtimeEngine, that.runtimeEngine) + && Objects.equals(singleUserName, that.singleUserName) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkContextId, that.sparkContextId) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sparkVersion, that.sparkVersion) + && Objects.equals(spec, that.spec) + && Objects.equals(sshPublicKeys, that.sshPublicKeys) + && Objects.equals(startTime, that.startTime) + && Objects.equals(state, that.state) + && Objects.equals(stateMessage, that.stateMessage) + && Objects.equals(terminatedTime, that.terminatedTime) + && Objects.equals(terminationReason, that.terminationReason) + && Objects.equals(useMlRuntime, that.useMlRuntime) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscale, + autoterminationMinutes, + awsAttributes, + azureAttributes, + clusterCores, + clusterId, + clusterLogConf, + clusterLogStatus, + clusterMemoryMb, + clusterName, + clusterSource, + creatorUserName, + customTags, + dataSecurityMode, + defaultTags, + dockerImage, + driver, + driverInstancePoolId, + driverNodeTypeId, + enableElasticDisk, + enableLocalDiskEncryption, + executors, + gcpAttributes, + initScripts, + instancePoolId, + isSingleNode, + 
jdbcPort, + kind, + lastRestartedTime, + lastStateLossTime, + nodeTypeId, + numWorkers, + policyId, + runtimeEngine, + singleUserName, + sparkConf, + sparkContextId, + sparkEnvVars, + sparkVersion, + spec, + sshPublicKeys, + startTime, + state, + stateMessage, + terminatedTime, + terminationReason, + useMlRuntime, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(ClusterDetailsPb.class) + .add("autoscale", autoscale) + .add("autoterminationMinutes", autoterminationMinutes) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("clusterCores", clusterCores) + .add("clusterId", clusterId) + .add("clusterLogConf", clusterLogConf) + .add("clusterLogStatus", clusterLogStatus) + .add("clusterMemoryMb", clusterMemoryMb) + .add("clusterName", clusterName) + .add("clusterSource", clusterSource) + .add("creatorUserName", creatorUserName) + .add("customTags", customTags) + .add("dataSecurityMode", dataSecurityMode) + .add("defaultTags", defaultTags) + .add("dockerImage", dockerImage) + .add("driver", driver) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableElasticDisk", enableElasticDisk) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("executors", executors) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", instancePoolId) + .add("isSingleNode", isSingleNode) + .add("jdbcPort", jdbcPort) + .add("kind", kind) + .add("lastRestartedTime", lastRestartedTime) + .add("lastStateLossTime", lastStateLossTime) + .add("nodeTypeId", nodeTypeId) + .add("numWorkers", numWorkers) + .add("policyId", policyId) + .add("runtimeEngine", runtimeEngine) + .add("singleUserName", singleUserName) + .add("sparkConf", sparkConf) + .add("sparkContextId", sparkContextId) + .add("sparkEnvVars", sparkEnvVars) + .add("sparkVersion", sparkVersion) + .add("spec", spec) + .add("sshPublicKeys", 
sshPublicKeys) + .add("startTime", startTime) + .add("state", state) + .add("stateMessage", stateMessage) + .add("terminatedTime", terminatedTime) + .add("terminationReason", terminationReason) + .add("useMlRuntime", useMlRuntime) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java index c6bf7cd5c..bc2479aad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterEvent.ClusterEventSerializer.class) +@JsonDeserialize(using = ClusterEvent.ClusterEventDeserializer.class) public class ClusterEvent { /** */ - @JsonProperty("cluster_id") private String clusterId; /** */ - @JsonProperty("data_plane_event_details") private DataPlaneEventDetails dataPlaneEventDetails; /** */ - @JsonProperty("details") private EventDetails details; /** * The timestamp when the event occurred, stored as the number of milliseconds since the Unix * epoch. 
If not provided, this will be assigned by the Timeline service. */ - @JsonProperty("timestamp") private Long timestamp; /** */ - @JsonProperty("type") private EventType typeValue; public ClusterEvent setClusterId(String clusterId) { @@ -104,4 +110,45 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + ClusterEventPb toPb() { + ClusterEventPb pb = new ClusterEventPb(); + pb.setClusterId(clusterId); + pb.setDataPlaneEventDetails(dataPlaneEventDetails); + pb.setDetails(details); + pb.setTimestamp(timestamp); + pb.setType(typeValue); + + return pb; + } + + static ClusterEvent fromPb(ClusterEventPb pb) { + ClusterEvent model = new ClusterEvent(); + model.setClusterId(pb.getClusterId()); + model.setDataPlaneEventDetails(pb.getDataPlaneEventDetails()); + model.setDetails(pb.getDetails()); + model.setTimestamp(pb.getTimestamp()); + model.setType(pb.getType()); + + return model; + } + + public static class ClusterEventSerializer extends JsonSerializer { + @Override + public void serialize(ClusterEvent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterEventPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterEventDeserializer extends JsonDeserializer { + @Override + public ClusterEvent deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * JSON wire representation of a cluster event. Carries the snake_case
 * {@code @JsonProperty} mappings; the public {@code ClusterEvent} model converts
 * to and from this type via {@code toPb()}/{@code fromPb()}.
 */
@Generated
class ClusterEventPb {
  // Serialized as "cluster_id".
  @JsonProperty("cluster_id")
  private String clusterId;

  // Serialized as "data_plane_event_details".
  @JsonProperty("data_plane_event_details")
  private DataPlaneEventDetails dataPlaneEventDetails;

  // Serialized as "details".
  @JsonProperty("details")
  private EventDetails details;

  // Milliseconds since the Unix epoch; serialized as "timestamp".
  @JsonProperty("timestamp")
  private Long timestamp;

  // Serialized as "type"; the Java field is named typeValue to avoid the keyword clash.
  @JsonProperty("type")
  private EventType typeValue;

  public ClusterEventPb setClusterId(String clusterId) {
    this.clusterId = clusterId;
    return this;
  }

  public String getClusterId() {
    return clusterId;
  }

  public ClusterEventPb setDataPlaneEventDetails(DataPlaneEventDetails dataPlaneEventDetails) {
    this.dataPlaneEventDetails = dataPlaneEventDetails;
    return this;
  }

  public DataPlaneEventDetails getDataPlaneEventDetails() {
    return dataPlaneEventDetails;
  }

  public ClusterEventPb setDetails(EventDetails details) {
    this.details = details;
    return this;
  }

  public EventDetails getDetails() {
    return details;
  }

  public ClusterEventPb setTimestamp(Long timestamp) {
    this.timestamp = timestamp;
    return this;
  }

  public Long getTimestamp() {
    return timestamp;
  }

  public ClusterEventPb setType(EventType typeValue) {
    this.typeValue = typeValue;
    return this;
  }

  public EventType getType() {
    return typeValue;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ClusterEventPb other = (ClusterEventPb) o;
    return Objects.equals(clusterId, other.clusterId)
        && Objects.equals(dataPlaneEventDetails, other.dataPlaneEventDetails)
        && Objects.equals(details, other.details)
        && Objects.equals(timestamp, other.timestamp)
        && Objects.equals(typeValue, other.typeValue);
  }

  @Override
  public int hashCode() {
    // Field order must stay in sync with equals().
    return Objects.hash(clusterId, dataPlaneEventDetails, details, timestamp, typeValue);
  }

  @Override
  public String toString() {
    return new ToStringer(ClusterEventPb.class)
        .add("clusterId", clusterId)
        .add("dataPlaneEventDetails", dataPlaneEventDetails)
        .add("details", details)
        .add("timestamp", timestamp)
        .add("typeValue", typeValue)
        .toString();
  }
}
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterLibraryStatuses.ClusterLibraryStatusesSerializer.class) +@JsonDeserialize(using = ClusterLibraryStatuses.ClusterLibraryStatusesDeserializer.class) public class ClusterLibraryStatuses { /** Unique identifier for the cluster. */ - @JsonProperty("cluster_id") private String clusterId; /** Status of all libraries on the cluster. */ - @JsonProperty("library_statuses") private Collection libraryStatuses; public ClusterLibraryStatuses setClusterId(String clusterId) { @@ -57,4 +66,43 @@ public String toString() { .add("libraryStatuses", libraryStatuses) .toString(); } + + ClusterLibraryStatusesPb toPb() { + ClusterLibraryStatusesPb pb = new ClusterLibraryStatusesPb(); + pb.setClusterId(clusterId); + pb.setLibraryStatuses(libraryStatuses); + + return pb; + } + + static ClusterLibraryStatuses fromPb(ClusterLibraryStatusesPb pb) { + ClusterLibraryStatuses model = new ClusterLibraryStatuses(); + model.setClusterId(pb.getClusterId()); + model.setLibraryStatuses(pb.getLibraryStatuses()); + + return model; + } + + public static class ClusterLibraryStatusesSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterLibraryStatuses value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterLibraryStatusesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterLibraryStatusesDeserializer + extends JsonDeserializer { + @Override + public ClusterLibraryStatuses deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * JSON wire representation of {@code ClusterLibraryStatuses}; converted to and
 * from the public model via {@code toPb()}/{@code fromPb()}.
 */
@Generated
class ClusterLibraryStatusesPb {
  /** Unique identifier for the cluster. */
  @JsonProperty("cluster_id")
  private String clusterId;

  /** Status of all libraries on the cluster. */
  // NOTE(review): element type reconstructed as LibraryFullStatus from the sibling
  // ClusterLibraryStatuses model — confirm against the generator output.
  @JsonProperty("library_statuses")
  private Collection<LibraryFullStatus> libraryStatuses;

  public ClusterLibraryStatusesPb setClusterId(String clusterId) {
    this.clusterId = clusterId;
    return this;
  }

  public String getClusterId() {
    return clusterId;
  }

  public ClusterLibraryStatusesPb setLibraryStatuses(
      Collection<LibraryFullStatus> libraryStatuses) {
    this.libraryStatuses = libraryStatuses;
    return this;
  }

  public Collection<LibraryFullStatus> getLibraryStatuses() {
    return libraryStatuses;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ClusterLibraryStatusesPb that = (ClusterLibraryStatusesPb) o;
    return Objects.equals(clusterId, that.clusterId)
        && Objects.equals(libraryStatuses, that.libraryStatuses);
  }

  @Override
  public int hashCode() {
    return Objects.hash(clusterId, libraryStatuses);
  }

  @Override
  public String toString() {
    return new ToStringer(ClusterLibraryStatusesPb.class)
        .add("clusterId", clusterId)
        .add("libraryStatuses", libraryStatuses)
        .toString();
  }
}
`{ "volumes": { "destination": * "/Volumes/catalog/schema/volume/cluster_log" } }` */ - @JsonProperty("volumes") private VolumesStorageInfo volumes; public ClusterLogConf setDbfs(DbfsStorageInfo dbfs) { @@ -83,4 +91,42 @@ public String toString() { .add("volumes", volumes) .toString(); } + + ClusterLogConfPb toPb() { + ClusterLogConfPb pb = new ClusterLogConfPb(); + pb.setDbfs(dbfs); + pb.setS3(s3); + pb.setVolumes(volumes); + + return pb; + } + + static ClusterLogConf fromPb(ClusterLogConfPb pb) { + ClusterLogConf model = new ClusterLogConf(); + model.setDbfs(pb.getDbfs()); + model.setS3(pb.getS3()); + model.setVolumes(pb.getVolumes()); + + return model; + } + + public static class ClusterLogConfSerializer extends JsonSerializer { + @Override + public void serialize(ClusterLogConf value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterLogConfPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterLogConfDeserializer extends JsonDeserializer { + @Override + public ClusterLogConf deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterLogConfPb pb = mapper.readValue(p, ClusterLogConfPb.class); + return ClusterLogConf.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConfPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConfPb.java new file mode 100755 index 000000000..94866885e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConfPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Cluster log delivery config — JSON wire representation of
 * {@code ClusterLogConf}; converted via {@code toPb()}/{@code fromPb()}.
 */
@Generated
class ClusterLogConfPb {
  // DBFS destination, e.g. { "dbfs" : { "destination" : "dbfs:/home/cluster_log" } }.
  @JsonProperty("dbfs")
  private DbfsStorageInfo dbfs;

  // S3 destination; the cluster's instance profile must be able to write to it.
  @JsonProperty("s3")
  private S3StorageInfo s3;

  // Volumes destination, e.g. { "volumes": { "destination": "/Volumes/catalog/schema/volume/cluster_log" } }.
  @JsonProperty("volumes")
  private VolumesStorageInfo volumes;

  public ClusterLogConfPb setDbfs(DbfsStorageInfo dbfs) {
    this.dbfs = dbfs;
    return this;
  }

  public DbfsStorageInfo getDbfs() {
    return dbfs;
  }

  public ClusterLogConfPb setS3(S3StorageInfo s3) {
    this.s3 = s3;
    return this;
  }

  public S3StorageInfo getS3() {
    return s3;
  }

  public ClusterLogConfPb setVolumes(VolumesStorageInfo volumes) {
    this.volumes = volumes;
    return this;
  }

  public VolumesStorageInfo getVolumes() {
    return volumes;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ClusterLogConfPb other = (ClusterLogConfPb) o;
    return Objects.equals(dbfs, other.dbfs)
        && Objects.equals(s3, other.s3)
        && Objects.equals(volumes, other.volumes);
  }

  @Override
  public int hashCode() {
    // Field order must stay in sync with equals().
    return Objects.hash(dbfs, s3, volumes);
  }

  @Override
  public String toString() {
    return new ToStringer(ClusterLogConfPb.class)
        .add("dbfs", dbfs)
        .add("s3", s3)
        .add("volumes", volumes)
        .toString();
  }
}
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPermission.ClusterPermissionSerializer.class) +@JsonDeserialize(using = ClusterPermission.ClusterPermissionDeserializer.class) public class ClusterPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; public ClusterPermission setInherited(Boolean inherited) { @@ -72,4 +80,42 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ClusterPermissionPb toPb() { + ClusterPermissionPb pb = new ClusterPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ClusterPermission fromPb(ClusterPermissionPb pb) { + ClusterPermission model = new ClusterPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ClusterPermissionSerializer extends JsonSerializer { + @Override + public void 
serialize(ClusterPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPermissionDeserializer extends JsonDeserializer { + @Override + public ClusterPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPermissionPb pb = mapper.readValue(p, ClusterPermissionPb.class); + return ClusterPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionPb.java new file mode 100755 index 000000000..959d097cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * JSON wire representation of {@code ClusterPermission}; converted to and from
 * the public model via {@code toPb()}/{@code fromPb()}.
 */
@Generated
class ClusterPermissionPb {
  @JsonProperty("inherited")
  private Boolean inherited;

  // NOTE(review): element type reconstructed as String (object paths the
  // permission is inherited from) — confirm against the generator output.
  @JsonProperty("inherited_from_object")
  private Collection<String> inheritedFromObject;

  /** Permission level. */
  @JsonProperty("permission_level")
  private ClusterPermissionLevel permissionLevel;

  public ClusterPermissionPb setInherited(Boolean inherited) {
    this.inherited = inherited;
    return this;
  }

  public Boolean getInherited() {
    return inherited;
  }

  public ClusterPermissionPb setInheritedFromObject(Collection<String> inheritedFromObject) {
    this.inheritedFromObject = inheritedFromObject;
    return this;
  }

  public Collection<String> getInheritedFromObject() {
    return inheritedFromObject;
  }

  public ClusterPermissionPb setPermissionLevel(ClusterPermissionLevel permissionLevel) {
    this.permissionLevel = permissionLevel;
    return this;
  }

  public ClusterPermissionLevel getPermissionLevel() {
    return permissionLevel;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ClusterPermissionPb that = (ClusterPermissionPb) o;
    return Objects.equals(inherited, that.inherited)
        && Objects.equals(inheritedFromObject, that.inheritedFromObject)
        && Objects.equals(permissionLevel, that.permissionLevel);
  }

  @Override
  public int hashCode() {
    return Objects.hash(inherited, inheritedFromObject, permissionLevel);
  }

  @Override
  public String toString() {
    return new ToStringer(ClusterPermissionPb.class)
        .add("inherited", inherited)
        .add("inheritedFromObject", inheritedFromObject)
        .add("permissionLevel", permissionLevel)
        .toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissions.java index b56ab6c6f..dda567134 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPermissions.ClusterPermissionsSerializer.class) +@JsonDeserialize(using = ClusterPermissions.ClusterPermissionsDeserializer.class) public class ClusterPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public ClusterPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + ClusterPermissionsPb toPb() { + ClusterPermissionsPb pb = new ClusterPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static ClusterPermissions 
fromPb(ClusterPermissionsPb pb) { + ClusterPermissions model = new ClusterPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class ClusterPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(ClusterPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPermissionsDeserializer extends JsonDeserializer { + @Override + public ClusterPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPermissionsPb pb = mapper.readValue(p, ClusterPermissionsPb.class); + return ClusterPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java index 33db8d0a8..c8ba24698 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPermissionsDescription.ClusterPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = ClusterPermissionsDescription.ClusterPermissionsDescriptionDeserializer.class) public class ClusterPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; public ClusterPermissionsDescription setDescription(String description) { @@ -56,4 +66,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ClusterPermissionsDescriptionPb toPb() { + ClusterPermissionsDescriptionPb pb = new ClusterPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ClusterPermissionsDescription fromPb(ClusterPermissionsDescriptionPb pb) { + ClusterPermissionsDescription model = new ClusterPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ClusterPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public ClusterPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPermissionsDescriptionPb pb = + mapper.readValue(p, ClusterPermissionsDescriptionPb.class); + return ClusterPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescriptionPb.java new file mode 100755 index 000000000..58e6b74b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private ClusterPermissionLevel permissionLevel; + + public ClusterPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ClusterPermissionsDescriptionPb setPermissionLevel( + ClusterPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ClusterPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPermissionsDescriptionPb that = (ClusterPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return 
Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ClusterPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsPb.java new file mode 100755 index 000000000..3a157343b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public ClusterPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ClusterPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ClusterPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPermissionsPb that = (ClusterPermissionsPb) o; + return 
Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(ClusterPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequest.java index de517f43e..a0327c79f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPermissionsRequest.ClusterPermissionsRequestSerializer.class) +@JsonDeserialize(using = ClusterPermissionsRequest.ClusterPermissionsRequestDeserializer.class) public class 
ClusterPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The cluster for which to get or manage permissions. */ - @JsonIgnore private String clusterId; + private String clusterId; public ClusterPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("clusterId", clusterId) .toString(); } + + ClusterPermissionsRequestPb toPb() { + ClusterPermissionsRequestPb pb = new ClusterPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setClusterId(clusterId); + + return pb; + } + + static ClusterPermissionsRequest fromPb(ClusterPermissionsRequestPb pb) { + ClusterPermissionsRequest model = new ClusterPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class ClusterPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public ClusterPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPermissionsRequestPb pb = mapper.readValue(p, ClusterPermissionsRequestPb.class); + return ClusterPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequestPb.java new file mode 100755 index 000000000..1ce7d6512 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String clusterId; + + public ClusterPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ClusterPermissionsRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPermissionsRequestPb that = (ClusterPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, 
clusterId); + } + + @Override + public String toString() { + return new ToStringer(ClusterPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("clusterId", clusterId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java index ec3d88c43..0e484b563 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java @@ -21,7 +21,7 @@ public CreatePolicyResponse create(CreatePolicy request) { String path = "/api/2.0/policies/clusters/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreatePolicyResponse.class); @@ -35,7 +35,7 @@ public void delete(DeletePolicy request) { String path = "/api/2.0/policies/clusters/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeletePolicyResponse.class); @@ -49,7 +49,7 @@ public void edit(EditPolicy request) { String path = "/api/2.0/policies/clusters/edit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditPolicyResponse.class); @@ -63,7 +63,7 @@ public Policy get(GetClusterPolicyRequest request) { 
String path = "/api/2.0/policies/clusters/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Policy.class); } catch (IOException e) { @@ -80,7 +80,7 @@ public GetClusterPolicyPermissionLevelsResponse getPermissionLevels( request.getClusterPolicyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetClusterPolicyPermissionLevelsResponse.class); } catch (IOException e) { @@ -94,7 +94,7 @@ public ClusterPolicyPermissions getPermissions(GetClusterPolicyPermissionsReques String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ClusterPolicyPermissions.class); } catch (IOException e) { @@ -107,7 +107,7 @@ public ListPoliciesResponse list(ListClusterPoliciesRequest request) { String path = "/api/2.0/policies/clusters/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListPoliciesResponse.class); } catch (IOException e) { @@ -121,7 +121,7 @@ public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest r String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return 
apiClient.execute(req, ClusterPolicyPermissions.class); @@ -136,7 +136,7 @@ public ClusterPolicyPermissions updatePermissions(ClusterPolicyPermissionsReques String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ClusterPolicyPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java index 651b6e1bb..1ad0dd43e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ClusterPolicyAccessControlRequest.ClusterPolicyAccessControlRequestSerializer.class) +@JsonDeserialize( + using = 
ClusterPolicyAccessControlRequest.ClusterPolicyAccessControlRequestDeserializer.class) public class ClusterPolicyAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ClusterPolicyAccessControlRequest setGroupName(String groupName) { @@ -87,4 +96,48 @@ public String toString() { .add("userName", userName) .toString(); } + + ClusterPolicyAccessControlRequestPb toPb() { + ClusterPolicyAccessControlRequestPb pb = new ClusterPolicyAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ClusterPolicyAccessControlRequest fromPb(ClusterPolicyAccessControlRequestPb pb) { + ClusterPolicyAccessControlRequest model = new ClusterPolicyAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ClusterPolicyAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPolicyAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPolicyAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec 
is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyAccessControlRequestPb pb = + mapper.readValue(p, ClusterPolicyAccessControlRequestPb.class); + return ClusterPolicyAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequestPb.java new file mode 100755 index 000000000..0d7906556 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterPolicyAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private ClusterPolicyPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ClusterPolicyAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ClusterPolicyAccessControlRequestPb setPermissionLevel( + ClusterPolicyPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ClusterPolicyPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ClusterPolicyAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return 
this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ClusterPolicyAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyAccessControlRequestPb that = (ClusterPolicyAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponse.java index 58f262a8c..746303610 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponse.java @@ -4,30 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ClusterPolicyAccessControlResponse.ClusterPolicyAccessControlResponseSerializer.class) +@JsonDeserialize( + using = ClusterPolicyAccessControlResponse.ClusterPolicyAccessControlResponseDeserializer.class) public class ClusterPolicyAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ClusterPolicyAccessControlResponse setAllPermissions( @@ -103,4 +111,50 @@ public String toString() { .add("userName", userName) .toString(); } + + ClusterPolicyAccessControlResponsePb toPb() { + ClusterPolicyAccessControlResponsePb pb = new ClusterPolicyAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ClusterPolicyAccessControlResponse fromPb(ClusterPolicyAccessControlResponsePb pb) { + ClusterPolicyAccessControlResponse model = new ClusterPolicyAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ClusterPolicyAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPolicyAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPolicyAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyAccessControlResponsePb pb = + mapper.readValue(p, ClusterPolicyAccessControlResponsePb.class); + return ClusterPolicyAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponsePb.java new file mode 100755 index 000000000..9b18a97b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPolicyAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ClusterPolicyAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public ClusterPolicyAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ClusterPolicyAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return 
groupName; + } + + public ClusterPolicyAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ClusterPolicyAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyAccessControlResponsePb that = (ClusterPolicyAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java index fa2715b7f..e03fe75ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPolicyPermission.ClusterPolicyPermissionSerializer.class) +@JsonDeserialize(using = ClusterPolicyPermission.ClusterPolicyPermissionDeserializer.class) public class ClusterPolicyPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; public ClusterPolicyPermission setInherited(Boolean inherited) { @@ -72,4 +80,45 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ClusterPolicyPermissionPb toPb() { + ClusterPolicyPermissionPb pb = new ClusterPolicyPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ClusterPolicyPermission fromPb(ClusterPolicyPermissionPb pb) { + ClusterPolicyPermission model = new ClusterPolicyPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ClusterPolicyPermissionSerializer + extends JsonSerializer { + @Override + public void serialize( + 
ClusterPolicyPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPolicyPermissionDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyPermissionPb pb = mapper.readValue(p, ClusterPolicyPermissionPb.class); + return ClusterPolicyPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionPb.java new file mode 100755 index 000000000..1c6946de3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPolicyPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private ClusterPolicyPermissionLevel permissionLevel; + + public ClusterPolicyPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public ClusterPolicyPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public ClusterPolicyPermissionPb setPermissionLevel( + ClusterPolicyPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ClusterPolicyPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyPermissionPb that = (ClusterPolicyPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissions.java index 2aed3d999..6e8b8a989 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterPolicyPermissions.ClusterPolicyPermissionsSerializer.class) +@JsonDeserialize(using = ClusterPolicyPermissions.ClusterPolicyPermissionsDeserializer.class) public class ClusterPolicyPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public ClusterPolicyPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + ClusterPolicyPermissionsPb toPb() { + ClusterPolicyPermissionsPb pb = new ClusterPolicyPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + 
pb.setObjectType(objectType); + + return pb; + } + + static ClusterPolicyPermissions fromPb(ClusterPolicyPermissionsPb pb) { + ClusterPolicyPermissions model = new ClusterPolicyPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class ClusterPolicyPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPolicyPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPolicyPermissionsDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyPermissionsPb pb = mapper.readValue(p, ClusterPolicyPermissionsPb.class); + return ClusterPolicyPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java index 169d11d86..e6f66337c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java @@ -4,17 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ClusterPolicyPermissionsDescription.ClusterPolicyPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = + ClusterPolicyPermissionsDescription.ClusterPolicyPermissionsDescriptionDeserializer.class) public class ClusterPolicyPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; public ClusterPolicyPermissionsDescription setDescription(String description) { @@ -57,4 +69,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ClusterPolicyPermissionsDescriptionPb toPb() { + ClusterPolicyPermissionsDescriptionPb pb = new ClusterPolicyPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ClusterPolicyPermissionsDescription fromPb(ClusterPolicyPermissionsDescriptionPb pb) { + ClusterPolicyPermissionsDescription model = new ClusterPolicyPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ClusterPolicyPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPolicyPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
ClusterPolicyPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyPermissionsDescription deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyPermissionsDescriptionPb pb = + mapper.readValue(p, ClusterPolicyPermissionsDescriptionPb.class); + return ClusterPolicyPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescriptionPb.java new file mode 100755 index 000000000..6c47bcf1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterPolicyPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private ClusterPolicyPermissionLevel permissionLevel; + + public ClusterPolicyPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ClusterPolicyPermissionsDescriptionPb setPermissionLevel( + ClusterPolicyPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ClusterPolicyPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyPermissionsDescriptionPb that = (ClusterPolicyPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsPb.java new file mode 100755 index 000000000..45b0187b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPolicyPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public ClusterPolicyPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ClusterPolicyPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ClusterPolicyPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyPermissionsPb that = (ClusterPolicyPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequest.java index b10d9d156..499226e8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequest.java @@ -4,19 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ClusterPolicyPermissionsRequest.ClusterPolicyPermissionsRequestSerializer.class) +@JsonDeserialize( + using = ClusterPolicyPermissionsRequest.ClusterPolicyPermissionsRequestDeserializer.class) public class ClusterPolicyPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The cluster policy for which to get or manage permissions. 
*/ - @JsonIgnore private String clusterPolicyId; + private String clusterPolicyId; public ClusterPolicyPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +69,44 @@ public String toString() { .add("clusterPolicyId", clusterPolicyId) .toString(); } + + ClusterPolicyPermissionsRequestPb toPb() { + ClusterPolicyPermissionsRequestPb pb = new ClusterPolicyPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setClusterPolicyId(clusterPolicyId); + + return pb; + } + + static ClusterPolicyPermissionsRequest fromPb(ClusterPolicyPermissionsRequestPb pb) { + ClusterPolicyPermissionsRequest model = new ClusterPolicyPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setClusterPolicyId(pb.getClusterPolicyId()); + + return model; + } + + public static class ClusterPolicyPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterPolicyPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterPolicyPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterPolicyPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public ClusterPolicyPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterPolicyPermissionsRequestPb pb = + mapper.readValue(p, ClusterPolicyPermissionsRequestPb.class); + return ClusterPolicyPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequestPb.java new file mode 100755 index 000000000..032202977 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterPolicyPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String clusterPolicyId; + + public ClusterPolicyPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ClusterPolicyPermissionsRequestPb setClusterPolicyId(String clusterPolicyId) { + this.clusterPolicyId = clusterPolicyId; + return this; + } + + public String getClusterPolicyId() { + return clusterPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterPolicyPermissionsRequestPb that = (ClusterPolicyPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && 
Objects.equals(clusterPolicyId, that.clusterPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, clusterPolicyId); + } + + @Override + public String toString() { + return new ToStringer(ClusterPolicyPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("clusterPolicyId", clusterPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChange.java index a97120951..84f6f0e10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChange.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,9 +21,10 @@ * policy. */ @Generated +@JsonSerialize(using = ClusterSettingsChange.ClusterSettingsChangeSerializer.class) +@JsonDeserialize(using = ClusterSettingsChange.ClusterSettingsChangeDeserializer.class) public class ClusterSettingsChange { /** The field where this change would be made. 
*/ - @JsonProperty("field") private String field; /** @@ -22,7 +32,6 @@ public class ClusterSettingsChange { * string) converted to a string. This is intended to be read by a human. The typed new value of * this field can be retrieved by reading the settings field in the API response. */ - @JsonProperty("new_value") private String newValue; /** @@ -30,7 +39,6 @@ public class ClusterSettingsChange { * boolean, or a string) converted to a string. This is intended to be read by a human. The type * of the field can be retrieved by reading the settings field in the API response. */ - @JsonProperty("previous_value") private String previousValue; public ClusterSettingsChange setField(String field) { @@ -83,4 +91,45 @@ public String toString() { .add("previousValue", previousValue) .toString(); } + + ClusterSettingsChangePb toPb() { + ClusterSettingsChangePb pb = new ClusterSettingsChangePb(); + pb.setField(field); + pb.setNewValue(newValue); + pb.setPreviousValue(previousValue); + + return pb; + } + + static ClusterSettingsChange fromPb(ClusterSettingsChangePb pb) { + ClusterSettingsChange model = new ClusterSettingsChange(); + model.setField(pb.getField()); + model.setNewValue(pb.getNewValue()); + model.setPreviousValue(pb.getPreviousValue()); + + return model; + } + + public static class ClusterSettingsChangeSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterSettingsChange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterSettingsChangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterSettingsChangeDeserializer + extends JsonDeserializer { + @Override + public ClusterSettingsChange deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterSettingsChangePb pb = mapper.readValue(p, ClusterSettingsChangePb.class); + return ClusterSettingsChange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChangePb.java new file mode 100755 index 000000000..5b695da57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSettingsChangePb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Represents a change to the cluster settings required for the cluster to become compliant with its + * policy. 
+ */ +@Generated +class ClusterSettingsChangePb { + @JsonProperty("field") + private String field; + + @JsonProperty("new_value") + private String newValue; + + @JsonProperty("previous_value") + private String previousValue; + + public ClusterSettingsChangePb setField(String field) { + this.field = field; + return this; + } + + public String getField() { + return field; + } + + public ClusterSettingsChangePb setNewValue(String newValue) { + this.newValue = newValue; + return this; + } + + public String getNewValue() { + return newValue; + } + + public ClusterSettingsChangePb setPreviousValue(String previousValue) { + this.previousValue = previousValue; + return this; + } + + public String getPreviousValue() { + return previousValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterSettingsChangePb that = (ClusterSettingsChangePb) o; + return Objects.equals(field, that.field) + && Objects.equals(newValue, that.newValue) + && Objects.equals(previousValue, that.previousValue); + } + + @Override + public int hashCode() { + return Objects.hash(field, newValue, previousValue); + } + + @Override + public String toString() { + return new ToStringer(ClusterSettingsChangePb.class) + .add("field", field) + .add("newValue", newValue) + .add("previousValue", previousValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSize.java index 6f3b6efdf..d12fca04d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSize.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSize.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterSize.ClusterSizeSerializer.class) +@JsonDeserialize(using = ClusterSize.ClusterSizeDeserializer.class) public class ClusterSize { /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -26,7 +36,6 @@ public class ClusterSize { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. 
*/ - @JsonProperty("num_workers") private Long numWorkers; public ClusterSize setAutoscale(AutoScale autoscale) { @@ -67,4 +76,39 @@ public String toString() { .add("numWorkers", numWorkers) .toString(); } + + ClusterSizePb toPb() { + ClusterSizePb pb = new ClusterSizePb(); + pb.setAutoscale(autoscale); + pb.setNumWorkers(numWorkers); + + return pb; + } + + static ClusterSize fromPb(ClusterSizePb pb) { + ClusterSize model = new ClusterSize(); + model.setAutoscale(pb.getAutoscale()); + model.setNumWorkers(pb.getNumWorkers()); + + return model; + } + + public static class ClusterSizeSerializer extends JsonSerializer { + @Override + public void serialize(ClusterSize value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterSizePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterSizeDeserializer extends JsonDeserializer { + @Override + public ClusterSize deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterSizePb pb = mapper.readValue(p, ClusterSizePb.class); + return ClusterSize.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSizePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSizePb.java new file mode 100755 index 000000000..1d9041df6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSizePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterSizePb { + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("num_workers") + private Long numWorkers; + + public ClusterSizePb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public ClusterSizePb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterSizePb that = (ClusterSizePb) o; + return Objects.equals(autoscale, that.autoscale) && Objects.equals(numWorkers, that.numWorkers); + } + + @Override + public int hashCode() { + return Objects.hash(autoscale, numWorkers); + } + + @Override + public String toString() { + return new ToStringer(ClusterSizePb.class) + .add("autoscale", autoscale) + .add("numWorkers", numWorkers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index 78c7ddbfa..b1389ec5c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -14,19 +23,19 @@ * cluster. */ @Generated +@JsonSerialize(using = ClusterSpec.ClusterSpecSerializer.class) +@JsonDeserialize(using = ClusterSpec.ClusterSpecDeserializer.class) public class ClusterSpec { /** * When set to true, fixed and default values from the policy will be used for fields that are * omitted. When set to false, only fixed values from the policy will be applied. */ - @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -35,21 +44,18 @@ public class ClusterSpec { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** @@ -59,7 +65,6 @@ public class ClusterSpec { * every `5 mins`. 
The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** @@ -67,7 +72,6 @@ public class ClusterSpec { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. */ - @JsonProperty("cluster_name") private String clusterName; /** @@ -79,7 +83,6 @@ public class ClusterSpec { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -109,18 +112,15 @@ public class ClusterSpec { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -131,7 +131,6 @@ public class ClusterSpec { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -139,18 +138,15 @@ public class ClusterSpec { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -158,11 +154,9 @@ public class ClusterSpec { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -171,7 +165,6 @@ public class ClusterSpec { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** @@ -189,7 +182,6 @@ public class ClusterSpec { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** @@ -198,7 +190,6 @@ public class ClusterSpec { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -211,21 +202,11 @@ public class ClusterSpec { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -235,11 +216,9 @@ public class ClusterSpec { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -247,7 +226,6 @@ public class ClusterSpec { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -263,14 +241,12 @@ public class ClusterSpec { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** @@ -278,24 +254,17 @@ public class ClusterSpec { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public ClusterSpec setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { @@ -496,24 +465,6 @@ public String getPolicyId() { return policyId; } - public ClusterSpec setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterSpec setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterSpec setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -568,15 +519,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public ClusterSpec setTotalInitialRemoteShuffleDiskSize(Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterSpec setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -622,15 +564,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, 
that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -660,15 +599,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -698,17 +634,105 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + ClusterSpecPb toPb() { + ClusterSpecPb pb = new ClusterSpecPb(); + pb.setApplyPolicyDefaultValues(applyPolicyDefaultValues); + pb.setAutoscale(autoscale); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterName(clusterName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDockerImage(dockerImage); + pb.setDriverInstancePoolId(driverInstancePoolId); + pb.setDriverNodeTypeId(driverNodeTypeId); + 
pb.setEnableElasticDisk(enableElasticDisk); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setKind(kind); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSshPublicKeys(sshPublicKeys); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ClusterSpec fromPb(ClusterSpecPb pb) { + ClusterSpec model = new ClusterSpec(); + model.setApplyPolicyDefaultValues(pb.getApplyPolicyDefaultValues()); + model.setAutoscale(pb.getAutoscale()); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterName(pb.getClusterName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDockerImage(pb.getDockerImage()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setKind(pb.getKind()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + 
model.setSparkConf(pb.getSparkConf()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ClusterSpecSerializer extends JsonSerializer { + @Override + public void serialize(ClusterSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterSpecDeserializer extends JsonDeserializer { + @Override + public ClusterSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterSpecPb pb = mapper.readValue(p, ClusterSpecPb.class); + return ClusterSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpecPb.java new file mode 100755 index 000000000..9964cdd00 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpecPb.java @@ -0,0 +1,485 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +/** + * Contains a snapshot of the latest user specified settings that were used to create/edit the + * cluster. 
+ */ +@Generated +class ClusterSpecPb { + @JsonProperty("apply_policy_default_values") + private Boolean applyPolicyDefaultValues; + + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("autotermination_minutes") + private Long autoterminationMinutes; + + @JsonProperty("aws_attributes") + private AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private AzureAttributes azureAttributes; + + @JsonProperty("cluster_log_conf") + private ClusterLogConf clusterLogConf; + + @JsonProperty("cluster_name") + private String clusterName; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("data_security_mode") + private DataSecurityMode dataSecurityMode; + + @JsonProperty("docker_image") + private DockerImage dockerImage; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("gcp_attributes") + private GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("is_single_node") + private Boolean isSingleNode; + + @JsonProperty("kind") + private Kind kind; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_workers") + private Long numWorkers; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("runtime_engine") + private RuntimeEngine runtimeEngine; + + @JsonProperty("single_user_name") + private String singleUserName; + + @JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("spark_version") + private String sparkVersion; + + 
@JsonProperty("ssh_public_keys") + private Collection sshPublicKeys; + + @JsonProperty("use_ml_runtime") + private Boolean useMlRuntime; + + @JsonProperty("workload_type") + private WorkloadType workloadType; + + public ClusterSpecPb setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { + this.applyPolicyDefaultValues = applyPolicyDefaultValues; + return this; + } + + public Boolean getApplyPolicyDefaultValues() { + return applyPolicyDefaultValues; + } + + public ClusterSpecPb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public ClusterSpecPb setAutoterminationMinutes(Long autoterminationMinutes) { + this.autoterminationMinutes = autoterminationMinutes; + return this; + } + + public Long getAutoterminationMinutes() { + return autoterminationMinutes; + } + + public ClusterSpecPb setAwsAttributes(AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public AwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public ClusterSpecPb setAzureAttributes(AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public ClusterSpecPb setClusterLogConf(ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public ClusterLogConf getClusterLogConf() { + return clusterLogConf; + } + + public ClusterSpecPb setClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public String getClusterName() { + return clusterName; + } + + public ClusterSpecPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public ClusterSpecPb setDataSecurityMode(DataSecurityMode dataSecurityMode) { + this.dataSecurityMode = dataSecurityMode; + return this; 
+ } + + public DataSecurityMode getDataSecurityMode() { + return dataSecurityMode; + } + + public ClusterSpecPb setDockerImage(DockerImage dockerImage) { + this.dockerImage = dockerImage; + return this; + } + + public DockerImage getDockerImage() { + return dockerImage; + } + + public ClusterSpecPb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public ClusterSpecPb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public ClusterSpecPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public ClusterSpecPb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public ClusterSpecPb setGcpAttributes(GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public ClusterSpecPb setInitScripts(Collection initScripts) { + this.initScripts = initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public ClusterSpecPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public ClusterSpecPb setIsSingleNode(Boolean isSingleNode) { + this.isSingleNode = isSingleNode; + return this; + } + + public Boolean getIsSingleNode() { + return isSingleNode; + } + + public ClusterSpecPb 
setKind(Kind kind) { + this.kind = kind; + return this; + } + + public Kind getKind() { + return kind; + } + + public ClusterSpecPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public ClusterSpecPb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + public ClusterSpecPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public ClusterSpecPb setRuntimeEngine(RuntimeEngine runtimeEngine) { + this.runtimeEngine = runtimeEngine; + return this; + } + + public RuntimeEngine getRuntimeEngine() { + return runtimeEngine; + } + + public ClusterSpecPb setSingleUserName(String singleUserName) { + this.singleUserName = singleUserName; + return this; + } + + public String getSingleUserName() { + return singleUserName; + } + + public ClusterSpecPb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public ClusterSpecPb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public ClusterSpecPb setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + return this; + } + + public String getSparkVersion() { + return sparkVersion; + } + + public ClusterSpecPb setSshPublicKeys(Collection sshPublicKeys) { + this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + public ClusterSpecPb setUseMlRuntime(Boolean useMlRuntime) { + this.useMlRuntime = useMlRuntime; + return this; + } + + public Boolean getUseMlRuntime() { + return useMlRuntime; + } + + public ClusterSpecPb setWorkloadType(WorkloadType workloadType) { + 
this.workloadType = workloadType; + return this; + } + + public WorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterSpecPb that = (ClusterSpecPb) o; + return Objects.equals(applyPolicyDefaultValues, that.applyPolicyDefaultValues) + && Objects.equals(autoscale, that.autoscale) + && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(clusterName, that.clusterName) + && Objects.equals(customTags, that.customTags) + && Objects.equals(dataSecurityMode, that.dataSecurityMode) + && Objects.equals(dockerImage, that.dockerImage) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(isSingleNode, that.isSingleNode) + && Objects.equals(kind, that.kind) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(numWorkers, that.numWorkers) + && Objects.equals(policyId, that.policyId) + && Objects.equals(runtimeEngine, that.runtimeEngine) + && Objects.equals(singleUserName, that.singleUserName) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sparkVersion, that.sparkVersion) + && Objects.equals(sshPublicKeys, that.sshPublicKeys) + && Objects.equals(useMlRuntime, that.useMlRuntime) + && Objects.equals(workloadType, that.workloadType); + } + + 
@Override + public int hashCode() { + return Objects.hash( + applyPolicyDefaultValues, + autoscale, + autoterminationMinutes, + awsAttributes, + azureAttributes, + clusterLogConf, + clusterName, + customTags, + dataSecurityMode, + dockerImage, + driverInstancePoolId, + driverNodeTypeId, + enableElasticDisk, + enableLocalDiskEncryption, + gcpAttributes, + initScripts, + instancePoolId, + isSingleNode, + kind, + nodeTypeId, + numWorkers, + policyId, + runtimeEngine, + singleUserName, + sparkConf, + sparkEnvVars, + sparkVersion, + sshPublicKeys, + useMlRuntime, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(ClusterSpecPb.class) + .add("applyPolicyDefaultValues", applyPolicyDefaultValues) + .add("autoscale", autoscale) + .add("autoterminationMinutes", autoterminationMinutes) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("clusterLogConf", clusterLogConf) + .add("clusterName", clusterName) + .add("customTags", customTags) + .add("dataSecurityMode", dataSecurityMode) + .add("dockerImage", dockerImage) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableElasticDisk", enableElasticDisk) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", instancePoolId) + .add("isSingleNode", isSingleNode) + .add("kind", kind) + .add("nodeTypeId", nodeTypeId) + .add("numWorkers", numWorkers) + .add("policyId", policyId) + .add("runtimeEngine", runtimeEngine) + .add("singleUserName", singleUserName) + .add("sparkConf", sparkConf) + .add("sparkEnvVars", sparkEnvVars) + .add("sparkVersion", sparkVersion) + .add("sshPublicKeys", sshPublicKeys) + .add("useMlRuntime", useMlRuntime) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java index aa7d9e980..ea0b40eac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get status */ @Generated +@JsonSerialize(using = ClusterStatus.ClusterStatusSerializer.class) +@JsonDeserialize(using = ClusterStatus.ClusterStatusDeserializer.class) public class ClusterStatus { /** Unique identifier of the cluster whose status should be retrieved. 
*/ - @JsonIgnore - @QueryParam("cluster_id") private String clusterId; public ClusterStatus setClusterId(String clusterId) { @@ -42,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(ClusterStatus.class).add("clusterId", clusterId).toString(); } + + ClusterStatusPb toPb() { + ClusterStatusPb pb = new ClusterStatusPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static ClusterStatus fromPb(ClusterStatusPb pb) { + ClusterStatus model = new ClusterStatus(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class ClusterStatusSerializer extends JsonSerializer { + @Override + public void serialize(ClusterStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterStatusDeserializer extends JsonDeserializer { + @Override + public ClusterStatus deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterStatusPb pb = mapper.readValue(p, ClusterStatusPb.class); + return ClusterStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusPb.java new file mode 100755 index 000000000..7a18cb1da --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get status */ +@Generated +class ClusterStatusPb { + @JsonIgnore + @QueryParam("cluster_id") + private String clusterId; + + public ClusterStatusPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterStatusPb that = (ClusterStatusPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(ClusterStatusPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java index e847a4571..c1375bd75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java @@ -21,7 +21,7 @@ public void changeOwner(ChangeClusterOwner request) { String path = "/api/2.1/clusters/change-owner"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ChangeClusterOwnerResponse.class); @@ -35,7 +35,7 @@ public CreateClusterResponse create(CreateCluster request) { String path = "/api/2.1/clusters/create"; try { Request 
req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateClusterResponse.class); @@ -49,7 +49,7 @@ public void delete(DeleteCluster request) { String path = "/api/2.1/clusters/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteClusterResponse.class); @@ -63,7 +63,7 @@ public void edit(EditCluster request) { String path = "/api/2.1/clusters/edit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditClusterResponse.class); @@ -77,7 +77,7 @@ public GetEventsResponse events(GetEvents request) { String path = "/api/2.1/clusters/events"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, GetEventsResponse.class); @@ -91,7 +91,7 @@ public ClusterDetails get(GetClusterRequest request) { String path = "/api/2.1/clusters/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ClusterDetails.class); } catch (IOException e) { @@ -106,7 +106,7 @@ public GetClusterPermissionLevelsResponse getPermissionLevels( 
String.format("/api/2.0/permissions/clusters/%s/permissionLevels", request.getClusterId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetClusterPermissionLevelsResponse.class); } catch (IOException e) { @@ -119,7 +119,7 @@ public ClusterPermissions getPermissions(GetClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ClusterPermissions.class); } catch (IOException e) { @@ -132,7 +132,7 @@ public ListClustersResponse list(ListClustersRequest request) { String path = "/api/2.1/clusters/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListClustersResponse.class); } catch (IOException e) { @@ -169,7 +169,7 @@ public void permanentDelete(PermanentDeleteCluster request) { String path = "/api/2.1/clusters/permanent-delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PermanentDeleteClusterResponse.class); @@ -183,7 +183,7 @@ public void pin(PinCluster request) { String path = "/api/2.1/clusters/pin"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); 
apiClient.execute(req, PinClusterResponse.class); @@ -197,7 +197,7 @@ public void resize(ResizeCluster request) { String path = "/api/2.1/clusters/resize"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ResizeClusterResponse.class); @@ -211,7 +211,7 @@ public void restart(RestartCluster request) { String path = "/api/2.1/clusters/restart"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, RestartClusterResponse.class); @@ -225,7 +225,7 @@ public ClusterPermissions setPermissions(ClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ClusterPermissions.class); @@ -251,7 +251,7 @@ public void start(StartCluster request) { String path = "/api/2.1/clusters/start"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, StartClusterResponse.class); @@ -265,7 +265,7 @@ public void unpin(UnpinCluster request) { String path = "/api/2.1/clusters/unpin"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, 
request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UnpinClusterResponse.class); @@ -279,7 +279,7 @@ public void update(UpdateCluster request) { String path = "/api/2.1/clusters/update"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateClusterResponse.class); @@ -293,7 +293,7 @@ public ClusterPermissions updatePermissions(ClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ClusterPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Command.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Command.java index d6f71db37..b5ee21700 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Command.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Command.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Command.CommandSerializer.class) +@JsonDeserialize(using = Command.CommandDeserializer.class) public class Command { /** Running cluster id */ - @JsonProperty("clusterId") private String clusterId; /** Executable code */ - @JsonProperty("command") private String command; /** Running context id */ - @JsonProperty("contextId") private String contextId; /** */ - @JsonProperty("language") private Language language; public Command setClusterId(String clusterId) { @@ -86,4 +93,43 @@ public String toString() { .add("language", language) .toString(); } + + CommandPb toPb() { + CommandPb pb = new CommandPb(); + pb.setClusterId(clusterId); + pb.setCommand(command); + pb.setContextId(contextId); + pb.setLanguage(language); + + return pb; + } + + static Command fromPb(CommandPb pb) { + Command model = new Command(); + model.setClusterId(pb.getClusterId()); + model.setCommand(pb.getCommand()); + model.setContextId(pb.getContextId()); + model.setLanguage(pb.getLanguage()); + + return model; + } + + public static class CommandSerializer extends JsonSerializer { + @Override + public void serialize(Command value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CommandPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CommandDeserializer extends JsonDeserializer { + @Override + public Command deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CommandPb pb = mapper.readValue(p, CommandPb.class); + return Command.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java index 3cd62c5cb..94c2ddd5d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java @@ -21,7 +21,7 @@ public void cancel(CancelCommand request) { String path = "/api/1.2/commands/cancel"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CancelResponse.class); @@ -35,7 +35,7 @@ public CommandStatusResponse commandStatus(CommandStatusRequest request) { String path = "/api/1.2/commands/status"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CommandStatusResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public ContextStatusResponse contextStatus(ContextStatusRequest request) { String path = "/api/1.2/contexts/status"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ContextStatusResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public Created create(CreateContext request) { String path = "/api/1.2/contexts/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Created.class); @@ -75,7 +75,7 @@ public void destroy(DestroyContext request) { String path = "/api/1.2/contexts/destroy"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DestroyResponse.class); @@ -89,7 +89,7 @@ public Created execute(Command request) { String path = "/api/1.2/commands/execute"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Created.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandPb.java new file mode 100755 index 000000000..6fad47d5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CommandPb { + @JsonProperty("clusterId") + private String clusterId; + + @JsonProperty("command") + private String command; + + @JsonProperty("contextId") + private String contextId; + + @JsonProperty("language") + private Language language; + + public CommandPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public CommandPb setCommand(String command) { + this.command = command; + return this; + } + + public String getCommand() { + return command; + } + + public CommandPb setContextId(String contextId) { + this.contextId = contextId; + return this; + } + + public String getContextId() { + return contextId; + } + + public CommandPb setLanguage(Language language) { + this.language = language; + return this; + } + + public Language getLanguage() { + return language; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CommandPb that = (CommandPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(command, that.command) + && Objects.equals(contextId, that.contextId) + && Objects.equals(language, that.language); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, command, contextId, language); + } + + @Override + public String toString() { + return new ToStringer(CommandPb.class) + .add("clusterId", clusterId) + .add("command", command) + .add("contextId", contextId) + .add("language", language) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java index effb7c1a1..bf51e5634 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get command info */ @Generated +@JsonSerialize(using = CommandStatusRequest.CommandStatusRequestSerializer.class) +@JsonDeserialize(using = CommandStatusRequest.CommandStatusRequestDeserializer.class) public class CommandStatusRequest { /** */ - @JsonIgnore - @QueryParam("clusterId") private String clusterId; /** */ - @JsonIgnore - @QueryParam("commandId") private String commandId; /** */ - @JsonIgnore - @QueryParam("contextId") private String contextId; public CommandStatusRequest setClusterId(String clusterId) { @@ -76,4 +80,44 @@ public String toString() { .add("contextId", contextId) .toString(); } + + CommandStatusRequestPb toPb() { + CommandStatusRequestPb pb = new CommandStatusRequestPb(); + pb.setClusterId(clusterId); + pb.setCommandId(commandId); + pb.setContextId(contextId); + + return pb; + } + + static 
CommandStatusRequest fromPb(CommandStatusRequestPb pb) { + CommandStatusRequest model = new CommandStatusRequest(); + model.setClusterId(pb.getClusterId()); + model.setCommandId(pb.getCommandId()); + model.setContextId(pb.getContextId()); + + return model; + } + + public static class CommandStatusRequestSerializer extends JsonSerializer { + @Override + public void serialize( + CommandStatusRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CommandStatusRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CommandStatusRequestDeserializer + extends JsonDeserializer { + @Override + public CommandStatusRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CommandStatusRequestPb pb = mapper.readValue(p, CommandStatusRequestPb.class); + return CommandStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequestPb.java new file mode 100755 index 000000000..af1cb960c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get command info */ +@Generated +class CommandStatusRequestPb { + @JsonIgnore + @QueryParam("clusterId") + private String clusterId; + + @JsonIgnore + @QueryParam("commandId") + private String commandId; + + @JsonIgnore + @QueryParam("contextId") + private String contextId; + + public CommandStatusRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public CommandStatusRequestPb setCommandId(String commandId) { + this.commandId = commandId; + return this; + } + + public String getCommandId() { + return commandId; + } + + public CommandStatusRequestPb setContextId(String contextId) { + this.contextId = contextId; + return this; + } + + public String getContextId() { + return contextId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CommandStatusRequestPb that = (CommandStatusRequestPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(commandId, that.commandId) + && Objects.equals(contextId, that.contextId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, commandId, contextId); + } + + @Override + public String toString() { + return new ToStringer(CommandStatusRequestPb.class) + .add("clusterId", clusterId) + .add("commandId", commandId) + .add("contextId", contextId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java index 0d0531086..1223e4398 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CommandStatusResponse.CommandStatusResponseSerializer.class) +@JsonDeserialize(using = CommandStatusResponse.CommandStatusResponseDeserializer.class) public class CommandStatusResponse { /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("results") private Results results; /** */ - @JsonProperty("status") private CommandStatus status; public CommandStatusResponse setId(String id) { @@ -71,4 +79,45 @@ public String toString() { .add("status", status) .toString(); } + + CommandStatusResponsePb toPb() { + CommandStatusResponsePb pb = new CommandStatusResponsePb(); + pb.setId(id); + pb.setResults(results); + pb.setStatus(status); + + return pb; + } + + static CommandStatusResponse fromPb(CommandStatusResponsePb pb) { + CommandStatusResponse model = new CommandStatusResponse(); + model.setId(pb.getId()); + model.setResults(pb.getResults()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class CommandStatusResponseSerializer + extends JsonSerializer { + @Override + public 
void serialize( + CommandStatusResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CommandStatusResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CommandStatusResponseDeserializer + extends JsonDeserializer { + @Override + public CommandStatusResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CommandStatusResponsePb pb = mapper.readValue(p, CommandStatusResponsePb.class); + return CommandStatusResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponsePb.java new file mode 100755 index 000000000..960e2b9c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CommandStatusResponsePb { + @JsonProperty("id") + private String id; + + @JsonProperty("results") + private Results results; + + @JsonProperty("status") + private CommandStatus status; + + public CommandStatusResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CommandStatusResponsePb setResults(Results results) { + this.results = results; + return this; + } + + public Results getResults() { + return results; + } + + public CommandStatusResponsePb setStatus(CommandStatus status) { + this.status = status; + return this; + } + + public CommandStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CommandStatusResponsePb that = (CommandStatusResponsePb) o; + return Objects.equals(id, that.id) + && Objects.equals(results, that.results) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(id, results, status); + } + + @Override + public String toString() { + return new ToStringer(CommandStatusResponsePb.class) + .add("id", id) + .add("results", results) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java index 1da091984..15d5ae92b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java @@ -3,22 +3,28 @@ package 
com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get status */ @Generated +@JsonSerialize(using = ContextStatusRequest.ContextStatusRequestSerializer.class) +@JsonDeserialize(using = ContextStatusRequest.ContextStatusRequestDeserializer.class) public class ContextStatusRequest { /** */ - @JsonIgnore - @QueryParam("clusterId") private String clusterId; /** */ - @JsonIgnore - @QueryParam("contextId") private String contextId; public ContextStatusRequest setClusterId(String clusterId) { @@ -59,4 +65,42 @@ public String toString() { .add("contextId", contextId) .toString(); } + + ContextStatusRequestPb toPb() { + ContextStatusRequestPb pb = new ContextStatusRequestPb(); + pb.setClusterId(clusterId); + pb.setContextId(contextId); + + return pb; + } + + static ContextStatusRequest fromPb(ContextStatusRequestPb pb) { + ContextStatusRequest model = new ContextStatusRequest(); + model.setClusterId(pb.getClusterId()); + model.setContextId(pb.getContextId()); + + return model; + } + + public static class ContextStatusRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ContextStatusRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ContextStatusRequestPb pb = value.toPb(); 
+ provider.defaultSerializeValue(pb, gen); + } + } + + public static class ContextStatusRequestDeserializer + extends JsonDeserializer { + @Override + public ContextStatusRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ContextStatusRequestPb pb = mapper.readValue(p, ContextStatusRequestPb.class); + return ContextStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequestPb.java new file mode 100755 index 000000000..168828348 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get status */ +@Generated +class ContextStatusRequestPb { + @JsonIgnore + @QueryParam("clusterId") + private String clusterId; + + @JsonIgnore + @QueryParam("contextId") + private String contextId; + + public ContextStatusRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ContextStatusRequestPb setContextId(String contextId) { + this.contextId = contextId; + return this; + } + + public String getContextId() { + return contextId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextStatusRequestPb that = 
(ContextStatusRequestPb) o; + return Objects.equals(clusterId, that.clusterId) && Objects.equals(contextId, that.contextId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, contextId); + } + + @Override + public String toString() { + return new ToStringer(ContextStatusRequestPb.class) + .add("clusterId", clusterId) + .add("contextId", contextId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java index 5507ed220..a0bc97302 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ContextStatusResponse.ContextStatusResponseSerializer.class) +@JsonDeserialize(using = ContextStatusResponse.ContextStatusResponseDeserializer.class) public class ContextStatusResponse { /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("status") private ContextStatus status; public ContextStatusResponse setId(String id) { @@ -55,4 +64,43 @@ public String 
toString() { .add("status", status) .toString(); } + + ContextStatusResponsePb toPb() { + ContextStatusResponsePb pb = new ContextStatusResponsePb(); + pb.setId(id); + pb.setStatus(status); + + return pb; + } + + static ContextStatusResponse fromPb(ContextStatusResponsePb pb) { + ContextStatusResponse model = new ContextStatusResponse(); + model.setId(pb.getId()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class ContextStatusResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ContextStatusResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ContextStatusResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ContextStatusResponseDeserializer + extends JsonDeserializer { + @Override + public ContextStatusResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ContextStatusResponsePb pb = mapper.readValue(p, ContextStatusResponsePb.class); + return ContextStatusResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponsePb.java new file mode 100755 index 000000000..a2bf43904 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ContextStatusResponsePb { + @JsonProperty("id") + private String id; + + @JsonProperty("status") + private ContextStatus status; + + public ContextStatusResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ContextStatusResponsePb setStatus(ContextStatus status) { + this.status = status; + return this; + } + + public ContextStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextStatusResponsePb that = (ContextStatusResponsePb) o; + return Objects.equals(id, that.id) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(id, status); + } + + @Override + public String toString() { + return new ToStringer(ContextStatusResponsePb.class) + .add("id", id) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Converters.java new file mode 100755 index 000000000..524029e73 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.compute; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration 
duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index 027bae1c8..ca4e83e9a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCluster.CreateClusterSerializer.class) +@JsonDeserialize(using = CreateCluster.CreateClusterDeserializer.class) public class CreateCluster { /** * When set to true, fixed and default values from the policy will be used for fields that are * omitted. When set to false, only fixed values from the policy will be applied. */ - @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -31,28 +40,24 @@ public class CreateCluster { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. 
If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** * When specified, this clones libraries from a source cluster during the creation of a new * cluster. */ - @JsonProperty("clone_from") private CloneCluster cloneFrom; /** @@ -62,7 +67,6 @@ public class CreateCluster { * every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** @@ -70,7 +74,6 @@ public class CreateCluster { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. */ - @JsonProperty("cluster_name") private String clusterName; /** @@ -82,7 +85,6 @@ public class CreateCluster { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -112,18 +114,15 @@ public class CreateCluster { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -134,7 +133,6 @@ public class CreateCluster { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -142,18 +140,15 @@ public class CreateCluster { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -161,11 +156,9 @@ public class CreateCluster { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -174,7 +167,6 @@ public class CreateCluster { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** @@ -192,7 +184,6 @@ public class CreateCluster { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** @@ -201,7 +192,6 @@ public class CreateCluster { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -214,21 +204,11 @@ public class CreateCluster { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -238,11 +218,9 @@ public class CreateCluster { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -250,7 +228,6 @@ public class CreateCluster { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -266,14 +243,12 @@ public class CreateCluster { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** @@ -281,24 +256,17 @@ public class CreateCluster { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public CreateCluster setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { @@ -508,24 +476,6 @@ public String getPolicyId() { return policyId; } - public CreateCluster setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public CreateCluster setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public CreateCluster setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -580,16 +530,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public CreateCluster setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public CreateCluster setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -636,15 +576,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && 
Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -675,15 +612,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -714,17 +648,107 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + CreateClusterPb toPb() { + CreateClusterPb pb = new CreateClusterPb(); + pb.setApplyPolicyDefaultValues(applyPolicyDefaultValues); + pb.setAutoscale(autoscale); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setCloneFrom(cloneFrom); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterName(clusterName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDockerImage(dockerImage); + pb.setDriverInstancePoolId(driverInstancePoolId); + 
pb.setDriverNodeTypeId(driverNodeTypeId); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setKind(kind); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSshPublicKeys(sshPublicKeys); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static CreateCluster fromPb(CreateClusterPb pb) { + CreateCluster model = new CreateCluster(); + model.setApplyPolicyDefaultValues(pb.getApplyPolicyDefaultValues()); + model.setAutoscale(pb.getAutoscale()); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setCloneFrom(pb.getCloneFrom()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterName(pb.getClusterName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDockerImage(pb.getDockerImage()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setKind(pb.getKind()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + 
model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + model.setSparkConf(pb.getSparkConf()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class CreateClusterSerializer extends JsonSerializer { + @Override + public void serialize(CreateCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateClusterDeserializer extends JsonDeserializer { + @Override + public CreateCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateClusterPb pb = mapper.readValue(p, CreateClusterPb.class); + return CreateCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterPb.java new file mode 100755 index 000000000..f9503ba8f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterPb.java @@ -0,0 +1,496 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format ("Pb") counterpart of CreateCluster. This class carries the Jackson
 * {@code @JsonProperty} snake_case mappings; the public CreateCluster model converts to/from it
 * via toPb()/fromPb() and holds the field-level documentation.
 *
 * <p>NOTE(review): generic type parameters on the Map/Collection fields appear to have been
 * stripped by extraction (presumably Map&lt;String, String&gt; for tags/conf/env-vars and typed
 * Collections for initScripts/sshPublicKeys) — confirm against the generator output.
 */
@Generated
class CreateClusterPb {
  @JsonProperty("apply_policy_default_values")
  private Boolean applyPolicyDefaultValues;

  @JsonProperty("autoscale")
  private AutoScale autoscale;

  @JsonProperty("autotermination_minutes")
  private Long autoterminationMinutes;

  @JsonProperty("aws_attributes")
  private AwsAttributes awsAttributes;

  @JsonProperty("azure_attributes")
  private AzureAttributes azureAttributes;

  @JsonProperty("clone_from")
  private CloneCluster cloneFrom;

  @JsonProperty("cluster_log_conf")
  private ClusterLogConf clusterLogConf;

  @JsonProperty("cluster_name")
  private String clusterName;

  @JsonProperty("custom_tags")
  private Map customTags;

  @JsonProperty("data_security_mode")
  private DataSecurityMode dataSecurityMode;

  @JsonProperty("docker_image")
  private DockerImage dockerImage;

  @JsonProperty("driver_instance_pool_id")
  private String driverInstancePoolId;

  @JsonProperty("driver_node_type_id")
  private String driverNodeTypeId;

  @JsonProperty("enable_elastic_disk")
  private Boolean enableElasticDisk;

  @JsonProperty("enable_local_disk_encryption")
  private Boolean enableLocalDiskEncryption;

  @JsonProperty("gcp_attributes")
  private GcpAttributes gcpAttributes;

  @JsonProperty("init_scripts")
  private Collection initScripts;

  @JsonProperty("instance_pool_id")
  private String instancePoolId;

  @JsonProperty("is_single_node")
  private Boolean isSingleNode;

  @JsonProperty("kind")
  private Kind kind;

  @JsonProperty("node_type_id")
  private String nodeTypeId;

  @JsonProperty("num_workers")
  private Long numWorkers;

  @JsonProperty("policy_id")
  private String policyId;

  @JsonProperty("runtime_engine")
  private RuntimeEngine runtimeEngine;

  @JsonProperty("single_user_name")
  private String singleUserName;

  @JsonProperty("spark_conf")
  private Map sparkConf;

  @JsonProperty("spark_env_vars")
  private Map sparkEnvVars;

  @JsonProperty("spark_version")
  private String sparkVersion;

  @JsonProperty("ssh_public_keys")
  private Collection sshPublicKeys;

  @JsonProperty("use_ml_runtime")
  private Boolean useMlRuntime;

  @JsonProperty("workload_type")
  private WorkloadType workloadType;

  // Fluent setters and plain getters, one pair per field, in field order.

  public CreateClusterPb setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) {
    this.applyPolicyDefaultValues = applyPolicyDefaultValues;
    return this;
  }

  public Boolean getApplyPolicyDefaultValues() {
    return applyPolicyDefaultValues;
  }

  public CreateClusterPb setAutoscale(AutoScale autoscale) {
    this.autoscale = autoscale;
    return this;
  }

  public AutoScale getAutoscale() {
    return autoscale;
  }

  public CreateClusterPb setAutoterminationMinutes(Long autoterminationMinutes) {
    this.autoterminationMinutes = autoterminationMinutes;
    return this;
  }

  public Long getAutoterminationMinutes() {
    return autoterminationMinutes;
  }

  public CreateClusterPb setAwsAttributes(AwsAttributes awsAttributes) {
    this.awsAttributes = awsAttributes;
    return this;
  }

  public AwsAttributes getAwsAttributes() {
    return awsAttributes;
  }

  public CreateClusterPb setAzureAttributes(AzureAttributes azureAttributes) {
    this.azureAttributes = azureAttributes;
    return this;
  }

  public AzureAttributes getAzureAttributes() {
    return azureAttributes;
  }

  public CreateClusterPb setCloneFrom(CloneCluster cloneFrom) {
    this.cloneFrom = cloneFrom;
    return this;
  }

  public CloneCluster getCloneFrom() {
    return cloneFrom;
  }

  public CreateClusterPb setClusterLogConf(ClusterLogConf clusterLogConf) {
    this.clusterLogConf = clusterLogConf;
    return this;
  }

  public ClusterLogConf getClusterLogConf() {
    return clusterLogConf;
  }

  public CreateClusterPb setClusterName(String clusterName) {
    this.clusterName = clusterName;
    return this;
  }

  public String getClusterName() {
    return clusterName;
  }

  public CreateClusterPb setCustomTags(Map customTags) {
    this.customTags = customTags;
    return this;
  }

  public Map getCustomTags() {
    return customTags;
  }

  public CreateClusterPb setDataSecurityMode(DataSecurityMode dataSecurityMode) {
    this.dataSecurityMode = dataSecurityMode;
    return this;
  }

  public DataSecurityMode getDataSecurityMode() {
    return dataSecurityMode;
  }

  public CreateClusterPb setDockerImage(DockerImage dockerImage) {
    this.dockerImage = dockerImage;
    return this;
  }

  public DockerImage getDockerImage() {
    return dockerImage;
  }

  public CreateClusterPb setDriverInstancePoolId(String driverInstancePoolId) {
    this.driverInstancePoolId = driverInstancePoolId;
    return this;
  }

  public String getDriverInstancePoolId() {
    return driverInstancePoolId;
  }

  public CreateClusterPb setDriverNodeTypeId(String driverNodeTypeId) {
    this.driverNodeTypeId = driverNodeTypeId;
    return this;
  }

  public String getDriverNodeTypeId() {
    return driverNodeTypeId;
  }

  public CreateClusterPb setEnableElasticDisk(Boolean enableElasticDisk) {
    this.enableElasticDisk = enableElasticDisk;
    return this;
  }

  public Boolean getEnableElasticDisk() {
    return enableElasticDisk;
  }

  public CreateClusterPb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) {
    this.enableLocalDiskEncryption = enableLocalDiskEncryption;
    return this;
  }

  public Boolean getEnableLocalDiskEncryption() {
    return enableLocalDiskEncryption;
  }

  public CreateClusterPb setGcpAttributes(GcpAttributes gcpAttributes) {
    this.gcpAttributes = gcpAttributes;
    return this;
  }

  public GcpAttributes getGcpAttributes() {
    return gcpAttributes;
  }

  public CreateClusterPb setInitScripts(Collection initScripts) {
    this.initScripts = initScripts;
    return this;
  }

  public Collection getInitScripts() {
    return initScripts;
  }

  public CreateClusterPb setInstancePoolId(String instancePoolId) {
    this.instancePoolId = instancePoolId;
    return this;
  }

  public String getInstancePoolId() {
    return instancePoolId;
  }

  public CreateClusterPb setIsSingleNode(Boolean isSingleNode) {
    this.isSingleNode = isSingleNode;
    return this;
  }

  public Boolean getIsSingleNode() {
    return isSingleNode;
  }

  public CreateClusterPb setKind(Kind kind) {
    this.kind = kind;
    return this;
  }

  public Kind getKind() {
    return kind;
  }

  public CreateClusterPb setNodeTypeId(String nodeTypeId) {
    this.nodeTypeId = nodeTypeId;
    return this;
  }

  public String getNodeTypeId() {
    return nodeTypeId;
  }

  public CreateClusterPb setNumWorkers(Long numWorkers) {
    this.numWorkers = numWorkers;
    return this;
  }

  public Long getNumWorkers() {
    return numWorkers;
  }

  public CreateClusterPb setPolicyId(String policyId) {
    this.policyId = policyId;
    return this;
  }

  public String getPolicyId() {
    return policyId;
  }

  public CreateClusterPb setRuntimeEngine(RuntimeEngine runtimeEngine) {
    this.runtimeEngine = runtimeEngine;
    return this;
  }

  public RuntimeEngine getRuntimeEngine() {
    return runtimeEngine;
  }

  public CreateClusterPb setSingleUserName(String singleUserName) {
    this.singleUserName = singleUserName;
    return this;
  }

  public String getSingleUserName() {
    return singleUserName;
  }

  public CreateClusterPb setSparkConf(Map sparkConf) {
    this.sparkConf = sparkConf;
    return this;
  }

  public Map getSparkConf() {
    return sparkConf;
  }

  public CreateClusterPb setSparkEnvVars(Map sparkEnvVars) {
    this.sparkEnvVars = sparkEnvVars;
    return this;
  }

  public Map getSparkEnvVars() {
    return sparkEnvVars;
  }

  public CreateClusterPb setSparkVersion(String sparkVersion) {
    this.sparkVersion = sparkVersion;
    return this;
  }

  public String getSparkVersion() {
    return sparkVersion;
  }

  public CreateClusterPb setSshPublicKeys(Collection sshPublicKeys) {
    this.sshPublicKeys = sshPublicKeys;
    return this;
  }

  public Collection getSshPublicKeys() {
    return sshPublicKeys;
  }

  public CreateClusterPb setUseMlRuntime(Boolean useMlRuntime) {
    this.useMlRuntime = useMlRuntime;
    return this;
  }

  public Boolean getUseMlRuntime() {
    return useMlRuntime;
  }

  public CreateClusterPb setWorkloadType(WorkloadType workloadType) {
    this.workloadType = workloadType;
    return this;
  }

  public WorkloadType getWorkloadType() {
    return workloadType;
  }

  // Value semantics over all 31 fields, in declaration order.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    CreateClusterPb that = (CreateClusterPb) o;
    return Objects.equals(applyPolicyDefaultValues, that.applyPolicyDefaultValues)
        && Objects.equals(autoscale, that.autoscale)
        && Objects.equals(autoterminationMinutes, that.autoterminationMinutes)
        && Objects.equals(awsAttributes, that.awsAttributes)
        && Objects.equals(azureAttributes, that.azureAttributes)
        && Objects.equals(cloneFrom, that.cloneFrom)
        && Objects.equals(clusterLogConf, that.clusterLogConf)
        && Objects.equals(clusterName, that.clusterName)
        && Objects.equals(customTags, that.customTags)
        && Objects.equals(dataSecurityMode, that.dataSecurityMode)
        && Objects.equals(dockerImage, that.dockerImage)
        && Objects.equals(driverInstancePoolId, that.driverInstancePoolId)
        && Objects.equals(driverNodeTypeId, that.driverNodeTypeId)
        && Objects.equals(enableElasticDisk, that.enableElasticDisk)
        && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption)
        && Objects.equals(gcpAttributes, that.gcpAttributes)
        && Objects.equals(initScripts, that.initScripts)
        && Objects.equals(instancePoolId, that.instancePoolId)
        && Objects.equals(isSingleNode, that.isSingleNode)
        && Objects.equals(kind, that.kind)
        && Objects.equals(nodeTypeId, that.nodeTypeId)
        && Objects.equals(numWorkers, that.numWorkers)
        && Objects.equals(policyId, that.policyId)
        && Objects.equals(runtimeEngine, that.runtimeEngine)
        && Objects.equals(singleUserName, that.singleUserName)
        && Objects.equals(sparkConf, that.sparkConf)
        && Objects.equals(sparkEnvVars, that.sparkEnvVars)
        && Objects.equals(sparkVersion, that.sparkVersion)
        && Objects.equals(sshPublicKeys, that.sshPublicKeys)
        && Objects.equals(useMlRuntime, that.useMlRuntime)
        && Objects.equals(workloadType, that.workloadType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        applyPolicyDefaultValues,
        autoscale,
        autoterminationMinutes,
        awsAttributes,
        azureAttributes,
        cloneFrom,
        clusterLogConf,
        clusterName,
        customTags,
        dataSecurityMode,
        dockerImage,
        driverInstancePoolId,
        driverNodeTypeId,
        enableElasticDisk,
        enableLocalDiskEncryption,
        gcpAttributes,
        initScripts,
        instancePoolId,
        isSingleNode,
        kind,
        nodeTypeId,
        numWorkers,
        policyId,
        runtimeEngine,
        singleUserName,
        sparkConf,
        sparkEnvVars,
        sparkVersion,
        sshPublicKeys,
        useMlRuntime,
        workloadType);
  }

  @Override
  public String toString() {
    return new ToStringer(CreateClusterPb.class)
        .add("applyPolicyDefaultValues", applyPolicyDefaultValues)
        .add("autoscale", autoscale)
        .add("autoterminationMinutes", autoterminationMinutes)
        .add("awsAttributes", awsAttributes)
        .add("azureAttributes", azureAttributes)
        .add("cloneFrom", cloneFrom)
        .add("clusterLogConf", clusterLogConf)
        .add("clusterName", clusterName)
        .add("customTags", customTags)
        .add("dataSecurityMode", dataSecurityMode)
        .add("dockerImage", dockerImage)
        .add("driverInstancePoolId", driverInstancePoolId)
        .add("driverNodeTypeId", driverNodeTypeId)
        .add("enableElasticDisk", enableElasticDisk)
        .add("enableLocalDiskEncryption", enableLocalDiskEncryption)
        .add("gcpAttributes", gcpAttributes)
        .add("initScripts", initScripts)
        .add("instancePoolId", instancePoolId)
        .add("isSingleNode", isSingleNode)
        .add("kind", kind)
        .add("nodeTypeId", nodeTypeId)
        .add("numWorkers", numWorkers)
        .add("policyId", policyId)
        .add("runtimeEngine", runtimeEngine)
        .add("singleUserName", singleUserName)
        .add("sparkConf", sparkConf)
        .add("sparkEnvVars", sparkEnvVars)
        .add("sparkVersion", sparkVersion)
        .add("sshPublicKeys", sshPublicKeys)
        .add("useMlRuntime", useMlRuntime)
        .add("workloadType", workloadType)
        .toString();
  }
}
@JsonProperty("cluster_id") private String clusterId; public CreateClusterResponse setClusterId(String clusterId) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateClusterResponse.class).add("clusterId", clusterId).toString(); } + + CreateClusterResponsePb toPb() { + CreateClusterResponsePb pb = new CreateClusterResponsePb(); + pb.setClusterId(clusterId); + + return pb; + } + + static CreateClusterResponse fromPb(CreateClusterResponsePb pb) { + CreateClusterResponse model = new CreateClusterResponse(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class CreateClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateClusterResponseDeserializer + extends JsonDeserializer { + @Override + public CreateClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateClusterResponsePb pb = mapper.readValue(p, CreateClusterResponsePb.class); + return CreateClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponsePb.java new file mode 100755 index 000000000..5d9142c90 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateClusterResponsePb { + @JsonProperty("cluster_id") + private String clusterId; + + public CreateClusterResponsePb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateClusterResponsePb that = (CreateClusterResponsePb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(CreateClusterResponsePb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java index f904593e6..625a62a82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateContext.CreateContextSerializer.class) +@JsonDeserialize(using = CreateContext.CreateContextDeserializer.class) public class CreateContext { /** Running cluster id */ - @JsonProperty("clusterId") private String clusterId; /** */ - @JsonProperty("language") private Language language; public CreateContext setClusterId(String clusterId) { @@ -55,4 +64,39 @@ public String toString() { .add("language", language) .toString(); } + + CreateContextPb toPb() { + CreateContextPb pb = new CreateContextPb(); + pb.setClusterId(clusterId); + pb.setLanguage(language); + + return pb; + } + + static CreateContext fromPb(CreateContextPb pb) { + CreateContext model = new CreateContext(); + model.setClusterId(pb.getClusterId()); + model.setLanguage(pb.getLanguage()); + + return model; + } + + public static class CreateContextSerializer extends JsonSerializer { + @Override + public void serialize(CreateContext value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateContextPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateContextDeserializer extends JsonDeserializer { + @Override + public CreateContext deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateContextPb pb = mapper.readValue(p, CreateContextPb.class); + return CreateContext.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContextPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContextPb.java new file mode 100755 index 000000000..96250ee83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContextPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateContextPb { + @JsonProperty("clusterId") + private String clusterId; + + @JsonProperty("language") + private Language language; + + public CreateContextPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public CreateContextPb setLanguage(Language language) { + this.language = language; + return this; + } + + public Language getLanguage() { + return language; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateContextPb that = (CreateContextPb) o; + return Objects.equals(clusterId, that.clusterId) && Objects.equals(language, that.language); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, language); + } + + @Override + public String toString() { + return new ToStringer(CreateContextPb.class) + .add("clusterId", clusterId) + .add("language", language) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java index dcd40b667..213f0cfde 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateInstancePool.CreateInstancePoolSerializer.class) +@JsonDeserialize(using = CreateInstancePool.CreateInstancePoolDeserializer.class) public class CreateInstancePool { /** * Attributes related to instance pools running on Amazon Web Services. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private InstancePoolAwsAttributes awsAttributes; /** * Attributes related to instance pools running on Azure. If not specified at pool creation, a set * of default values will be used. */ - @JsonProperty("azure_attributes") private InstancePoolAzureAttributes azureAttributes; /** @@ -31,11 +40,9 @@ public class CreateInstancePool { * *

- Currently, Databricks allows at most 45 custom tags */ - @JsonProperty("custom_tags") private Map customTags; /** Defines the specification of the disks that will be attached to all spark containers. */ - @JsonProperty("disk_spec") private DiskSpec diskSpec; /** @@ -44,14 +51,12 @@ public class CreateInstancePool { * feature requires specific AWS permissions to function correctly - refer to the User Guide for * more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** * Attributes related to instance pools running on Google Cloud Platform. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private InstancePoolGcpAttributes gcpAttributes; /** @@ -61,14 +66,12 @@ public class CreateInstancePool { * must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove * idle instances from the cache if min cache size could still hold. */ - @JsonProperty("idle_instance_autotermination_minutes") private Long idleInstanceAutoterminationMinutes; /** * Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 * characters. */ - @JsonProperty("instance_pool_name") private String instancePoolName; /** @@ -76,11 +79,9 @@ public class CreateInstancePool { * clusters and idle instances. Clusters that require further instance provisioning will fail * during upsize requests. */ - @JsonProperty("max_capacity") private Long maxCapacity; /** Minimum number of idle instances to keep in the instance pool */ - @JsonProperty("min_idle_instances") private Long minIdleInstances; /** @@ -89,11 +90,9 @@ public class CreateInstancePool { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. 
*/ - @JsonProperty("node_type_id") private String nodeTypeId; /** Custom Docker Image BYOC */ - @JsonProperty("preloaded_docker_images") private Collection preloadedDockerImages; /** @@ -101,7 +100,6 @@ public class CreateInstancePool { * started with the preloaded Spark version will start faster. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("preloaded_spark_versions") private Collection preloadedSparkVersions; public CreateInstancePool setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { @@ -280,4 +278,62 @@ public String toString() { .add("preloadedSparkVersions", preloadedSparkVersions) .toString(); } + + CreateInstancePoolPb toPb() { + CreateInstancePoolPb pb = new CreateInstancePoolPb(); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setCustomTags(customTags); + pb.setDiskSpec(diskSpec); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setGcpAttributes(gcpAttributes); + pb.setIdleInstanceAutoterminationMinutes(idleInstanceAutoterminationMinutes); + pb.setInstancePoolName(instancePoolName); + pb.setMaxCapacity(maxCapacity); + pb.setMinIdleInstances(minIdleInstances); + pb.setNodeTypeId(nodeTypeId); + pb.setPreloadedDockerImages(preloadedDockerImages); + pb.setPreloadedSparkVersions(preloadedSparkVersions); + + return pb; + } + + static CreateInstancePool fromPb(CreateInstancePoolPb pb) { + CreateInstancePool model = new CreateInstancePool(); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setCustomTags(pb.getCustomTags()); + model.setDiskSpec(pb.getDiskSpec()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setIdleInstanceAutoterminationMinutes(pb.getIdleInstanceAutoterminationMinutes()); + model.setInstancePoolName(pb.getInstancePoolName()); + model.setMaxCapacity(pb.getMaxCapacity()); + 
model.setMinIdleInstances(pb.getMinIdleInstances()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setPreloadedDockerImages(pb.getPreloadedDockerImages()); + model.setPreloadedSparkVersions(pb.getPreloadedSparkVersions()); + + return model; + } + + public static class CreateInstancePoolSerializer extends JsonSerializer { + @Override + public void serialize(CreateInstancePool value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateInstancePoolPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateInstancePoolDeserializer extends JsonDeserializer { + @Override + public CreateInstancePool deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateInstancePoolPb pb = mapper.readValue(p, CreateInstancePoolPb.class); + return CreateInstancePool.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolPb.java new file mode 100755 index 000000000..b3968d48d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolPb.java @@ -0,0 +1,229 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateInstancePoolPb { + @JsonProperty("aws_attributes") + private InstancePoolAwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private InstancePoolAzureAttributes azureAttributes; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("disk_spec") + private DiskSpec diskSpec; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("gcp_attributes") + private InstancePoolGcpAttributes gcpAttributes; + + @JsonProperty("idle_instance_autotermination_minutes") + private Long idleInstanceAutoterminationMinutes; + + @JsonProperty("instance_pool_name") + private String instancePoolName; + + @JsonProperty("max_capacity") + private Long maxCapacity; + + @JsonProperty("min_idle_instances") + private Long minIdleInstances; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("preloaded_docker_images") + private Collection preloadedDockerImages; + + @JsonProperty("preloaded_spark_versions") + private Collection preloadedSparkVersions; + + public CreateInstancePoolPb setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public InstancePoolAwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public CreateInstancePoolPb setAzureAttributes(InstancePoolAzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public InstancePoolAzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public CreateInstancePoolPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return 
customTags; + } + + public CreateInstancePoolPb setDiskSpec(DiskSpec diskSpec) { + this.diskSpec = diskSpec; + return this; + } + + public DiskSpec getDiskSpec() { + return diskSpec; + } + + public CreateInstancePoolPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public CreateInstancePoolPb setGcpAttributes(InstancePoolGcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public InstancePoolGcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public CreateInstancePoolPb setIdleInstanceAutoterminationMinutes( + Long idleInstanceAutoterminationMinutes) { + this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; + return this; + } + + public Long getIdleInstanceAutoterminationMinutes() { + return idleInstanceAutoterminationMinutes; + } + + public CreateInstancePoolPb setInstancePoolName(String instancePoolName) { + this.instancePoolName = instancePoolName; + return this; + } + + public String getInstancePoolName() { + return instancePoolName; + } + + public CreateInstancePoolPb setMaxCapacity(Long maxCapacity) { + this.maxCapacity = maxCapacity; + return this; + } + + public Long getMaxCapacity() { + return maxCapacity; + } + + public CreateInstancePoolPb setMinIdleInstances(Long minIdleInstances) { + this.minIdleInstances = minIdleInstances; + return this; + } + + public Long getMinIdleInstances() { + return minIdleInstances; + } + + public CreateInstancePoolPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public CreateInstancePoolPb setPreloadedDockerImages( + Collection preloadedDockerImages) { + this.preloadedDockerImages = preloadedDockerImages; + return this; + } + + public Collection getPreloadedDockerImages() { + return 
preloadedDockerImages; + } + + public CreateInstancePoolPb setPreloadedSparkVersions(Collection preloadedSparkVersions) { + this.preloadedSparkVersions = preloadedSparkVersions; + return this; + } + + public Collection getPreloadedSparkVersions() { + return preloadedSparkVersions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateInstancePoolPb that = (CreateInstancePoolPb) o; + return Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(customTags, that.customTags) + && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals( + idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) + && Objects.equals(instancePoolName, that.instancePoolName) + && Objects.equals(maxCapacity, that.maxCapacity) + && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) + && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions); + } + + @Override + public int hashCode() { + return Objects.hash( + awsAttributes, + azureAttributes, + customTags, + diskSpec, + enableElasticDisk, + gcpAttributes, + idleInstanceAutoterminationMinutes, + instancePoolName, + maxCapacity, + minIdleInstances, + nodeTypeId, + preloadedDockerImages, + preloadedSparkVersions); + } + + @Override + public String toString() { + return new ToStringer(CreateInstancePoolPb.class) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("customTags", customTags) + .add("diskSpec", diskSpec) + .add("enableElasticDisk", enableElasticDisk) + .add("gcpAttributes", gcpAttributes) + .add("idleInstanceAutoterminationMinutes", 
idleInstanceAutoterminationMinutes) + .add("instancePoolName", instancePoolName) + .add("maxCapacity", maxCapacity) + .add("minIdleInstances", minIdleInstances) + .add("nodeTypeId", nodeTypeId) + .add("preloadedDockerImages", preloadedDockerImages) + .add("preloadedSparkVersions", preloadedSparkVersions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java index 995f2f5b3..b7635fc52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateInstancePoolResponse.CreateInstancePoolResponseSerializer.class) +@JsonDeserialize(using = CreateInstancePoolResponse.CreateInstancePoolResponseDeserializer.class) public class CreateInstancePoolResponse { /** The ID of the created instance pool. 
*/ - @JsonProperty("instance_pool_id") private String instancePoolId; public CreateInstancePoolResponse setInstancePoolId(String instancePoolId) { @@ -41,4 +51,41 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + CreateInstancePoolResponsePb toPb() { + CreateInstancePoolResponsePb pb = new CreateInstancePoolResponsePb(); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static CreateInstancePoolResponse fromPb(CreateInstancePoolResponsePb pb) { + CreateInstancePoolResponse model = new CreateInstancePoolResponse(); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class CreateInstancePoolResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateInstancePoolResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateInstancePoolResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateInstancePoolResponseDeserializer + extends JsonDeserializer { + @Override + public CreateInstancePoolResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateInstancePoolResponsePb pb = mapper.readValue(p, CreateInstancePoolResponsePb.class); + return CreateInstancePoolResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponsePb.java new file mode 100755 index 000000000..682190f82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateInstancePoolResponsePb { + @JsonProperty("instance_pool_id") + private String instancePoolId; + + public CreateInstancePoolResponsePb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateInstancePoolResponsePb that = (CreateInstancePoolResponsePb) o; + return Objects.equals(instancePoolId, that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(CreateInstancePoolResponsePb.class) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java index 8b44ccd98..62e2582ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePolicy.CreatePolicySerializer.class) +@JsonDeserialize(using = CreatePolicy.CreatePolicyDeserializer.class) public class CreatePolicy { /** * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -16,32 +27,27 @@ public class CreatePolicy { *

[Databricks Cluster Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("definition") private String definition; /** Additional human-readable description of the cluster policy. */ - @JsonProperty("description") private String description; /** * A list of libraries to be installed on the next cluster restart that uses this policy. The * maximum number of libraries is 500. */ - @JsonProperty("libraries") private Collection libraries; /** * Max number of clusters per user that can be active using this policy. If not present, there is * no max limit. */ - @JsonProperty("max_clusters_per_user") private Long maxClustersPerUser; /** * Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and * 100 characters. */ - @JsonProperty("name") private String name; /** @@ -54,7 +60,6 @@ public class CreatePolicy { *

[Databricks Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; /** @@ -64,7 +69,6 @@ public class CreatePolicy { *

Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to * customize the policy definition. */ - @JsonProperty("policy_family_id") private String policyFamilyId; public CreatePolicy setDefinition(String definition) { @@ -168,4 +172,49 @@ public String toString() { .add("policyFamilyId", policyFamilyId) .toString(); } + + CreatePolicyPb toPb() { + CreatePolicyPb pb = new CreatePolicyPb(); + pb.setDefinition(definition); + pb.setDescription(description); + pb.setLibraries(libraries); + pb.setMaxClustersPerUser(maxClustersPerUser); + pb.setName(name); + pb.setPolicyFamilyDefinitionOverrides(policyFamilyDefinitionOverrides); + pb.setPolicyFamilyId(policyFamilyId); + + return pb; + } + + static CreatePolicy fromPb(CreatePolicyPb pb) { + CreatePolicy model = new CreatePolicy(); + model.setDefinition(pb.getDefinition()); + model.setDescription(pb.getDescription()); + model.setLibraries(pb.getLibraries()); + model.setMaxClustersPerUser(pb.getMaxClustersPerUser()); + model.setName(pb.getName()); + model.setPolicyFamilyDefinitionOverrides(pb.getPolicyFamilyDefinitionOverrides()); + model.setPolicyFamilyId(pb.getPolicyFamilyId()); + + return model; + } + + public static class CreatePolicySerializer extends JsonSerializer { + @Override + public void serialize(CreatePolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePolicyDeserializer extends JsonDeserializer { + @Override + public CreatePolicy deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePolicyPb pb = mapper.readValue(p, CreatePolicyPb.class); + return CreatePolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyPb.java new file mode 100755 index 000000000..d41b4a8b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyPb.java @@ -0,0 +1,135 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreatePolicyPb { + @JsonProperty("definition") + private String definition; + + @JsonProperty("description") + private String description; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("max_clusters_per_user") + private Long maxClustersPerUser; + + @JsonProperty("name") + private String name; + + @JsonProperty("policy_family_definition_overrides") + private String policyFamilyDefinitionOverrides; + + @JsonProperty("policy_family_id") + private String policyFamilyId; + + public CreatePolicyPb setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public CreatePolicyPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreatePolicyPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public CreatePolicyPb setMaxClustersPerUser(Long maxClustersPerUser) { + 
this.maxClustersPerUser = maxClustersPerUser; + return this; + } + + public Long getMaxClustersPerUser() { + return maxClustersPerUser; + } + + public CreatePolicyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreatePolicyPb setPolicyFamilyDefinitionOverrides(String policyFamilyDefinitionOverrides) { + this.policyFamilyDefinitionOverrides = policyFamilyDefinitionOverrides; + return this; + } + + public String getPolicyFamilyDefinitionOverrides() { + return policyFamilyDefinitionOverrides; + } + + public CreatePolicyPb setPolicyFamilyId(String policyFamilyId) { + this.policyFamilyId = policyFamilyId; + return this; + } + + public String getPolicyFamilyId() { + return policyFamilyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePolicyPb that = (CreatePolicyPb) o; + return Objects.equals(definition, that.definition) + && Objects.equals(description, that.description) + && Objects.equals(libraries, that.libraries) + && Objects.equals(maxClustersPerUser, that.maxClustersPerUser) + && Objects.equals(name, that.name) + && Objects.equals(policyFamilyDefinitionOverrides, that.policyFamilyDefinitionOverrides) + && Objects.equals(policyFamilyId, that.policyFamilyId); + } + + @Override + public int hashCode() { + return Objects.hash( + definition, + description, + libraries, + maxClustersPerUser, + name, + policyFamilyDefinitionOverrides, + policyFamilyId); + } + + @Override + public String toString() { + return new ToStringer(CreatePolicyPb.class) + .add("definition", definition) + .add("description", description) + .add("libraries", libraries) + .add("maxClustersPerUser", maxClustersPerUser) + .add("name", name) + .add("policyFamilyDefinitionOverrides", policyFamilyDefinitionOverrides) + .add("policyFamilyId", policyFamilyId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponse.java index 227bc03d6..1a1f203b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePolicyResponse.CreatePolicyResponseSerializer.class) +@JsonDeserialize(using = CreatePolicyResponse.CreatePolicyResponseDeserializer.class) public class CreatePolicyResponse { /** Canonical unique identifier for the cluster policy. 
*/ - @JsonProperty("policy_id") private String policyId; public CreatePolicyResponse setPolicyId(String policyId) { @@ -39,4 +49,40 @@ public int hashCode() { public String toString() { return new ToStringer(CreatePolicyResponse.class).add("policyId", policyId).toString(); } + + CreatePolicyResponsePb toPb() { + CreatePolicyResponsePb pb = new CreatePolicyResponsePb(); + pb.setPolicyId(policyId); + + return pb; + } + + static CreatePolicyResponse fromPb(CreatePolicyResponsePb pb) { + CreatePolicyResponse model = new CreatePolicyResponse(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class CreatePolicyResponseSerializer extends JsonSerializer { + @Override + public void serialize( + CreatePolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePolicyResponseDeserializer + extends JsonDeserializer { + @Override + public CreatePolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePolicyResponsePb pb = mapper.readValue(p, CreatePolicyResponsePb.class); + return CreatePolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponsePb.java new file mode 100755 index 000000000..2236070e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicyResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePolicyResponsePb { + @JsonProperty("policy_id") + private String policyId; + + public CreatePolicyResponsePb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePolicyResponsePb that = (CreatePolicyResponsePb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(CreatePolicyResponsePb.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponse.java index bc9dd43b6..ecfc2facb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateResponse.CreateResponseSerializer.class) +@JsonDeserialize(using = CreateResponse.CreateResponseDeserializer.class) public class CreateResponse { /** The global init script ID. */ - @JsonProperty("script_id") private String scriptId; public CreateResponse setScriptId(String scriptId) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(CreateResponse.class).add("scriptId", scriptId).toString(); } + + CreateResponsePb toPb() { + CreateResponsePb pb = new CreateResponsePb(); + pb.setScriptId(scriptId); + + return pb; + } + + static CreateResponse fromPb(CreateResponsePb pb) { + CreateResponse model = new CreateResponse(); + model.setScriptId(pb.getScriptId()); + + return model; + } + + public static class CreateResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateResponseDeserializer extends JsonDeserializer { + @Override + public CreateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateResponsePb pb = mapper.readValue(p, CreateResponsePb.class); + return CreateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponsePb.java new file mode 100755 index 000000000..452d6edd2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateResponsePb { + @JsonProperty("script_id") + private String scriptId; + + public CreateResponsePb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateResponsePb that = (CreateResponsePb) o; + return Objects.equals(scriptId, that.scriptId); + } + + @Override + public int hashCode() { + return Objects.hash(scriptId); + } + + @Override + public String toString() { + return new ToStringer(CreateResponsePb.class).add("scriptId", scriptId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Created.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Created.java index 4667668f3..40e72e81a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Created.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Created.java @@ -4,13 +4,23 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Created.CreatedSerializer.class) +@JsonDeserialize(using = Created.CreatedDeserializer.class) public class Created { /** */ - @JsonProperty("id") private String id; public Created setId(String id) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(Created.class).add("id", id).toString(); } + + CreatedPb toPb() { + CreatedPb pb = new CreatedPb(); + pb.setId(id); + + return pb; + } + + static Created fromPb(CreatedPb pb) { + Created model = new Created(); + model.setId(pb.getId()); + + return model; + } + + public static class CreatedSerializer extends JsonSerializer { + @Override + public void serialize(Created value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatedPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatedDeserializer extends JsonDeserializer { + @Override + public Created deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatedPb pb = mapper.readValue(p, CreatedPb.class); + return Created.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatedPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatedPb.java new file mode 100755 index 000000000..d8a7201e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatedPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatedPb { + @JsonProperty("id") + private String id; + + public CreatedPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatedPb that = (CreatedPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CreatedPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java index a2765d651..614bf2134 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CustomPolicyTag.CustomPolicyTagSerializer.class) +@JsonDeserialize(using = CustomPolicyTag.CustomPolicyTagDeserializer.class) public class CustomPolicyTag { /** * The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be @@ -17,7 +28,6 @@ public class CustomPolicyTag { *

- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17) */ - @JsonProperty("key") private String key; /** @@ -26,7 +36,6 @@ public class CustomPolicyTag { *

- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24) */ - @JsonProperty("value") private String value; public CustomPolicyTag setKey(String key) { @@ -64,4 +73,40 @@ public int hashCode() { public String toString() { return new ToStringer(CustomPolicyTag.class).add("key", key).add("value", value).toString(); } + + CustomPolicyTagPb toPb() { + CustomPolicyTagPb pb = new CustomPolicyTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static CustomPolicyTag fromPb(CustomPolicyTagPb pb) { + CustomPolicyTag model = new CustomPolicyTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class CustomPolicyTagSerializer extends JsonSerializer { + @Override + public void serialize(CustomPolicyTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CustomPolicyTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CustomPolicyTagDeserializer extends JsonDeserializer { + @Override + public CustomPolicyTag deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomPolicyTagPb pb = mapper.readValue(p, CustomPolicyTagPb.class); + return CustomPolicyTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTagPb.java new file mode 100755 index 000000000..84a6fc0f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CustomPolicyTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public CustomPolicyTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public CustomPolicyTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomPolicyTagPb that = (CustomPolicyTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(CustomPolicyTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java index 4ff098588..733a371e2 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DataPlaneEventDetails.DataPlaneEventDetailsSerializer.class) +@JsonDeserialize(using = DataPlaneEventDetails.DataPlaneEventDetailsDeserializer.class) public class DataPlaneEventDetails { /** */ - @JsonProperty("event_type") private DataPlaneEventDetailsEventType eventType; /** */ - @JsonProperty("executor_failures") private Long executorFailures; /** */ - @JsonProperty("host_id") private String hostId; /** */ - @JsonProperty("timestamp") private Long timestamp; public DataPlaneEventDetails setEventType(DataPlaneEventDetailsEventType eventType) { @@ -86,4 +93,47 @@ public String toString() { .add("timestamp", timestamp) .toString(); } + + DataPlaneEventDetailsPb toPb() { + DataPlaneEventDetailsPb pb = new DataPlaneEventDetailsPb(); + pb.setEventType(eventType); + pb.setExecutorFailures(executorFailures); + pb.setHostId(hostId); + pb.setTimestamp(timestamp); + + return pb; + } + + static DataPlaneEventDetails fromPb(DataPlaneEventDetailsPb pb) { + DataPlaneEventDetails model = new DataPlaneEventDetails(); 
+ model.setEventType(pb.getEventType()); + model.setExecutorFailures(pb.getExecutorFailures()); + model.setHostId(pb.getHostId()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class DataPlaneEventDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + DataPlaneEventDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataPlaneEventDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataPlaneEventDetailsDeserializer + extends JsonDeserializer { + @Override + public DataPlaneEventDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataPlaneEventDetailsPb pb = mapper.readValue(p, DataPlaneEventDetailsPb.class); + return DataPlaneEventDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsPb.java new file mode 100755 index 000000000..bef3efde3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DataPlaneEventDetailsPb { + @JsonProperty("event_type") + private DataPlaneEventDetailsEventType eventType; + + @JsonProperty("executor_failures") + private Long executorFailures; + + @JsonProperty("host_id") + private String hostId; + + @JsonProperty("timestamp") + private Long timestamp; + + public DataPlaneEventDetailsPb setEventType(DataPlaneEventDetailsEventType eventType) { + this.eventType = eventType; + return this; + } + + public DataPlaneEventDetailsEventType getEventType() { + return eventType; + } + + public DataPlaneEventDetailsPb setExecutorFailures(Long executorFailures) { + this.executorFailures = executorFailures; + return this; + } + + public Long getExecutorFailures() { + return executorFailures; + } + + public DataPlaneEventDetailsPb setHostId(String hostId) { + this.hostId = hostId; + return this; + } + + public String getHostId() { + return hostId; + } + + public DataPlaneEventDetailsPb setTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + public Long getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataPlaneEventDetailsPb that = (DataPlaneEventDetailsPb) o; + return Objects.equals(eventType, that.eventType) + && Objects.equals(executorFailures, that.executorFailures) + && Objects.equals(hostId, that.hostId) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(eventType, executorFailures, hostId, timestamp); + } + + @Override + public String toString() { + return new ToStringer(DataPlaneEventDetailsPb.class) + .add("eventType", eventType) + .add("executorFailures", 
executorFailures) + .add("hostId", hostId) + .add("timestamp", timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java index a41d4b921..516d95811 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location in DBFS */ @Generated +@JsonSerialize(using = DbfsStorageInfo.DbfsStorageInfoSerializer.class) +@JsonDeserialize(using = DbfsStorageInfo.DbfsStorageInfoDeserializer.class) public class DbfsStorageInfo { /** dbfs destination, e.g. 
`dbfs:/my/path` */ - @JsonProperty("destination") private String destination; public DbfsStorageInfo setDestination(String destination) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DbfsStorageInfo.class).add("destination", destination).toString(); } + + DbfsStorageInfoPb toPb() { + DbfsStorageInfoPb pb = new DbfsStorageInfoPb(); + pb.setDestination(destination); + + return pb; + } + + static DbfsStorageInfo fromPb(DbfsStorageInfoPb pb) { + DbfsStorageInfo model = new DbfsStorageInfo(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class DbfsStorageInfoSerializer extends JsonSerializer { + @Override + public void serialize(DbfsStorageInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbfsStorageInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbfsStorageInfoDeserializer extends JsonDeserializer { + @Override + public DbfsStorageInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbfsStorageInfoPb pb = mapper.readValue(p, DbfsStorageInfoPb.class); + return DbfsStorageInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfoPb.java new file mode 100755 index 000000000..6ea9f310f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location in DBFS */ +@Generated +class DbfsStorageInfoPb { + @JsonProperty("destination") + private String destination; + + public DbfsStorageInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbfsStorageInfoPb that = (DbfsStorageInfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(DbfsStorageInfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java index 36f74cbfa..cddfb8b91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteCluster.DeleteClusterSerializer.class) +@JsonDeserialize(using = DeleteCluster.DeleteClusterDeserializer.class) public class DeleteCluster { /** The cluster to be terminated. */ - @JsonProperty("cluster_id") private String clusterId; public DeleteCluster setClusterId(String clusterId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCluster.class).add("clusterId", clusterId).toString(); } + + DeleteClusterPb toPb() { + DeleteClusterPb pb = new DeleteClusterPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static DeleteCluster fromPb(DeleteClusterPb pb) { + DeleteCluster model = new DeleteCluster(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class DeleteClusterSerializer extends JsonSerializer { + @Override + public void serialize(DeleteCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteClusterDeserializer extends JsonDeserializer { + @Override + public DeleteCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteClusterPb pb = mapper.readValue(p, DeleteClusterPb.class); + return DeleteCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterPb.java new file mode 100755 index 000000000..38a3e9084 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteClusterPb { + @JsonProperty("cluster_id") + private String clusterId; + + public DeleteClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteClusterPb that = (DeleteClusterPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(DeleteClusterPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java index 91fc276a6..ccc5f6828 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteClusterResponse.DeleteClusterResponseSerializer.class) +@JsonDeserialize(using = DeleteClusterResponse.DeleteClusterResponseDeserializer.class) public class DeleteClusterResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteClusterResponse.class).toString(); } + + DeleteClusterResponsePb toPb() { + DeleteClusterResponsePb pb = new DeleteClusterResponsePb(); + + return pb; + } + + static DeleteClusterResponse fromPb(DeleteClusterResponsePb pb) { + DeleteClusterResponse model = new DeleteClusterResponse(); + + return model; + } + + public static class DeleteClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteClusterResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it 
is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteClusterResponsePb pb = mapper.readValue(p, DeleteClusterResponsePb.class); + return DeleteClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponsePb.java new file mode 100755 index 000000000..9b3ef302f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java index 993e73ca0..70eb0c3b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete init script */ @Generated +@JsonSerialize(using = DeleteGlobalInitScriptRequest.DeleteGlobalInitScriptRequestSerializer.class) +@JsonDeserialize( + using = DeleteGlobalInitScriptRequest.DeleteGlobalInitScriptRequestDeserializer.class) public class DeleteGlobalInitScriptRequest { /** The ID of the global init script. */ - @JsonIgnore private String scriptId; + private String scriptId; public DeleteGlobalInitScriptRequest setScriptId(String scriptId) { this.scriptId = scriptId; @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteGlobalInitScriptRequest.class).add("scriptId", scriptId).toString(); } + + DeleteGlobalInitScriptRequestPb toPb() { + DeleteGlobalInitScriptRequestPb pb = new DeleteGlobalInitScriptRequestPb(); + pb.setScriptId(scriptId); + + return pb; + } + + static DeleteGlobalInitScriptRequest fromPb(DeleteGlobalInitScriptRequestPb pb) { + DeleteGlobalInitScriptRequest model = new DeleteGlobalInitScriptRequest(); + model.setScriptId(pb.getScriptId()); + + return model; + } + + public static class DeleteGlobalInitScriptRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteGlobalInitScriptRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteGlobalInitScriptRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteGlobalInitScriptRequestDeserializer + extends JsonDeserializer { + @Override + public 
DeleteGlobalInitScriptRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteGlobalInitScriptRequestPb pb = + mapper.readValue(p, DeleteGlobalInitScriptRequestPb.class); + return DeleteGlobalInitScriptRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequestPb.java new file mode 100755 index 000000000..30d9bf5bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete init script */ +@Generated +class DeleteGlobalInitScriptRequestPb { + @JsonIgnore private String scriptId; + + public DeleteGlobalInitScriptRequestPb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteGlobalInitScriptRequestPb that = (DeleteGlobalInitScriptRequestPb) o; + return Objects.equals(scriptId, that.scriptId); + } + + @Override + public int hashCode() { + return Objects.hash(scriptId); + } + + @Override + public String toString() { + return new ToStringer(DeleteGlobalInitScriptRequestPb.class) + .add("scriptId", scriptId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java index 8539bd52e..24dc1b78c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteInstancePool.DeleteInstancePoolSerializer.class) +@JsonDeserialize(using = DeleteInstancePool.DeleteInstancePoolDeserializer.class) public class DeleteInstancePool { /** The instance pool to be terminated. 
*/ - @JsonProperty("instance_pool_id") private String instancePoolId; public DeleteInstancePool setInstancePoolId(String instancePoolId) { @@ -41,4 +51,38 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + DeleteInstancePoolPb toPb() { + DeleteInstancePoolPb pb = new DeleteInstancePoolPb(); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static DeleteInstancePool fromPb(DeleteInstancePoolPb pb) { + DeleteInstancePool model = new DeleteInstancePool(); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class DeleteInstancePoolSerializer extends JsonSerializer { + @Override + public void serialize(DeleteInstancePool value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteInstancePoolPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteInstancePoolDeserializer extends JsonDeserializer { + @Override + public DeleteInstancePool deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteInstancePoolPb pb = mapper.readValue(p, DeleteInstancePoolPb.class); + return DeleteInstancePool.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolPb.java new file mode 100755 index 000000000..2a5a5ebf3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteInstancePoolPb { + @JsonProperty("instance_pool_id") + private String instancePoolId; + + public DeleteInstancePoolPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteInstancePoolPb that = (DeleteInstancePoolPb) o; + return Objects.equals(instancePoolId, that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstancePoolPb.class) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java index 3a71cf38e..18b06bff7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteInstancePoolResponse.DeleteInstancePoolResponseSerializer.class) +@JsonDeserialize(using = DeleteInstancePoolResponse.DeleteInstancePoolResponseDeserializer.class) public class DeleteInstancePoolResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteInstancePoolResponse.class).toString(); } + + DeleteInstancePoolResponsePb toPb() { + DeleteInstancePoolResponsePb pb = new DeleteInstancePoolResponsePb(); + + return pb; + } + + static DeleteInstancePoolResponse fromPb(DeleteInstancePoolResponsePb pb) { + DeleteInstancePoolResponse model = new DeleteInstancePoolResponse(); + + return model; + } + + public static class DeleteInstancePoolResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteInstancePoolResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteInstancePoolResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteInstancePoolResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteInstancePoolResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteInstancePoolResponsePb pb = mapper.readValue(p, DeleteInstancePoolResponsePb.class); + return DeleteInstancePoolResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponsePb.java new file mode 100755 index 000000000..dc0b7e9e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteInstancePoolResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstancePoolResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java index dc0449d94..d4b3f6929 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeletePolicy.DeletePolicySerializer.class) +@JsonDeserialize(using = DeletePolicy.DeletePolicyDeserializer.class) public class DeletePolicy { /** The ID of the policy to delete. */ - @JsonProperty("policy_id") private String policyId; public DeletePolicy setPolicyId(String policyId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePolicy.class).add("policyId", policyId).toString(); } + + DeletePolicyPb toPb() { + DeletePolicyPb pb = new DeletePolicyPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static DeletePolicy fromPb(DeletePolicyPb pb) { + DeletePolicy model = new DeletePolicy(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class DeletePolicySerializer extends JsonSerializer { + @Override + public void serialize(DeletePolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePolicyDeserializer extends JsonDeserializer { + @Override + public DeletePolicy deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePolicyPb pb = mapper.readValue(p, DeletePolicyPb.class); + return DeletePolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyPb.java new file mode 100755 index 000000000..423549126 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeletePolicyPb { + @JsonProperty("policy_id") + private String policyId; + + public DeletePolicyPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePolicyPb that = (DeletePolicyPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(DeletePolicyPb.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java index 60aa4fcc7..58bbc54af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java @@ -4,9 +4,21 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeletePolicyResponse.DeletePolicyResponseSerializer.class) +@JsonDeserialize(using = DeletePolicyResponse.DeletePolicyResponseDeserializer.class) public class DeletePolicyResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePolicyResponse.class).toString(); } + + DeletePolicyResponsePb toPb() { + DeletePolicyResponsePb pb = new DeletePolicyResponsePb(); + + return pb; + } + + static DeletePolicyResponse fromPb(DeletePolicyResponsePb pb) { + DeletePolicyResponse model = new DeletePolicyResponse(); + + return model; + } + + public static class DeletePolicyResponseSerializer extends JsonSerializer { + @Override + public void serialize( + DeletePolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePolicyResponseDeserializer + extends JsonDeserializer { + @Override + public DeletePolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePolicyResponsePb pb = mapper.readValue(p, DeletePolicyResponsePb.class); + return DeletePolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponsePb.java new file mode 100755 index 000000000..04ce727b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeletePolicyResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeletePolicyResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java index 62648157e..f8e2dd47c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponsePb.java new file mode 100755 index 000000000..dbdb112b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java index edc144f8b..8d1658882 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DestroyContext.DestroyContextSerializer.class) +@JsonDeserialize(using = DestroyContext.DestroyContextDeserializer.class) public class DestroyContext { /** */ - @JsonProperty("clusterId") private String clusterId; /** */ - @JsonProperty("contextId") private String contextId; public DestroyContext setClusterId(String clusterId) { @@ -55,4 +64,40 @@ public String toString() { .add("contextId", contextId) .toString(); } + + DestroyContextPb toPb() { + DestroyContextPb pb = new DestroyContextPb(); + pb.setClusterId(clusterId); + pb.setContextId(contextId); + + return pb; + } + + static DestroyContext fromPb(DestroyContextPb pb) { + DestroyContext model = new DestroyContext(); + model.setClusterId(pb.getClusterId()); + model.setContextId(pb.getContextId()); + + return model; + } + + public static class DestroyContextSerializer extends JsonSerializer { + @Override + public void serialize(DestroyContext value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DestroyContextPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DestroyContextDeserializer extends JsonDeserializer { + @Override + public DestroyContext deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DestroyContextPb pb = mapper.readValue(p, DestroyContextPb.class); + return DestroyContext.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContextPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContextPb.java new file mode 100755 index 000000000..841c65945 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContextPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DestroyContextPb { + @JsonProperty("clusterId") + private String clusterId; + + @JsonProperty("contextId") + private String contextId; + + public DestroyContextPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public DestroyContextPb setContextId(String contextId) { + this.contextId = contextId; + return this; + } + + public String getContextId() { + return contextId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DestroyContextPb that = (DestroyContextPb) o; + return Objects.equals(clusterId, that.clusterId) && Objects.equals(contextId, that.contextId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, contextId); + } + + @Override + public String toString() { + return new ToStringer(DestroyContextPb.class) + .add("clusterId", clusterId) + .add("contextId", contextId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java index 2bf691176..8f591b3f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DestroyResponse.DestroyResponseSerializer.class) +@JsonDeserialize(using = DestroyResponse.DestroyResponseDeserializer.class) public class DestroyResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DestroyResponse.class).toString(); } + + DestroyResponsePb toPb() { + DestroyResponsePb pb = new DestroyResponsePb(); + + return pb; + } + + static DestroyResponse fromPb(DestroyResponsePb pb) { + DestroyResponse model = new DestroyResponse(); + + return model; + } + + public static class DestroyResponseSerializer extends JsonSerializer { + @Override + public void serialize(DestroyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DestroyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DestroyResponseDeserializer extends JsonDeserializer { + @Override + public DestroyResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DestroyResponsePb pb = mapper.readValue(p, DestroyResponsePb.class); + return DestroyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponsePb.java new file mode 100755 index 000000000..780360221 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DestroyResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DestroyResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java index e9dbb915e..351aaad8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,6 +22,8 @@ * Databricks will launch a total of 6 disks, 100 GiB each, for this cluster. */ @Generated +@JsonSerialize(using = DiskSpec.DiskSpecSerializer.class) +@JsonDeserialize(using = DiskSpec.DiskSpecDeserializer.class) public class DiskSpec { /** * The number of disks launched for each instance: - This feature is only enabled for supported @@ -30,11 +41,9 @@ public class DiskSpec { *

Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: * `/remote_volume0`, `/remote_volume1`, and etc. */ - @JsonProperty("disk_count") private Long diskCount; /** */ - @JsonProperty("disk_iops") private Long diskIops; /** @@ -45,15 +54,12 @@ public class DiskSpec { * *

For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB */ - @JsonProperty("disk_size") private Long diskSize; /** */ - @JsonProperty("disk_throughput") private Long diskThroughput; /** The type of disks that will be launched with this cluster. */ - @JsonProperty("disk_type") private DiskType diskType; public DiskSpec setDiskCount(Long diskCount) { @@ -128,4 +134,45 @@ public String toString() { .add("diskType", diskType) .toString(); } + + DiskSpecPb toPb() { + DiskSpecPb pb = new DiskSpecPb(); + pb.setDiskCount(diskCount); + pb.setDiskIops(diskIops); + pb.setDiskSize(diskSize); + pb.setDiskThroughput(diskThroughput); + pb.setDiskType(diskType); + + return pb; + } + + static DiskSpec fromPb(DiskSpecPb pb) { + DiskSpec model = new DiskSpec(); + model.setDiskCount(pb.getDiskCount()); + model.setDiskIops(pb.getDiskIops()); + model.setDiskSize(pb.getDiskSize()); + model.setDiskThroughput(pb.getDiskThroughput()); + model.setDiskType(pb.getDiskType()); + + return model; + } + + public static class DiskSpecSerializer extends JsonSerializer { + @Override + public void serialize(DiskSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DiskSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DiskSpecDeserializer extends JsonDeserializer { + @Override + public DiskSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DiskSpecPb pb = mapper.readValue(p, DiskSpecPb.class); + return DiskSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpecPb.java new file mode 100755 index 000000000..b245738b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpecPb.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Describes the disks that are launched for each instance in the spark cluster. For example, if the + * cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then + * Databricks will launch a total of 6 disks, 100 GiB each, for this cluster. 
+ */ +@Generated +class DiskSpecPb { + @JsonProperty("disk_count") + private Long diskCount; + + @JsonProperty("disk_iops") + private Long diskIops; + + @JsonProperty("disk_size") + private Long diskSize; + + @JsonProperty("disk_throughput") + private Long diskThroughput; + + @JsonProperty("disk_type") + private DiskType diskType; + + public DiskSpecPb setDiskCount(Long diskCount) { + this.diskCount = diskCount; + return this; + } + + public Long getDiskCount() { + return diskCount; + } + + public DiskSpecPb setDiskIops(Long diskIops) { + this.diskIops = diskIops; + return this; + } + + public Long getDiskIops() { + return diskIops; + } + + public DiskSpecPb setDiskSize(Long diskSize) { + this.diskSize = diskSize; + return this; + } + + public Long getDiskSize() { + return diskSize; + } + + public DiskSpecPb setDiskThroughput(Long diskThroughput) { + this.diskThroughput = diskThroughput; + return this; + } + + public Long getDiskThroughput() { + return diskThroughput; + } + + public DiskSpecPb setDiskType(DiskType diskType) { + this.diskType = diskType; + return this; + } + + public DiskType getDiskType() { + return diskType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DiskSpecPb that = (DiskSpecPb) o; + return Objects.equals(diskCount, that.diskCount) + && Objects.equals(diskIops, that.diskIops) + && Objects.equals(diskSize, that.diskSize) + && Objects.equals(diskThroughput, that.diskThroughput) + && Objects.equals(diskType, that.diskType); + } + + @Override + public int hashCode() { + return Objects.hash(diskCount, diskIops, diskSize, diskThroughput, diskType); + } + + @Override + public String toString() { + return new ToStringer(DiskSpecPb.class) + .add("diskCount", diskCount) + .add("diskIops", diskIops) + .add("diskSize", diskSize) + .add("diskThroughput", diskThroughput) + .add("diskType", diskType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java index 3e04994c6..249bc815e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java @@ -4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Describes the disk type. */ @Generated +@JsonSerialize(using = DiskType.DiskTypeSerializer.class) +@JsonDeserialize(using = DiskType.DiskTypeDeserializer.class) public class DiskType { /** * All Azure Disk types that Databricks supports. See * https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks */ - @JsonProperty("azure_disk_volume_type") private DiskTypeAzureDiskVolumeType azureDiskVolumeType; /** * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for * details. 
*/ - @JsonProperty("ebs_volume_type") private DiskTypeEbsVolumeType ebsVolumeType; public DiskType setAzureDiskVolumeType(DiskTypeAzureDiskVolumeType azureDiskVolumeType) { @@ -63,4 +72,39 @@ public String toString() { .add("ebsVolumeType", ebsVolumeType) .toString(); } + + DiskTypePb toPb() { + DiskTypePb pb = new DiskTypePb(); + pb.setAzureDiskVolumeType(azureDiskVolumeType); + pb.setEbsVolumeType(ebsVolumeType); + + return pb; + } + + static DiskType fromPb(DiskTypePb pb) { + DiskType model = new DiskType(); + model.setAzureDiskVolumeType(pb.getAzureDiskVolumeType()); + model.setEbsVolumeType(pb.getEbsVolumeType()); + + return model; + } + + public static class DiskTypeSerializer extends JsonSerializer { + @Override + public void serialize(DiskType value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DiskTypePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DiskTypeDeserializer extends JsonDeserializer { + @Override + public DiskType deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DiskTypePb pb = mapper.readValue(p, DiskTypePb.class); + return DiskType.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypePb.java new file mode 100755 index 000000000..a62552199 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Describes the disk type. */ +@Generated +class DiskTypePb { + @JsonProperty("azure_disk_volume_type") + private DiskTypeAzureDiskVolumeType azureDiskVolumeType; + + @JsonProperty("ebs_volume_type") + private DiskTypeEbsVolumeType ebsVolumeType; + + public DiskTypePb setAzureDiskVolumeType(DiskTypeAzureDiskVolumeType azureDiskVolumeType) { + this.azureDiskVolumeType = azureDiskVolumeType; + return this; + } + + public DiskTypeAzureDiskVolumeType getAzureDiskVolumeType() { + return azureDiskVolumeType; + } + + public DiskTypePb setEbsVolumeType(DiskTypeEbsVolumeType ebsVolumeType) { + this.ebsVolumeType = ebsVolumeType; + return this; + } + + public DiskTypeEbsVolumeType getEbsVolumeType() { + return ebsVolumeType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DiskTypePb that = (DiskTypePb) o; + return Objects.equals(azureDiskVolumeType, that.azureDiskVolumeType) + && Objects.equals(ebsVolumeType, that.ebsVolumeType); + } + + @Override + public int hashCode() { + return Objects.hash(azureDiskVolumeType, ebsVolumeType); + } + + @Override + public String toString() { + return new ToStringer(DiskTypePb.class) + .add("azureDiskVolumeType", azureDiskVolumeType) + .add("ebsVolumeType", ebsVolumeType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuth.java index 58ff11bba..d20fe25fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuth.java @@ -4,17 
+4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DockerBasicAuth.DockerBasicAuthSerializer.class) +@JsonDeserialize(using = DockerBasicAuth.DockerBasicAuthDeserializer.class) public class DockerBasicAuth { /** Password of the user */ - @JsonProperty("password") private String password; /** Name of the user */ - @JsonProperty("username") private String username; public DockerBasicAuth setPassword(String password) { @@ -55,4 +64,40 @@ public String toString() { .add("username", username) .toString(); } + + DockerBasicAuthPb toPb() { + DockerBasicAuthPb pb = new DockerBasicAuthPb(); + pb.setPassword(password); + pb.setUsername(username); + + return pb; + } + + static DockerBasicAuth fromPb(DockerBasicAuthPb pb) { + DockerBasicAuth model = new DockerBasicAuth(); + model.setPassword(pb.getPassword()); + model.setUsername(pb.getUsername()); + + return model; + } + + public static class DockerBasicAuthSerializer extends JsonSerializer { + @Override + public void serialize(DockerBasicAuth value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DockerBasicAuthPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DockerBasicAuthDeserializer extends JsonDeserializer { + @Override + public DockerBasicAuth 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DockerBasicAuthPb pb = mapper.readValue(p, DockerBasicAuthPb.class); + return DockerBasicAuth.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuthPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuthPb.java new file mode 100755 index 000000000..5716ab71f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasicAuthPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DockerBasicAuthPb { + @JsonProperty("password") + private String password; + + @JsonProperty("username") + private String username; + + public DockerBasicAuthPb setPassword(String password) { + this.password = password; + return this; + } + + public String getPassword() { + return password; + } + + public DockerBasicAuthPb setUsername(String username) { + this.username = username; + return this; + } + + public String getUsername() { + return username; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DockerBasicAuthPb that = (DockerBasicAuthPb) o; + return Objects.equals(password, that.password) && Objects.equals(username, that.username); + } + + @Override + public int hashCode() { + return Objects.hash(password, username); + } + + @Override + public String toString() { + return new ToStringer(DockerBasicAuthPb.class) + .add("password", password) + 
.add("username", username) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java index 8e435bd76..ab92f5ecf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DockerImage.DockerImageSerializer.class) +@JsonDeserialize(using = DockerImage.DockerImageDeserializer.class) public class DockerImage { /** Basic auth with username and password */ - @JsonProperty("basic_auth") private DockerBasicAuth basicAuth; /** URL of the docker image. 
*/ - @JsonProperty("url") private String url; public DockerImage setBasicAuth(DockerBasicAuth basicAuth) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(DockerImage.class).add("basicAuth", basicAuth).add("url", url).toString(); } + + DockerImagePb toPb() { + DockerImagePb pb = new DockerImagePb(); + pb.setBasicAuth(basicAuth); + pb.setUrl(url); + + return pb; + } + + static DockerImage fromPb(DockerImagePb pb) { + DockerImage model = new DockerImage(); + model.setBasicAuth(pb.getBasicAuth()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class DockerImageSerializer extends JsonSerializer { + @Override + public void serialize(DockerImage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DockerImagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DockerImageDeserializer extends JsonDeserializer { + @Override + public DockerImage deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DockerImagePb pb = mapper.readValue(p, DockerImagePb.class); + return DockerImage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImagePb.java new file mode 100755 index 000000000..78847b8e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImagePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DockerImagePb { + @JsonProperty("basic_auth") + private DockerBasicAuth basicAuth; + + @JsonProperty("url") + private String url; + + public DockerImagePb setBasicAuth(DockerBasicAuth basicAuth) { + this.basicAuth = basicAuth; + return this; + } + + public DockerBasicAuth getBasicAuth() { + return basicAuth; + } + + public DockerImagePb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DockerImagePb that = (DockerImagePb) o; + return Objects.equals(basicAuth, that.basicAuth) && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(basicAuth, url); + } + + @Override + public String toString() { + return new ToStringer(DockerImagePb.class) + .add("basicAuth", basicAuth) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index bbf12f00d..09e66dc83 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = EditCluster.EditClusterSerializer.class) +@JsonDeserialize(using = EditCluster.EditClusterDeserializer.class) public class EditCluster { /** * When set to true, fixed and default values from the policy will be used for fields that are * omitted. When set to false, only fixed values from the policy will be applied. */ - @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -31,25 +40,21 @@ public class EditCluster { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** ID of the cluster */ - @JsonProperty("cluster_id") private String clusterId; /** @@ -59,7 +64,6 @@ public class EditCluster { * every `5 mins`. 
The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** @@ -67,7 +71,6 @@ public class EditCluster { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. */ - @JsonProperty("cluster_name") private String clusterName; /** @@ -79,7 +82,6 @@ public class EditCluster { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -109,18 +111,15 @@ public class EditCluster { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -131,7 +130,6 @@ public class EditCluster { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -139,18 +137,15 @@ public class EditCluster { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -158,11 +153,9 @@ public class EditCluster { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -171,7 +164,6 @@ public class EditCluster { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** @@ -189,7 +181,6 @@ public class EditCluster { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** @@ -198,7 +189,6 @@ public class EditCluster { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -211,21 +201,11 @@ public class EditCluster { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -235,11 +215,9 @@ public class EditCluster { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -247,7 +225,6 @@ public class EditCluster { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -263,14 +240,12 @@ public class EditCluster { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** @@ -278,24 +253,17 @@ public class EditCluster { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public EditCluster setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { @@ -505,24 +473,6 @@ public String getPolicyId() { return policyId; } - public EditCluster setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public EditCluster setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public EditCluster setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -577,15 +527,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public EditCluster setTotalInitialRemoteShuffleDiskSize(Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public EditCluster setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -632,15 +573,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, 
that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -671,15 +609,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -710,17 +645,107 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + EditClusterPb toPb() { + EditClusterPb pb = new EditClusterPb(); + pb.setApplyPolicyDefaultValues(applyPolicyDefaultValues); + pb.setAutoscale(autoscale); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterId(clusterId); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterName(clusterName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDockerImage(dockerImage); + pb.setDriverInstancePoolId(driverInstancePoolId); + 
pb.setDriverNodeTypeId(driverNodeTypeId); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setKind(kind); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSshPublicKeys(sshPublicKeys); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static EditCluster fromPb(EditClusterPb pb) { + EditCluster model = new EditCluster(); + model.setApplyPolicyDefaultValues(pb.getApplyPolicyDefaultValues()); + model.setAutoscale(pb.getAutoscale()); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterId(pb.getClusterId()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterName(pb.getClusterName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDockerImage(pb.getDockerImage()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setKind(pb.getKind()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + 
model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + model.setSparkConf(pb.getSparkConf()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class EditClusterSerializer extends JsonSerializer { + @Override + public void serialize(EditCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditClusterDeserializer extends JsonDeserializer { + @Override + public EditCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditClusterPb pb = mapper.readValue(p, EditClusterPb.class); + return EditCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterPb.java new file mode 100755 index 000000000..0c4a772a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterPb.java @@ -0,0 +1,496 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class EditClusterPb { + @JsonProperty("apply_policy_default_values") + private Boolean applyPolicyDefaultValues; + + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("autotermination_minutes") + private Long autoterminationMinutes; + + @JsonProperty("aws_attributes") + private AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private AzureAttributes azureAttributes; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("cluster_log_conf") + private ClusterLogConf clusterLogConf; + + @JsonProperty("cluster_name") + private String clusterName; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("data_security_mode") + private DataSecurityMode dataSecurityMode; + + @JsonProperty("docker_image") + private DockerImage dockerImage; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("gcp_attributes") + private GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("is_single_node") + private Boolean isSingleNode; + + @JsonProperty("kind") + private Kind kind; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_workers") + private Long numWorkers; + + @JsonProperty("policy_id") + private String policyId; + + 
@JsonProperty("runtime_engine") + private RuntimeEngine runtimeEngine; + + @JsonProperty("single_user_name") + private String singleUserName; + + @JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("spark_version") + private String sparkVersion; + + @JsonProperty("ssh_public_keys") + private Collection sshPublicKeys; + + @JsonProperty("use_ml_runtime") + private Boolean useMlRuntime; + + @JsonProperty("workload_type") + private WorkloadType workloadType; + + public EditClusterPb setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { + this.applyPolicyDefaultValues = applyPolicyDefaultValues; + return this; + } + + public Boolean getApplyPolicyDefaultValues() { + return applyPolicyDefaultValues; + } + + public EditClusterPb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public EditClusterPb setAutoterminationMinutes(Long autoterminationMinutes) { + this.autoterminationMinutes = autoterminationMinutes; + return this; + } + + public Long getAutoterminationMinutes() { + return autoterminationMinutes; + } + + public EditClusterPb setAwsAttributes(AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public AwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public EditClusterPb setAzureAttributes(AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public EditClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public EditClusterPb setClusterLogConf(ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public ClusterLogConf getClusterLogConf() { + return 
clusterLogConf; + } + + public EditClusterPb setClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public String getClusterName() { + return clusterName; + } + + public EditClusterPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public EditClusterPb setDataSecurityMode(DataSecurityMode dataSecurityMode) { + this.dataSecurityMode = dataSecurityMode; + return this; + } + + public DataSecurityMode getDataSecurityMode() { + return dataSecurityMode; + } + + public EditClusterPb setDockerImage(DockerImage dockerImage) { + this.dockerImage = dockerImage; + return this; + } + + public DockerImage getDockerImage() { + return dockerImage; + } + + public EditClusterPb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public EditClusterPb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public EditClusterPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public EditClusterPb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public EditClusterPb setGcpAttributes(GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public EditClusterPb setInitScripts(Collection initScripts) { + this.initScripts = 
initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public EditClusterPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public EditClusterPb setIsSingleNode(Boolean isSingleNode) { + this.isSingleNode = isSingleNode; + return this; + } + + public Boolean getIsSingleNode() { + return isSingleNode; + } + + public EditClusterPb setKind(Kind kind) { + this.kind = kind; + return this; + } + + public Kind getKind() { + return kind; + } + + public EditClusterPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public EditClusterPb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + public EditClusterPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public EditClusterPb setRuntimeEngine(RuntimeEngine runtimeEngine) { + this.runtimeEngine = runtimeEngine; + return this; + } + + public RuntimeEngine getRuntimeEngine() { + return runtimeEngine; + } + + public EditClusterPb setSingleUserName(String singleUserName) { + this.singleUserName = singleUserName; + return this; + } + + public String getSingleUserName() { + return singleUserName; + } + + public EditClusterPb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public EditClusterPb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public EditClusterPb setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + return this; + } + + public String 
getSparkVersion() { + return sparkVersion; + } + + public EditClusterPb setSshPublicKeys(Collection sshPublicKeys) { + this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + public EditClusterPb setUseMlRuntime(Boolean useMlRuntime) { + this.useMlRuntime = useMlRuntime; + return this; + } + + public Boolean getUseMlRuntime() { + return useMlRuntime; + } + + public EditClusterPb setWorkloadType(WorkloadType workloadType) { + this.workloadType = workloadType; + return this; + } + + public WorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditClusterPb that = (EditClusterPb) o; + return Objects.equals(applyPolicyDefaultValues, that.applyPolicyDefaultValues) + && Objects.equals(autoscale, that.autoscale) + && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(clusterName, that.clusterName) + && Objects.equals(customTags, that.customTags) + && Objects.equals(dataSecurityMode, that.dataSecurityMode) + && Objects.equals(dockerImage, that.dockerImage) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(isSingleNode, that.isSingleNode) + && Objects.equals(kind, that.kind) + && Objects.equals(nodeTypeId, 
that.nodeTypeId) + && Objects.equals(numWorkers, that.numWorkers) + && Objects.equals(policyId, that.policyId) + && Objects.equals(runtimeEngine, that.runtimeEngine) + && Objects.equals(singleUserName, that.singleUserName) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sparkVersion, that.sparkVersion) + && Objects.equals(sshPublicKeys, that.sshPublicKeys) + && Objects.equals(useMlRuntime, that.useMlRuntime) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + applyPolicyDefaultValues, + autoscale, + autoterminationMinutes, + awsAttributes, + azureAttributes, + clusterId, + clusterLogConf, + clusterName, + customTags, + dataSecurityMode, + dockerImage, + driverInstancePoolId, + driverNodeTypeId, + enableElasticDisk, + enableLocalDiskEncryption, + gcpAttributes, + initScripts, + instancePoolId, + isSingleNode, + kind, + nodeTypeId, + numWorkers, + policyId, + runtimeEngine, + singleUserName, + sparkConf, + sparkEnvVars, + sparkVersion, + sshPublicKeys, + useMlRuntime, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(EditClusterPb.class) + .add("applyPolicyDefaultValues", applyPolicyDefaultValues) + .add("autoscale", autoscale) + .add("autoterminationMinutes", autoterminationMinutes) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("clusterId", clusterId) + .add("clusterLogConf", clusterLogConf) + .add("clusterName", clusterName) + .add("customTags", customTags) + .add("dataSecurityMode", dataSecurityMode) + .add("dockerImage", dockerImage) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableElasticDisk", enableElasticDisk) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", 
instancePoolId) + .add("isSingleNode", isSingleNode) + .add("kind", kind) + .add("nodeTypeId", nodeTypeId) + .add("numWorkers", numWorkers) + .add("policyId", policyId) + .add("runtimeEngine", runtimeEngine) + .add("singleUserName", singleUserName) + .add("sparkConf", sparkConf) + .add("sparkEnvVars", sparkEnvVars) + .add("sparkVersion", sparkVersion) + .add("sshPublicKeys", sshPublicKeys) + .add("useMlRuntime", useMlRuntime) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java index fe1297b0a..95dffb317 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditClusterResponse.EditClusterResponseSerializer.class) +@JsonDeserialize(using = EditClusterResponse.EditClusterResponseDeserializer.class) public class EditClusterResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(EditClusterResponse.class).toString(); } + + EditClusterResponsePb toPb() { + 
EditClusterResponsePb pb = new EditClusterResponsePb(); + + return pb; + } + + static EditClusterResponse fromPb(EditClusterResponsePb pb) { + EditClusterResponse model = new EditClusterResponse(); + + return model; + } + + public static class EditClusterResponseSerializer extends JsonSerializer { + @Override + public void serialize(EditClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditClusterResponseDeserializer + extends JsonDeserializer { + @Override + public EditClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditClusterResponsePb pb = mapper.readValue(p, EditClusterResponsePb.class); + return EditClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponsePb.java new file mode 100755 index 000000000..564f99841 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java index 5f1c2d218..e8397cc52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = EditInstancePool.EditInstancePoolSerializer.class) +@JsonDeserialize(using = EditInstancePool.EditInstancePoolDeserializer.class) public class EditInstancePool { /** * Additional tags for pool resources. 
Databricks will tag all pool resources (e.g., AWS instances @@ -16,7 +27,6 @@ public class EditInstancePool { * *

- Currently, Databricks allows at most 45 custom tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -26,18 +36,15 @@ public class EditInstancePool { * must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove * idle instances from the cache if min cache size could still hold. */ - @JsonProperty("idle_instance_autotermination_minutes") private Long idleInstanceAutoterminationMinutes; /** Instance pool ID */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** * Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 * characters. */ - @JsonProperty("instance_pool_name") private String instancePoolName; /** @@ -45,11 +52,9 @@ public class EditInstancePool { * clusters and idle instances. Clusters that require further instance provisioning will fail * during upsize requests. */ - @JsonProperty("max_capacity") private Long maxCapacity; /** Minimum number of idle instances to keep in the instance pool */ - @JsonProperty("min_idle_instances") private Long minIdleInstances; /** @@ -58,7 +63,6 @@ public class EditInstancePool { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. 
*/ - @JsonProperty("node_type_id") private String nodeTypeId; public EditInstancePool setCustomTags(Map customTags) { @@ -164,4 +168,50 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .toString(); } + + EditInstancePoolPb toPb() { + EditInstancePoolPb pb = new EditInstancePoolPb(); + pb.setCustomTags(customTags); + pb.setIdleInstanceAutoterminationMinutes(idleInstanceAutoterminationMinutes); + pb.setInstancePoolId(instancePoolId); + pb.setInstancePoolName(instancePoolName); + pb.setMaxCapacity(maxCapacity); + pb.setMinIdleInstances(minIdleInstances); + pb.setNodeTypeId(nodeTypeId); + + return pb; + } + + static EditInstancePool fromPb(EditInstancePoolPb pb) { + EditInstancePool model = new EditInstancePool(); + model.setCustomTags(pb.getCustomTags()); + model.setIdleInstanceAutoterminationMinutes(pb.getIdleInstanceAutoterminationMinutes()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setInstancePoolName(pb.getInstancePoolName()); + model.setMaxCapacity(pb.getMaxCapacity()); + model.setMinIdleInstances(pb.getMinIdleInstances()); + model.setNodeTypeId(pb.getNodeTypeId()); + + return model; + } + + public static class EditInstancePoolSerializer extends JsonSerializer { + @Override + public void serialize(EditInstancePool value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditInstancePoolPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditInstancePoolDeserializer extends JsonDeserializer { + @Override + public EditInstancePool deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditInstancePoolPb pb = mapper.readValue(p, EditInstancePoolPb.class); + return EditInstancePool.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolPb.java new file mode 100755 index 000000000..9469a6853 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class EditInstancePoolPb { + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("idle_instance_autotermination_minutes") + private Long idleInstanceAutoterminationMinutes; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("instance_pool_name") + private String instancePoolName; + + @JsonProperty("max_capacity") + private Long maxCapacity; + + @JsonProperty("min_idle_instances") + private Long minIdleInstances; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + public EditInstancePoolPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public EditInstancePoolPb setIdleInstanceAutoterminationMinutes( + Long idleInstanceAutoterminationMinutes) { + this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; + return this; + } + + public Long getIdleInstanceAutoterminationMinutes() { + return idleInstanceAutoterminationMinutes; + } + + public EditInstancePoolPb setInstancePoolId(String instancePoolId) { + 
this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public EditInstancePoolPb setInstancePoolName(String instancePoolName) { + this.instancePoolName = instancePoolName; + return this; + } + + public String getInstancePoolName() { + return instancePoolName; + } + + public EditInstancePoolPb setMaxCapacity(Long maxCapacity) { + this.maxCapacity = maxCapacity; + return this; + } + + public Long getMaxCapacity() { + return maxCapacity; + } + + public EditInstancePoolPb setMinIdleInstances(Long minIdleInstances) { + this.minIdleInstances = minIdleInstances; + return this; + } + + public Long getMinIdleInstances() { + return minIdleInstances; + } + + public EditInstancePoolPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditInstancePoolPb that = (EditInstancePoolPb) o; + return Objects.equals(customTags, that.customTags) + && Objects.equals( + idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(instancePoolName, that.instancePoolName) + && Objects.equals(maxCapacity, that.maxCapacity) + && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeId, that.nodeTypeId); + } + + @Override + public int hashCode() { + return Objects.hash( + customTags, + idleInstanceAutoterminationMinutes, + instancePoolId, + instancePoolName, + maxCapacity, + minIdleInstances, + nodeTypeId); + } + + @Override + public String toString() { + return new ToStringer(EditInstancePoolPb.class) + .add("customTags", customTags) + .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) + .add("instancePoolId", instancePoolId) + 
.add("instancePoolName", instancePoolName) + .add("maxCapacity", maxCapacity) + .add("minIdleInstances", minIdleInstances) + .add("nodeTypeId", nodeTypeId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java index a2c4bbdd3..956647d4d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditInstancePoolResponse.EditInstancePoolResponseSerializer.class) +@JsonDeserialize(using = EditInstancePoolResponse.EditInstancePoolResponseDeserializer.class) public class EditInstancePoolResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(EditInstancePoolResponse.class).toString(); } + + EditInstancePoolResponsePb toPb() { + EditInstancePoolResponsePb pb = new EditInstancePoolResponsePb(); + + return pb; + } + + static EditInstancePoolResponse fromPb(EditInstancePoolResponsePb pb) { + EditInstancePoolResponse model = new EditInstancePoolResponse(); + + return model; + } + + 
public static class EditInstancePoolResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + EditInstancePoolResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditInstancePoolResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditInstancePoolResponseDeserializer + extends JsonDeserializer { + @Override + public EditInstancePoolResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditInstancePoolResponsePb pb = mapper.readValue(p, EditInstancePoolResponsePb.class); + return EditInstancePoolResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponsePb.java new file mode 100755 index 000000000..20066fe0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditInstancePoolResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditInstancePoolResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java index d81f9cf4d..7c5042305 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EditPolicy.EditPolicySerializer.class) +@JsonDeserialize(using = EditPolicy.EditPolicyDeserializer.class) public class EditPolicy { /** * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
@@ -16,32 +27,27 @@ public class EditPolicy { *

[Databricks Cluster Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("definition") private String definition; /** Additional human-readable description of the cluster policy. */ - @JsonProperty("description") private String description; /** * A list of libraries to be installed on the next cluster restart that uses this policy. The * maximum number of libraries is 500. */ - @JsonProperty("libraries") private Collection libraries; /** * Max number of clusters per user that can be active using this policy. If not present, there is * no max limit. */ - @JsonProperty("max_clusters_per_user") private Long maxClustersPerUser; /** * Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and * 100 characters. */ - @JsonProperty("name") private String name; /** @@ -54,7 +60,6 @@ public class EditPolicy { *

[Databricks Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; /** @@ -64,11 +69,9 @@ public class EditPolicy { *

Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to * customize the policy definition. */ - @JsonProperty("policy_family_id") private String policyFamilyId; /** The ID of the policy to update. */ - @JsonProperty("policy_id") private String policyId; public EditPolicy setDefinition(String definition) { @@ -184,4 +187,51 @@ public String toString() { .add("policyId", policyId) .toString(); } + + EditPolicyPb toPb() { + EditPolicyPb pb = new EditPolicyPb(); + pb.setDefinition(definition); + pb.setDescription(description); + pb.setLibraries(libraries); + pb.setMaxClustersPerUser(maxClustersPerUser); + pb.setName(name); + pb.setPolicyFamilyDefinitionOverrides(policyFamilyDefinitionOverrides); + pb.setPolicyFamilyId(policyFamilyId); + pb.setPolicyId(policyId); + + return pb; + } + + static EditPolicy fromPb(EditPolicyPb pb) { + EditPolicy model = new EditPolicy(); + model.setDefinition(pb.getDefinition()); + model.setDescription(pb.getDescription()); + model.setLibraries(pb.getLibraries()); + model.setMaxClustersPerUser(pb.getMaxClustersPerUser()); + model.setName(pb.getName()); + model.setPolicyFamilyDefinitionOverrides(pb.getPolicyFamilyDefinitionOverrides()); + model.setPolicyFamilyId(pb.getPolicyFamilyId()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class EditPolicySerializer extends JsonSerializer { + @Override + public void serialize(EditPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditPolicyDeserializer extends JsonDeserializer { + @Override + public EditPolicy deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditPolicyPb pb = mapper.readValue(p, EditPolicyPb.class); + return EditPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyPb.java new file mode 100755 index 000000000..c44df12d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyPb.java @@ -0,0 +1,150 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EditPolicyPb { + @JsonProperty("definition") + private String definition; + + @JsonProperty("description") + private String description; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("max_clusters_per_user") + private Long maxClustersPerUser; + + @JsonProperty("name") + private String name; + + @JsonProperty("policy_family_definition_overrides") + private String policyFamilyDefinitionOverrides; + + @JsonProperty("policy_family_id") + private String policyFamilyId; + + @JsonProperty("policy_id") + private String policyId; + + public EditPolicyPb setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public EditPolicyPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public EditPolicyPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public EditPolicyPb 
setMaxClustersPerUser(Long maxClustersPerUser) { + this.maxClustersPerUser = maxClustersPerUser; + return this; + } + + public Long getMaxClustersPerUser() { + return maxClustersPerUser; + } + + public EditPolicyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EditPolicyPb setPolicyFamilyDefinitionOverrides(String policyFamilyDefinitionOverrides) { + this.policyFamilyDefinitionOverrides = policyFamilyDefinitionOverrides; + return this; + } + + public String getPolicyFamilyDefinitionOverrides() { + return policyFamilyDefinitionOverrides; + } + + public EditPolicyPb setPolicyFamilyId(String policyFamilyId) { + this.policyFamilyId = policyFamilyId; + return this; + } + + public String getPolicyFamilyId() { + return policyFamilyId; + } + + public EditPolicyPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditPolicyPb that = (EditPolicyPb) o; + return Objects.equals(definition, that.definition) + && Objects.equals(description, that.description) + && Objects.equals(libraries, that.libraries) + && Objects.equals(maxClustersPerUser, that.maxClustersPerUser) + && Objects.equals(name, that.name) + && Objects.equals(policyFamilyDefinitionOverrides, that.policyFamilyDefinitionOverrides) + && Objects.equals(policyFamilyId, that.policyFamilyId) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash( + definition, + description, + libraries, + maxClustersPerUser, + name, + policyFamilyDefinitionOverrides, + policyFamilyId, + policyId); + } + + @Override + public String toString() { + return new ToStringer(EditPolicyPb.class) + .add("definition", definition) + .add("description", description) + .add("libraries", libraries) 
+ .add("maxClustersPerUser", maxClustersPerUser) + .add("name", name) + .add("policyFamilyDefinitionOverrides", policyFamilyDefinitionOverrides) + .add("policyFamilyId", policyFamilyId) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java index e2931443b..5bd0688ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditPolicyResponse.EditPolicyResponseSerializer.class) +@JsonDeserialize(using = EditPolicyResponse.EditPolicyResponseDeserializer.class) public class EditPolicyResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(EditPolicyResponse.class).toString(); } + + EditPolicyResponsePb toPb() { + EditPolicyResponsePb pb = new EditPolicyResponsePb(); + + return pb; + } + + static EditPolicyResponse fromPb(EditPolicyResponsePb pb) { + EditPolicyResponse model = new EditPolicyResponse(); + + return model; + } + + public static class 
EditPolicyResponseSerializer extends JsonSerializer { + @Override + public void serialize(EditPolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditPolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditPolicyResponseDeserializer extends JsonDeserializer { + @Override + public EditPolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditPolicyResponsePb pb = mapper.readValue(p, EditPolicyResponsePb.class); + return EditPolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponsePb.java new file mode 100755 index 000000000..9e85fd432 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditPolicyResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditPolicyResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java index 35a7dba24..ff3c38b82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditResponse.EditResponseSerializer.class) +@JsonDeserialize(using = EditResponse.EditResponseDeserializer.class) public class EditResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(EditResponse.class).toString(); } + + EditResponsePb toPb() { + 
EditResponsePb pb = new EditResponsePb(); + + return pb; + } + + static EditResponse fromPb(EditResponsePb pb) { + EditResponse model = new EditResponse(); + + return model; + } + + public static class EditResponseSerializer extends JsonSerializer { + @Override + public void serialize(EditResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditResponseDeserializer extends JsonDeserializer { + @Override + public EditResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditResponsePb pb = mapper.readValue(p, EditResponsePb.class); + return EditResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponsePb.java new file mode 100755 index 000000000..2f3310a24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java index 82d881714..66c0c8d1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = EnforceClusterComplianceRequest.EnforceClusterComplianceRequestSerializer.class) +@JsonDeserialize( + using = EnforceClusterComplianceRequest.EnforceClusterComplianceRequestDeserializer.class) 
public class EnforceClusterComplianceRequest { /** The ID of the cluster you want to enforce policy compliance on. */ - @JsonProperty("cluster_id") private String clusterId; /** * If set, previews the changes that would be made to a cluster to enforce compliance but does not * update the cluster. */ - @JsonProperty("validate_only") private Boolean validateOnly; public EnforceClusterComplianceRequest setClusterId(String clusterId) { @@ -59,4 +70,44 @@ public String toString() { .add("validateOnly", validateOnly) .toString(); } + + EnforceClusterComplianceRequestPb toPb() { + EnforceClusterComplianceRequestPb pb = new EnforceClusterComplianceRequestPb(); + pb.setClusterId(clusterId); + pb.setValidateOnly(validateOnly); + + return pb; + } + + static EnforceClusterComplianceRequest fromPb(EnforceClusterComplianceRequestPb pb) { + EnforceClusterComplianceRequest model = new EnforceClusterComplianceRequest(); + model.setClusterId(pb.getClusterId()); + model.setValidateOnly(pb.getValidateOnly()); + + return model; + } + + public static class EnforceClusterComplianceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + EnforceClusterComplianceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnforceClusterComplianceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnforceClusterComplianceRequestDeserializer + extends JsonDeserializer { + @Override + public EnforceClusterComplianceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnforceClusterComplianceRequestPb pb = + mapper.readValue(p, EnforceClusterComplianceRequestPb.class); + return EnforceClusterComplianceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequestPb.java new file mode 100755 index 000000000..3cf6219d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnforceClusterComplianceRequestPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("validate_only") + private Boolean validateOnly; + + public EnforceClusterComplianceRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public EnforceClusterComplianceRequestPb setValidateOnly(Boolean validateOnly) { + this.validateOnly = validateOnly; + return this; + } + + public Boolean getValidateOnly() { + return validateOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnforceClusterComplianceRequestPb that = (EnforceClusterComplianceRequestPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(validateOnly, that.validateOnly); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, validateOnly); + } + + @Override + public String toString() { + return 
new ToStringer(EnforceClusterComplianceRequestPb.class) + .add("clusterId", clusterId) + .add("validateOnly", validateOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java index 8cc88b0c6..21b5ae1d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java @@ -4,24 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = EnforceClusterComplianceResponse.EnforceClusterComplianceResponseSerializer.class) +@JsonDeserialize( + using = EnforceClusterComplianceResponse.EnforceClusterComplianceResponseDeserializer.class) public class EnforceClusterComplianceResponse { /** * A list of changes that have been made to the cluster settings for the cluster to become * compliant with its policy. */ - @JsonProperty("changes") private Collection changes; /** * Whether any changes have been made to the cluster settings for the cluster to become compliant * with its policy. 
*/ - @JsonProperty("has_changes") private Boolean hasChanges; public EnforceClusterComplianceResponse setChanges(Collection changes) { @@ -62,4 +73,44 @@ public String toString() { .add("hasChanges", hasChanges) .toString(); } + + EnforceClusterComplianceResponsePb toPb() { + EnforceClusterComplianceResponsePb pb = new EnforceClusterComplianceResponsePb(); + pb.setChanges(changes); + pb.setHasChanges(hasChanges); + + return pb; + } + + static EnforceClusterComplianceResponse fromPb(EnforceClusterComplianceResponsePb pb) { + EnforceClusterComplianceResponse model = new EnforceClusterComplianceResponse(); + model.setChanges(pb.getChanges()); + model.setHasChanges(pb.getHasChanges()); + + return model; + } + + public static class EnforceClusterComplianceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + EnforceClusterComplianceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnforceClusterComplianceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnforceClusterComplianceResponseDeserializer + extends JsonDeserializer { + @Override + public EnforceClusterComplianceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnforceClusterComplianceResponsePb pb = + mapper.readValue(p, EnforceClusterComplianceResponsePb.class); + return EnforceClusterComplianceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponsePb.java new file mode 100755 index 000000000..858e016f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EnforceClusterComplianceResponsePb { + @JsonProperty("changes") + private Collection changes; + + @JsonProperty("has_changes") + private Boolean hasChanges; + + public EnforceClusterComplianceResponsePb setChanges(Collection changes) { + this.changes = changes; + return this; + } + + public Collection getChanges() { + return changes; + } + + public EnforceClusterComplianceResponsePb setHasChanges(Boolean hasChanges) { + this.hasChanges = hasChanges; + return this; + } + + public Boolean getHasChanges() { + return hasChanges; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnforceClusterComplianceResponsePb that = (EnforceClusterComplianceResponsePb) o; + return Objects.equals(changes, that.changes) && Objects.equals(hasChanges, that.hasChanges); + } + + @Override + public int hashCode() { + return Objects.hash(changes, hasChanges); + } + + @Override + public String toString() { + 
return new ToStringer(EnforceClusterComplianceResponsePb.class) + .add("changes", changes) + .add("hasChanges", hasChanges) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index d6dcdf38c..9ea12b713 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -14,41 +23,31 @@ * environment spec, only pip dependencies are supported. */ @Generated +@JsonSerialize(using = Environment.EnvironmentSerializer.class) +@JsonDeserialize(using = Environment.EnvironmentDeserializer.class) public class Environment { - /** - * Client version used by the environment The client is the user-facing environment of the - * runtime. Each client comes with a specific set of pre-installed libraries. The version is a - * string, consisting of the major client version. - */ - @JsonProperty("client") + /** Use `environment_version` instead. */ private String client; /** * List of pip dependencies, as supported by the version of pip in this environment. 
Each - * dependency is a pip requirement file line - * https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be - * , , (WSFS or Volumes in - * Databricks), E.g. dependencies: ["foo==0.0.1", "-r - * /Workspace/test/requirements.txt"] + * dependency is a valid pip requirements file line per + * https://pip.pypa.io/en/stable/reference/requirements-file-format/. Allowed dependencies include + * a requirement specifier, an archive URL, a local project path (such as WSFS or UC Volumes in + * Databricks), or a VCS project URL. */ - @JsonProperty("dependencies") private Collection dependencies; /** - * We renamed `client` to `environment_version` in notebook exports. This field is meant solely so - * that imported notebooks with `environment_version` can be deserialized correctly, in a - * backwards-compatible way (i.e. if `client` is specified instead of `environment_version`, it - * will be deserialized correctly). Do NOT use this field for any other purpose, e.g. notebook - * storage. This field is not yet exposed to customers (e.g. in the jobs API). + * Required. Environment version used by the environment. Each version comes with a specific + * Python version and a set of Python packages. The version is a string, consisting of an integer. */ - @JsonProperty("environment_version") private String environmentVersion; /** * List of jar dependencies, should be string representing volume paths. For example: * `/Volumes/path/to/test.jar`. 
*/ - @JsonProperty("jar_dependencies") private Collection jarDependencies; public Environment setClient(String client) { @@ -112,4 +111,43 @@ public String toString() { .add("jarDependencies", jarDependencies) .toString(); } + + EnvironmentPb toPb() { + EnvironmentPb pb = new EnvironmentPb(); + pb.setClient(client); + pb.setDependencies(dependencies); + pb.setEnvironmentVersion(environmentVersion); + pb.setJarDependencies(jarDependencies); + + return pb; + } + + static Environment fromPb(EnvironmentPb pb) { + Environment model = new Environment(); + model.setClient(pb.getClient()); + model.setDependencies(pb.getDependencies()); + model.setEnvironmentVersion(pb.getEnvironmentVersion()); + model.setJarDependencies(pb.getJarDependencies()); + + return model; + } + + public static class EnvironmentSerializer extends JsonSerializer { + @Override + public void serialize(Environment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnvironmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnvironmentDeserializer extends JsonDeserializer { + @Override + public Environment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnvironmentPb pb = mapper.readValue(p, EnvironmentPb.class); + return Environment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnvironmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnvironmentPb.java new file mode 100755 index 000000000..aaed6a3b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnvironmentPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The environment entity used to preserve serverless environment side panel, jobs' environment for + * non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal + * environment spec, only pip dependencies are supported. + */ +@Generated +class EnvironmentPb { + @JsonProperty("client") + private String client; + + @JsonProperty("dependencies") + private Collection dependencies; + + @JsonProperty("environment_version") + private String environmentVersion; + + @JsonProperty("jar_dependencies") + private Collection jarDependencies; + + public EnvironmentPb setClient(String client) { + this.client = client; + return this; + } + + public String getClient() { + return client; + } + + public EnvironmentPb setDependencies(Collection dependencies) { + this.dependencies = dependencies; + return this; + } + + public Collection getDependencies() { + return dependencies; + } + + public EnvironmentPb setEnvironmentVersion(String environmentVersion) { + this.environmentVersion = environmentVersion; + return this; + } + + public String getEnvironmentVersion() { + return environmentVersion; + } + + public EnvironmentPb setJarDependencies(Collection jarDependencies) { + this.jarDependencies = jarDependencies; + return this; + } + + public Collection getJarDependencies() { + return jarDependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnvironmentPb that = (EnvironmentPb) o; + return Objects.equals(client, that.client) + && Objects.equals(dependencies, that.dependencies) + && Objects.equals(environmentVersion, that.environmentVersion) + && Objects.equals(jarDependencies, 
that.jarDependencies); + } + + @Override + public int hashCode() { + return Objects.hash(client, dependencies, environmentVersion, jarDependencies); + } + + @Override + public String toString() { + return new ToStringer(EnvironmentPb.class) + .add("client", client) + .add("dependencies", dependencies) + .add("environmentVersion", environmentVersion) + .add("jarDependencies", jarDependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java index a365e67d5..9beeb874b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java @@ -4,100 +4,91 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EventDetails.EventDetailsSerializer.class) +@JsonDeserialize(using = EventDetails.EventDetailsDeserializer.class) public class EventDetails { /** * * For created clusters, the attributes of the cluster. * For edited clusters, the new * attributes of the cluster. */ - @JsonProperty("attributes") private ClusterAttributes attributes; /** The cause of a change in target size. 
*/ - @JsonProperty("cause") private EventDetailsCause cause; /** The actual cluster size that was set in the cluster creation or edit. */ - @JsonProperty("cluster_size") private ClusterSize clusterSize; /** The current number of vCPUs in the cluster. */ - @JsonProperty("current_num_vcpus") private Long currentNumVcpus; /** The current number of nodes in the cluster. */ - @JsonProperty("current_num_workers") private Long currentNumWorkers; /** */ - @JsonProperty("did_not_expand_reason") private String didNotExpandReason; /** Current disk size in bytes */ - @JsonProperty("disk_size") private Long diskSize; /** More details about the change in driver's state */ - @JsonProperty("driver_state_message") private String driverStateMessage; /** * Whether or not a blocklisted node should be terminated. For ClusterEventType NODE_BLACKLISTED. */ - @JsonProperty("enable_termination_for_node_blocklisted") private Boolean enableTerminationForNodeBlocklisted; /** */ - @JsonProperty("free_space") private Long freeSpace; /** List of global and cluster init scripts associated with this cluster event. */ - @JsonProperty("init_scripts") private InitScriptEventDetails initScripts; /** Instance Id where the event originated from */ - @JsonProperty("instance_id") private String instanceId; /** * Unique identifier of the specific job run associated with this cluster event * For clusters * created for jobs, this will be the same as the cluster name */ - @JsonProperty("job_run_name") private String jobRunName; /** The cluster attributes before a cluster was edited. */ - @JsonProperty("previous_attributes") private ClusterAttributes previousAttributes; /** The size of the cluster before an edit or resize. */ - @JsonProperty("previous_cluster_size") private ClusterSize previousClusterSize; /** Previous disk size in bytes */ - @JsonProperty("previous_disk_size") private Long previousDiskSize; /** * A termination reason: * On a TERMINATED event, this is the reason of the termination. 
* On a * RESIZE_COMPLETE event, this indicates the reason that we failed to acquire some nodes. */ - @JsonProperty("reason") private TerminationReason reason; /** The targeted number of vCPUs in the cluster. */ - @JsonProperty("target_num_vcpus") private Long targetNumVcpus; /** The targeted number of nodes in the cluster. */ - @JsonProperty("target_num_workers") private Long targetNumWorkers; /** The user that caused the event to occur. (Empty if it was done by the control plane.) */ - @JsonProperty("user") private String user; public EventDetails setAttributes(ClusterAttributes attributes) { @@ -359,4 +350,75 @@ public String toString() { .add("user", user) .toString(); } + + EventDetailsPb toPb() { + EventDetailsPb pb = new EventDetailsPb(); + pb.setAttributes(attributes); + pb.setCause(cause); + pb.setClusterSize(clusterSize); + pb.setCurrentNumVcpus(currentNumVcpus); + pb.setCurrentNumWorkers(currentNumWorkers); + pb.setDidNotExpandReason(didNotExpandReason); + pb.setDiskSize(diskSize); + pb.setDriverStateMessage(driverStateMessage); + pb.setEnableTerminationForNodeBlocklisted(enableTerminationForNodeBlocklisted); + pb.setFreeSpace(freeSpace); + pb.setInitScripts(initScripts); + pb.setInstanceId(instanceId); + pb.setJobRunName(jobRunName); + pb.setPreviousAttributes(previousAttributes); + pb.setPreviousClusterSize(previousClusterSize); + pb.setPreviousDiskSize(previousDiskSize); + pb.setReason(reason); + pb.setTargetNumVcpus(targetNumVcpus); + pb.setTargetNumWorkers(targetNumWorkers); + pb.setUser(user); + + return pb; + } + + static EventDetails fromPb(EventDetailsPb pb) { + EventDetails model = new EventDetails(); + model.setAttributes(pb.getAttributes()); + model.setCause(pb.getCause()); + model.setClusterSize(pb.getClusterSize()); + model.setCurrentNumVcpus(pb.getCurrentNumVcpus()); + model.setCurrentNumWorkers(pb.getCurrentNumWorkers()); + model.setDidNotExpandReason(pb.getDidNotExpandReason()); + model.setDiskSize(pb.getDiskSize()); + 
model.setDriverStateMessage(pb.getDriverStateMessage()); + model.setEnableTerminationForNodeBlocklisted(pb.getEnableTerminationForNodeBlocklisted()); + model.setFreeSpace(pb.getFreeSpace()); + model.setInitScripts(pb.getInitScripts()); + model.setInstanceId(pb.getInstanceId()); + model.setJobRunName(pb.getJobRunName()); + model.setPreviousAttributes(pb.getPreviousAttributes()); + model.setPreviousClusterSize(pb.getPreviousClusterSize()); + model.setPreviousDiskSize(pb.getPreviousDiskSize()); + model.setReason(pb.getReason()); + model.setTargetNumVcpus(pb.getTargetNumVcpus()); + model.setTargetNumWorkers(pb.getTargetNumWorkers()); + model.setUser(pb.getUser()); + + return model; + } + + public static class EventDetailsSerializer extends JsonSerializer { + @Override + public void serialize(EventDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EventDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EventDetailsDeserializer extends JsonDeserializer { + @Override + public EventDetails deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EventDetailsPb pb = mapper.readValue(p, EventDetailsPb.class); + return EventDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsPb.java new file mode 100755 index 000000000..9df52079e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsPb.java @@ -0,0 +1,331 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EventDetailsPb { + @JsonProperty("attributes") + private ClusterAttributes attributes; + + @JsonProperty("cause") + private EventDetailsCause cause; + + @JsonProperty("cluster_size") + private ClusterSize clusterSize; + + @JsonProperty("current_num_vcpus") + private Long currentNumVcpus; + + @JsonProperty("current_num_workers") + private Long currentNumWorkers; + + @JsonProperty("did_not_expand_reason") + private String didNotExpandReason; + + @JsonProperty("disk_size") + private Long diskSize; + + @JsonProperty("driver_state_message") + private String driverStateMessage; + + @JsonProperty("enable_termination_for_node_blocklisted") + private Boolean enableTerminationForNodeBlocklisted; + + @JsonProperty("free_space") + private Long freeSpace; + + @JsonProperty("init_scripts") + private InitScriptEventDetails initScripts; + + @JsonProperty("instance_id") + private String instanceId; + + @JsonProperty("job_run_name") + private String jobRunName; + + @JsonProperty("previous_attributes") + private ClusterAttributes previousAttributes; + + @JsonProperty("previous_cluster_size") + private ClusterSize previousClusterSize; + + @JsonProperty("previous_disk_size") + private Long previousDiskSize; + + @JsonProperty("reason") + private TerminationReason reason; + + @JsonProperty("target_num_vcpus") + private Long targetNumVcpus; + + @JsonProperty("target_num_workers") + private Long targetNumWorkers; + + @JsonProperty("user") + private String user; + + public EventDetailsPb setAttributes(ClusterAttributes attributes) { + this.attributes = attributes; + return this; + } + + public ClusterAttributes getAttributes() { + return attributes; + } + + public EventDetailsPb setCause(EventDetailsCause cause) { + this.cause = cause; + return this; 
+ } + + public EventDetailsCause getCause() { + return cause; + } + + public EventDetailsPb setClusterSize(ClusterSize clusterSize) { + this.clusterSize = clusterSize; + return this; + } + + public ClusterSize getClusterSize() { + return clusterSize; + } + + public EventDetailsPb setCurrentNumVcpus(Long currentNumVcpus) { + this.currentNumVcpus = currentNumVcpus; + return this; + } + + public Long getCurrentNumVcpus() { + return currentNumVcpus; + } + + public EventDetailsPb setCurrentNumWorkers(Long currentNumWorkers) { + this.currentNumWorkers = currentNumWorkers; + return this; + } + + public Long getCurrentNumWorkers() { + return currentNumWorkers; + } + + public EventDetailsPb setDidNotExpandReason(String didNotExpandReason) { + this.didNotExpandReason = didNotExpandReason; + return this; + } + + public String getDidNotExpandReason() { + return didNotExpandReason; + } + + public EventDetailsPb setDiskSize(Long diskSize) { + this.diskSize = diskSize; + return this; + } + + public Long getDiskSize() { + return diskSize; + } + + public EventDetailsPb setDriverStateMessage(String driverStateMessage) { + this.driverStateMessage = driverStateMessage; + return this; + } + + public String getDriverStateMessage() { + return driverStateMessage; + } + + public EventDetailsPb setEnableTerminationForNodeBlocklisted( + Boolean enableTerminationForNodeBlocklisted) { + this.enableTerminationForNodeBlocklisted = enableTerminationForNodeBlocklisted; + return this; + } + + public Boolean getEnableTerminationForNodeBlocklisted() { + return enableTerminationForNodeBlocklisted; + } + + public EventDetailsPb setFreeSpace(Long freeSpace) { + this.freeSpace = freeSpace; + return this; + } + + public Long getFreeSpace() { + return freeSpace; + } + + public EventDetailsPb setInitScripts(InitScriptEventDetails initScripts) { + this.initScripts = initScripts; + return this; + } + + public InitScriptEventDetails getInitScripts() { + return initScripts; + } + + public EventDetailsPb 
setInstanceId(String instanceId) { + this.instanceId = instanceId; + return this; + } + + public String getInstanceId() { + return instanceId; + } + + public EventDetailsPb setJobRunName(String jobRunName) { + this.jobRunName = jobRunName; + return this; + } + + public String getJobRunName() { + return jobRunName; + } + + public EventDetailsPb setPreviousAttributes(ClusterAttributes previousAttributes) { + this.previousAttributes = previousAttributes; + return this; + } + + public ClusterAttributes getPreviousAttributes() { + return previousAttributes; + } + + public EventDetailsPb setPreviousClusterSize(ClusterSize previousClusterSize) { + this.previousClusterSize = previousClusterSize; + return this; + } + + public ClusterSize getPreviousClusterSize() { + return previousClusterSize; + } + + public EventDetailsPb setPreviousDiskSize(Long previousDiskSize) { + this.previousDiskSize = previousDiskSize; + return this; + } + + public Long getPreviousDiskSize() { + return previousDiskSize; + } + + public EventDetailsPb setReason(TerminationReason reason) { + this.reason = reason; + return this; + } + + public TerminationReason getReason() { + return reason; + } + + public EventDetailsPb setTargetNumVcpus(Long targetNumVcpus) { + this.targetNumVcpus = targetNumVcpus; + return this; + } + + public Long getTargetNumVcpus() { + return targetNumVcpus; + } + + public EventDetailsPb setTargetNumWorkers(Long targetNumWorkers) { + this.targetNumWorkers = targetNumWorkers; + return this; + } + + public Long getTargetNumWorkers() { + return targetNumWorkers; + } + + public EventDetailsPb setUser(String user) { + this.user = user; + return this; + } + + public String getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EventDetailsPb that = (EventDetailsPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(cause, that.cause) + && 
Objects.equals(clusterSize, that.clusterSize) + && Objects.equals(currentNumVcpus, that.currentNumVcpus) + && Objects.equals(currentNumWorkers, that.currentNumWorkers) + && Objects.equals(didNotExpandReason, that.didNotExpandReason) + && Objects.equals(diskSize, that.diskSize) + && Objects.equals(driverStateMessage, that.driverStateMessage) + && Objects.equals( + enableTerminationForNodeBlocklisted, that.enableTerminationForNodeBlocklisted) + && Objects.equals(freeSpace, that.freeSpace) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instanceId, that.instanceId) + && Objects.equals(jobRunName, that.jobRunName) + && Objects.equals(previousAttributes, that.previousAttributes) + && Objects.equals(previousClusterSize, that.previousClusterSize) + && Objects.equals(previousDiskSize, that.previousDiskSize) + && Objects.equals(reason, that.reason) + && Objects.equals(targetNumVcpus, that.targetNumVcpus) + && Objects.equals(targetNumWorkers, that.targetNumWorkers) + && Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, + cause, + clusterSize, + currentNumVcpus, + currentNumWorkers, + didNotExpandReason, + diskSize, + driverStateMessage, + enableTerminationForNodeBlocklisted, + freeSpace, + initScripts, + instanceId, + jobRunName, + previousAttributes, + previousClusterSize, + previousDiskSize, + reason, + targetNumVcpus, + targetNumWorkers, + user); + } + + @Override + public String toString() { + return new ToStringer(EventDetailsPb.class) + .add("attributes", attributes) + .add("cause", cause) + .add("clusterSize", clusterSize) + .add("currentNumVcpus", currentNumVcpus) + .add("currentNumWorkers", currentNumWorkers) + .add("didNotExpandReason", didNotExpandReason) + .add("diskSize", diskSize) + .add("driverStateMessage", driverStateMessage) + .add("enableTerminationForNodeBlocklisted", enableTerminationForNodeBlocklisted) + .add("freeSpace", freeSpace) + .add("initScripts", initScripts) 
+ .add("instanceId", instanceId) + .add("jobRunName", jobRunName) + .add("previousAttributes", previousAttributes) + .add("previousClusterSize", previousClusterSize) + .add("previousDiskSize", previousDiskSize) + .add("reason", reason) + .add("targetNumVcpus", targetNumVcpus) + .add("targetNumWorkers", targetNumWorkers) + .add("user", user) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java index bc250469e..c1a501e22 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java @@ -11,6 +11,7 @@ public enum EventType { AUTOSCALING_BACKOFF, AUTOSCALING_FAILED, AUTOSCALING_STATS_REPORT, + CLUSTER_MIGRATED, CREATING, DBFS_DOWN, DID_NOT_EXPAND_DISK, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java index 61d5be7d7..1d4660770 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during cluster creation which are related to GCP. */ @Generated +@JsonSerialize(using = GcpAttributes.GcpAttributesSerializer.class) +@JsonDeserialize(using = GcpAttributes.GcpAttributesDeserializer.class) public class GcpAttributes { /** * This field determines whether the spark executors will be scheduled to run on preemptible VMs, * on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is * unavailable. */ - @JsonProperty("availability") private GcpAvailability availability; /** Boot disk size in GB */ - @JsonProperty("boot_disk_size") private Long bootDiskSize; /** @@ -27,7 +36,6 @@ public class GcpAttributes { * services (like GCS). The google service account must have previously been added to the * Databricks environment by an account administrator. */ - @JsonProperty("google_service_account") private String googleServiceAccount; /** @@ -38,7 +46,6 @@ public class GcpAttributes { *

[GCP documentation]: * https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds */ - @JsonProperty("local_ssd_count") private Long localSsdCount; /** @@ -46,7 +53,6 @@ public class GcpAttributes { * (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon * to be deprecated, use the 'availability' field instead. */ - @JsonProperty("use_preemptible_executors") private Boolean usePreemptibleExecutors; /** @@ -56,7 +62,6 @@ public class GcpAttributes { * cluster on. - A GCP availability zone => Pick One of the available zones for (machine type + * region) from https://cloud.google.com/compute/docs/regions-zones. */ - @JsonProperty("zone_id") private String zoneId; public GcpAttributes setAvailability(GcpAvailability availability) { @@ -148,4 +153,47 @@ public String toString() { .add("zoneId", zoneId) .toString(); } + + GcpAttributesPb toPb() { + GcpAttributesPb pb = new GcpAttributesPb(); + pb.setAvailability(availability); + pb.setBootDiskSize(bootDiskSize); + pb.setGoogleServiceAccount(googleServiceAccount); + pb.setLocalSsdCount(localSsdCount); + pb.setUsePreemptibleExecutors(usePreemptibleExecutors); + pb.setZoneId(zoneId); + + return pb; + } + + static GcpAttributes fromPb(GcpAttributesPb pb) { + GcpAttributes model = new GcpAttributes(); + model.setAvailability(pb.getAvailability()); + model.setBootDiskSize(pb.getBootDiskSize()); + model.setGoogleServiceAccount(pb.getGoogleServiceAccount()); + model.setLocalSsdCount(pb.getLocalSsdCount()); + model.setUsePreemptibleExecutors(pb.getUsePreemptibleExecutors()); + model.setZoneId(pb.getZoneId()); + + return model; + } + + public static class GcpAttributesSerializer extends JsonSerializer { + @Override + public void serialize(GcpAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpAttributesDeserializer 
extends JsonDeserializer { + @Override + public GcpAttributes deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpAttributesPb pb = mapper.readValue(p, GcpAttributesPb.class); + return GcpAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributesPb.java new file mode 100755 index 000000000..3a50d49b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributesPb.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during cluster creation which are related to GCP. 
*/ +@Generated +class GcpAttributesPb { + @JsonProperty("availability") + private GcpAvailability availability; + + @JsonProperty("boot_disk_size") + private Long bootDiskSize; + + @JsonProperty("google_service_account") + private String googleServiceAccount; + + @JsonProperty("local_ssd_count") + private Long localSsdCount; + + @JsonProperty("use_preemptible_executors") + private Boolean usePreemptibleExecutors; + + @JsonProperty("zone_id") + private String zoneId; + + public GcpAttributesPb setAvailability(GcpAvailability availability) { + this.availability = availability; + return this; + } + + public GcpAvailability getAvailability() { + return availability; + } + + public GcpAttributesPb setBootDiskSize(Long bootDiskSize) { + this.bootDiskSize = bootDiskSize; + return this; + } + + public Long getBootDiskSize() { + return bootDiskSize; + } + + public GcpAttributesPb setGoogleServiceAccount(String googleServiceAccount) { + this.googleServiceAccount = googleServiceAccount; + return this; + } + + public String getGoogleServiceAccount() { + return googleServiceAccount; + } + + public GcpAttributesPb setLocalSsdCount(Long localSsdCount) { + this.localSsdCount = localSsdCount; + return this; + } + + public Long getLocalSsdCount() { + return localSsdCount; + } + + public GcpAttributesPb setUsePreemptibleExecutors(Boolean usePreemptibleExecutors) { + this.usePreemptibleExecutors = usePreemptibleExecutors; + return this; + } + + public Boolean getUsePreemptibleExecutors() { + return usePreemptibleExecutors; + } + + public GcpAttributesPb setZoneId(String zoneId) { + this.zoneId = zoneId; + return this; + } + + public String getZoneId() { + return zoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpAttributesPb that = (GcpAttributesPb) o; + return Objects.equals(availability, that.availability) + && Objects.equals(bootDiskSize, that.bootDiskSize) + && 
Objects.equals(googleServiceAccount, that.googleServiceAccount) + && Objects.equals(localSsdCount, that.localSsdCount) + && Objects.equals(usePreemptibleExecutors, that.usePreemptibleExecutors) + && Objects.equals(zoneId, that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash( + availability, + bootDiskSize, + googleServiceAccount, + localSsdCount, + usePreemptibleExecutors, + zoneId); + } + + @Override + public String toString() { + return new ToStringer(GcpAttributesPb.class) + .add("availability", availability) + .add("bootDiskSize", bootDiskSize) + .add("googleServiceAccount", googleServiceAccount) + .add("localSsdCount", localSsdCount) + .add("usePreemptibleExecutors", usePreemptibleExecutors) + .add("zoneId", zoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java index 819a421ef..4b39385dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location in Google Cloud Platform's GCS */ @Generated 
+@JsonSerialize(using = GcsStorageInfo.GcsStorageInfoSerializer.class) +@JsonDeserialize(using = GcsStorageInfo.GcsStorageInfoDeserializer.class) public class GcsStorageInfo { /** GCS destination/URI, e.g. `gs://my-bucket/some-prefix` */ - @JsonProperty("destination") private String destination; public GcsStorageInfo setDestination(String destination) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GcsStorageInfo.class).add("destination", destination).toString(); } + + GcsStorageInfoPb toPb() { + GcsStorageInfoPb pb = new GcsStorageInfoPb(); + pb.setDestination(destination); + + return pb; + } + + static GcsStorageInfo fromPb(GcsStorageInfoPb pb) { + GcsStorageInfo model = new GcsStorageInfo(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class GcsStorageInfoSerializer extends JsonSerializer { + @Override + public void serialize(GcsStorageInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcsStorageInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcsStorageInfoDeserializer extends JsonDeserializer { + @Override + public GcsStorageInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcsStorageInfoPb pb = mapper.readValue(p, GcsStorageInfoPb.class); + return GcsStorageInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfoPb.java new file mode 100755 index 000000000..1acc7e794 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location in Google Cloud Platform's GCS */ +@Generated +class GcsStorageInfoPb { + @JsonProperty("destination") + private String destination; + + public GcsStorageInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcsStorageInfoPb that = (GcsStorageInfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(GcsStorageInfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequest.java index 29719bb14..22106076b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster policy compliance */ @Generated +@JsonSerialize(using = GetClusterComplianceRequest.GetClusterComplianceRequestSerializer.class) +@JsonDeserialize(using = GetClusterComplianceRequest.GetClusterComplianceRequestDeserializer.class) public class GetClusterComplianceRequest { /** The ID of the cluster to get the compliance status */ - @JsonIgnore - @QueryParam("cluster_id") private String clusterId; public GetClusterComplianceRequest setClusterId(String clusterId) { @@ -42,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetClusterComplianceRequest.class).add("clusterId", clusterId).toString(); } + + GetClusterComplianceRequestPb toPb() { + GetClusterComplianceRequestPb pb = new GetClusterComplianceRequestPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static GetClusterComplianceRequest fromPb(GetClusterComplianceRequestPb pb) { + GetClusterComplianceRequest model = new GetClusterComplianceRequest(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class GetClusterComplianceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterComplianceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterComplianceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterComplianceRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterComplianceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterComplianceRequestPb pb = mapper.readValue(p, GetClusterComplianceRequestPb.class); + return GetClusterComplianceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequestPb.java new file mode 100755 index 000000000..200caae6a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster policy compliance */ +@Generated +class GetClusterComplianceRequestPb { + @JsonIgnore + @QueryParam("cluster_id") + private String clusterId; + + public GetClusterComplianceRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterComplianceRequestPb that = (GetClusterComplianceRequestPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterComplianceRequestPb.class) + .add("clusterId", clusterId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponse.java index 27af99b58..4e64df06b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponse.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = GetClusterComplianceResponse.GetClusterComplianceResponseSerializer.class) +@JsonDeserialize( + using = GetClusterComplianceResponse.GetClusterComplianceResponseDeserializer.class) public class GetClusterComplianceResponse { /** * Whether the cluster is compliant with its policy or not. Clusters could be out of compliance if * the policy was updated after the cluster was last edited. */ - @JsonProperty("is_compliant") private Boolean isCompliant; /** @@ -22,7 +33,6 @@ public class GetClusterComplianceResponse { * The keys indicate the path where the policy validation error is occurring. The values indicate * an error message describing the policy validation error. 
*/ - @JsonProperty("violations") private Map violations; public GetClusterComplianceResponse setIsCompliant(Boolean isCompliant) { @@ -64,4 +74,43 @@ public String toString() { .add("violations", violations) .toString(); } + + GetClusterComplianceResponsePb toPb() { + GetClusterComplianceResponsePb pb = new GetClusterComplianceResponsePb(); + pb.setIsCompliant(isCompliant); + pb.setViolations(violations); + + return pb; + } + + static GetClusterComplianceResponse fromPb(GetClusterComplianceResponsePb pb) { + GetClusterComplianceResponse model = new GetClusterComplianceResponse(); + model.setIsCompliant(pb.getIsCompliant()); + model.setViolations(pb.getViolations()); + + return model; + } + + public static class GetClusterComplianceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterComplianceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterComplianceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterComplianceResponseDeserializer + extends JsonDeserializer { + @Override + public GetClusterComplianceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterComplianceResponsePb pb = mapper.readValue(p, GetClusterComplianceResponsePb.class); + return GetClusterComplianceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponsePb.java new file mode 100755 index 000000000..86633b516 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterComplianceResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class GetClusterComplianceResponsePb { + @JsonProperty("is_compliant") + private Boolean isCompliant; + + @JsonProperty("violations") + private Map violations; + + public GetClusterComplianceResponsePb setIsCompliant(Boolean isCompliant) { + this.isCompliant = isCompliant; + return this; + } + + public Boolean getIsCompliant() { + return isCompliant; + } + + public GetClusterComplianceResponsePb setViolations(Map violations) { + this.violations = violations; + return this; + } + + public Map getViolations() { + return violations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterComplianceResponsePb that = (GetClusterComplianceResponsePb) o; + return Objects.equals(isCompliant, that.isCompliant) + && Objects.equals(violations, that.violations); + } + + @Override + public int hashCode() { + return Objects.hash(isCompliant, violations); + } + + @Override + public String toString() { + return new 
ToStringer(GetClusterComplianceResponsePb.class) + .add("isCompliant", isCompliant) + .add("violations", violations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequest.java index 66e5703b2..58dae3716 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster permission levels */ @Generated +@JsonSerialize( + using = GetClusterPermissionLevelsRequest.GetClusterPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = GetClusterPermissionLevelsRequest.GetClusterPermissionLevelsRequestDeserializer.class) public class GetClusterPermissionLevelsRequest { /** The cluster for which to get or manage permissions. 
*/ - @JsonIgnore private String clusterId; + private String clusterId; public GetClusterPermissionLevelsRequest setClusterId(String clusterId) { this.clusterId = clusterId; @@ -41,4 +54,42 @@ public String toString() { .add("clusterId", clusterId) .toString(); } + + GetClusterPermissionLevelsRequestPb toPb() { + GetClusterPermissionLevelsRequestPb pb = new GetClusterPermissionLevelsRequestPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static GetClusterPermissionLevelsRequest fromPb(GetClusterPermissionLevelsRequestPb pb) { + GetClusterPermissionLevelsRequest model = new GetClusterPermissionLevelsRequest(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class GetClusterPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterPermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPermissionLevelsRequestPb pb = + mapper.readValue(p, GetClusterPermissionLevelsRequestPb.class); + return GetClusterPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequestPb.java new file mode 100755 index 000000000..099322ccf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster permission levels */ +@Generated +class GetClusterPermissionLevelsRequestPb { + @JsonIgnore private String clusterId; + + public GetClusterPermissionLevelsRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPermissionLevelsRequestPb that = (GetClusterPermissionLevelsRequestPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPermissionLevelsRequestPb.class) + .add("clusterId", clusterId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponse.java index 356950d00..1ce65ff10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetClusterPermissionLevelsResponse.GetClusterPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = GetClusterPermissionLevelsResponse.GetClusterPermissionLevelsResponseDeserializer.class) public class GetClusterPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetClusterPermissionLevelsResponse setPermissionLevels( @@ -43,4 +55,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetClusterPermissionLevelsResponsePb toPb() { + GetClusterPermissionLevelsResponsePb pb = new GetClusterPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetClusterPermissionLevelsResponse fromPb(GetClusterPermissionLevelsResponsePb 
pb) { + GetClusterPermissionLevelsResponse model = new GetClusterPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetClusterPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetClusterPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPermissionLevelsResponsePb pb = + mapper.readValue(p, GetClusterPermissionLevelsResponsePb.class); + return GetClusterPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponsePb.java new file mode 100755 index 000000000..21e941db6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetClusterPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetClusterPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPermissionLevelsResponsePb that = (GetClusterPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequest.java index d8ded41ad..0998ff39f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster permissions */ @Generated +@JsonSerialize(using = GetClusterPermissionsRequest.GetClusterPermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetClusterPermissionsRequest.GetClusterPermissionsRequestDeserializer.class) public class GetClusterPermissionsRequest { /** The cluster for which to get or manage permissions. */ - @JsonIgnore private String clusterId; + private String clusterId; public GetClusterPermissionsRequest setClusterId(String clusterId) { this.clusterId = clusterId; @@ -41,4 +53,41 @@ public String toString() { .add("clusterId", clusterId) .toString(); } + + GetClusterPermissionsRequestPb toPb() { + GetClusterPermissionsRequestPb pb = new GetClusterPermissionsRequestPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static GetClusterPermissionsRequest fromPb(GetClusterPermissionsRequestPb pb) { + GetClusterPermissionsRequest model = new GetClusterPermissionsRequest(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class GetClusterPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPermissionsRequestPb pb = mapper.readValue(p, GetClusterPermissionsRequestPb.class); + return GetClusterPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequestPb.java new file mode 100755 index 000000000..02547f04e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster permissions */ +@Generated +class GetClusterPermissionsRequestPb { + @JsonIgnore private String clusterId; + + public GetClusterPermissionsRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPermissionsRequestPb that = (GetClusterPermissionsRequestPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPermissionsRequestPb.class) + .add("clusterId", clusterId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequest.java index 2fa32ed3b..5ff98e016 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster policy permission levels */ @Generated +@JsonSerialize( + using = + GetClusterPolicyPermissionLevelsRequest.GetClusterPolicyPermissionLevelsRequestSerializer + .class) +@JsonDeserialize( + using = + GetClusterPolicyPermissionLevelsRequest.GetClusterPolicyPermissionLevelsRequestDeserializer + .class) public class GetClusterPolicyPermissionLevelsRequest { /** The cluster policy for which to get or manage permissions. 
*/ - @JsonIgnore private String clusterPolicyId; + private String clusterPolicyId; public GetClusterPolicyPermissionLevelsRequest setClusterPolicyId(String clusterPolicyId) { this.clusterPolicyId = clusterPolicyId; @@ -41,4 +58,45 @@ public String toString() { .add("clusterPolicyId", clusterPolicyId) .toString(); } + + GetClusterPolicyPermissionLevelsRequestPb toPb() { + GetClusterPolicyPermissionLevelsRequestPb pb = new GetClusterPolicyPermissionLevelsRequestPb(); + pb.setClusterPolicyId(clusterPolicyId); + + return pb; + } + + static GetClusterPolicyPermissionLevelsRequest fromPb( + GetClusterPolicyPermissionLevelsRequestPb pb) { + GetClusterPolicyPermissionLevelsRequest model = new GetClusterPolicyPermissionLevelsRequest(); + model.setClusterPolicyId(pb.getClusterPolicyId()); + + return model; + } + + public static class GetClusterPolicyPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPolicyPermissionLevelsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetClusterPolicyPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPolicyPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterPolicyPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPolicyPermissionLevelsRequestPb pb = + mapper.readValue(p, GetClusterPolicyPermissionLevelsRequestPb.class); + return GetClusterPolicyPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequestPb.java new file mode 100755 index 000000000..4806195c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster policy permission levels */ +@Generated +class GetClusterPolicyPermissionLevelsRequestPb { + @JsonIgnore private String clusterPolicyId; + + public GetClusterPolicyPermissionLevelsRequestPb setClusterPolicyId(String clusterPolicyId) { + this.clusterPolicyId = clusterPolicyId; + return this; + } + + public String getClusterPolicyId() { + return clusterPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPolicyPermissionLevelsRequestPb that = (GetClusterPolicyPermissionLevelsRequestPb) o; + return Objects.equals(clusterPolicyId, that.clusterPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterPolicyId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPolicyPermissionLevelsRequestPb.class) + .add("clusterPolicyId", clusterPolicyId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponse.java index c9c713c8c..eda906f94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetClusterPolicyPermissionLevelsResponse.GetClusterPolicyPermissionLevelsResponseSerializer + .class) +@JsonDeserialize( + using = + GetClusterPolicyPermissionLevelsResponse + .GetClusterPolicyPermissionLevelsResponseDeserializer.class) public class GetClusterPolicyPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetClusterPolicyPermissionLevelsResponse setPermissionLevels( @@ -43,4 +59,46 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetClusterPolicyPermissionLevelsResponsePb toPb() { + GetClusterPolicyPermissionLevelsResponsePb pb = + new 
GetClusterPolicyPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetClusterPolicyPermissionLevelsResponse fromPb( + GetClusterPolicyPermissionLevelsResponsePb pb) { + GetClusterPolicyPermissionLevelsResponse model = new GetClusterPolicyPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetClusterPolicyPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPolicyPermissionLevelsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetClusterPolicyPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPolicyPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetClusterPolicyPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPolicyPermissionLevelsResponsePb pb = + mapper.readValue(p, GetClusterPolicyPermissionLevelsResponsePb.class); + return GetClusterPolicyPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponsePb.java new file mode 100755 index 000000000..6b8ee14c4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionLevelsResponsePb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetClusterPolicyPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetClusterPolicyPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPolicyPermissionLevelsResponsePb that = + (GetClusterPolicyPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPolicyPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java index a062639ac..6c93220d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster policy permissions */ @Generated +@JsonSerialize( + using = GetClusterPolicyPermissionsRequest.GetClusterPolicyPermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetClusterPolicyPermissionsRequest.GetClusterPolicyPermissionsRequestDeserializer.class) public class GetClusterPolicyPermissionsRequest { /** The cluster policy for which to get or manage permissions. */ - @JsonIgnore private String clusterPolicyId; + private String clusterPolicyId; public GetClusterPolicyPermissionsRequest setClusterPolicyId(String clusterPolicyId) { this.clusterPolicyId = clusterPolicyId; @@ -41,4 +54,42 @@ public String toString() { .add("clusterPolicyId", clusterPolicyId) .toString(); } + + GetClusterPolicyPermissionsRequestPb toPb() { + GetClusterPolicyPermissionsRequestPb pb = new GetClusterPolicyPermissionsRequestPb(); + pb.setClusterPolicyId(clusterPolicyId); + + return pb; + } + + static GetClusterPolicyPermissionsRequest fromPb(GetClusterPolicyPermissionsRequestPb pb) { + GetClusterPolicyPermissionsRequest model = new GetClusterPolicyPermissionsRequest(); + model.setClusterPolicyId(pb.getClusterPolicyId()); + + return model; + } + + public static class GetClusterPolicyPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPolicyPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterPolicyPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public 
static class GetClusterPolicyPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterPolicyPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPolicyPermissionsRequestPb pb = + mapper.readValue(p, GetClusterPolicyPermissionsRequestPb.class); + return GetClusterPolicyPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequestPb.java new file mode 100755 index 000000000..13f6a2676 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster policy permissions */ +@Generated +class GetClusterPolicyPermissionsRequestPb { + @JsonIgnore private String clusterPolicyId; + + public GetClusterPolicyPermissionsRequestPb setClusterPolicyId(String clusterPolicyId) { + this.clusterPolicyId = clusterPolicyId; + return this; + } + + public String getClusterPolicyId() { + return clusterPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPolicyPermissionsRequestPb that = (GetClusterPolicyPermissionsRequestPb) o; + return Objects.equals(clusterPolicyId, that.clusterPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterPolicyId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPolicyPermissionsRequestPb.class) + .add("clusterPolicyId", clusterPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java index 15db04f6e..19af9eee3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a cluster policy */ @Generated +@JsonSerialize(using = GetClusterPolicyRequest.GetClusterPolicyRequestSerializer.class) +@JsonDeserialize(using = GetClusterPolicyRequest.GetClusterPolicyRequestDeserializer.class) public class GetClusterPolicyRequest { /** Canonical unique identifier for the Cluster Policy. */ - @JsonIgnore - @QueryParam("policy_id") private String policyId; public GetClusterPolicyRequest setPolicyId(String policyId) { @@ -42,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetClusterPolicyRequest.class).add("policyId", policyId).toString(); } + + GetClusterPolicyRequestPb toPb() { + GetClusterPolicyRequestPb pb = new GetClusterPolicyRequestPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static GetClusterPolicyRequest fromPb(GetClusterPolicyRequestPb pb) { + GetClusterPolicyRequest model = new GetClusterPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class GetClusterPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetClusterPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetClusterPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterPolicyRequestPb pb = mapper.readValue(p, GetClusterPolicyRequestPb.class); + return GetClusterPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequestPb.java new file mode 100755 index 000000000..89d712ed9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a cluster policy */ +@Generated +class GetClusterPolicyRequestPb { + @JsonIgnore + @QueryParam("policy_id") + private String policyId; + + public GetClusterPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterPolicyRequestPb that = (GetClusterPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterPolicyRequestPb.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java index 
2df188625..5c09d2f18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get cluster info */ @Generated +@JsonSerialize(using = GetClusterRequest.GetClusterRequestSerializer.class) +@JsonDeserialize(using = GetClusterRequest.GetClusterRequestDeserializer.class) public class GetClusterRequest { /** The cluster about which to retrieve information. 
*/ - @JsonIgnore - @QueryParam("cluster_id") private String clusterId; public GetClusterRequest setClusterId(String clusterId) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetClusterRequest.class).add("clusterId", clusterId).toString(); } + + GetClusterRequestPb toPb() { + GetClusterRequestPb pb = new GetClusterRequestPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static GetClusterRequest fromPb(GetClusterRequestPb pb) { + GetClusterRequest model = new GetClusterRequest(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class GetClusterRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetClusterRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetClusterRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetClusterRequestDeserializer extends JsonDeserializer { + @Override + public GetClusterRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetClusterRequestPb pb = mapper.readValue(p, GetClusterRequestPb.class); + return GetClusterRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequestPb.java new file mode 100755 index 000000000..b58d29943 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get cluster info */ +@Generated +class GetClusterRequestPb { + @JsonIgnore + @QueryParam("cluster_id") + private String clusterId; + + public GetClusterRequestPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetClusterRequestPb that = (GetClusterRequestPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(GetClusterRequestPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java index b9d5f385e..a4fed724a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetEvents.GetEventsSerializer.class) +@JsonDeserialize(using = GetEvents.GetEventsDeserializer.class) public class GetEvents { /** The ID of the cluster to retrieve events about. */ - @JsonProperty("cluster_id") private String clusterId; /** The end time in epoch milliseconds. If empty, returns events up to the current time. */ - @JsonProperty("end_time") private Long endTime; /** An optional set of event types to filter on. If empty, all event types are returned. */ - @JsonProperty("event_types") private Collection eventTypes; /** @@ -28,7 +36,6 @@ public class GetEvents { *

The maximum number of events to include in a page of events. Defaults to 50, and maximum * allowed value is 500. */ - @JsonProperty("limit") private Long limit; /** @@ -37,11 +44,9 @@ public class GetEvents { *

The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the * results are requested in descending order, the end_time field is required. */ - @JsonProperty("offset") private Long offset; /** The order to list events in; either "ASC" or "DESC". Defaults to "DESC". */ - @JsonProperty("order") private GetEventsOrder order; /** @@ -50,21 +55,18 @@ public class GetEvents { * server will decide the number of results to be returned. The field has to be in the range * [0,500]. If the value is outside the range, the server enforces 0 or 500. */ - @JsonProperty("page_size") private Long pageSize; /** * Use next_page_token or prev_page_token returned from the previous request to list the next or * previous page of events respectively. If page_token is empty, the first page is returned. */ - @JsonProperty("page_token") private String pageToken; /** * The start time in epoch milliseconds. If empty, returns events starting from the beginning of * time. */ - @JsonProperty("start_time") private Long startTime; public GetEvents setClusterId(String clusterId) { @@ -184,4 +186,53 @@ public String toString() { .add("startTime", startTime) .toString(); } + + GetEventsPb toPb() { + GetEventsPb pb = new GetEventsPb(); + pb.setClusterId(clusterId); + pb.setEndTime(endTime); + pb.setEventTypes(eventTypes); + pb.setLimit(limit); + pb.setOffset(offset); + pb.setOrder(order); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setStartTime(startTime); + + return pb; + } + + static GetEvents fromPb(GetEventsPb pb) { + GetEvents model = new GetEvents(); + model.setClusterId(pb.getClusterId()); + model.setEndTime(pb.getEndTime()); + model.setEventTypes(pb.getEventTypes()); + model.setLimit(pb.getLimit()); + model.setOffset(pb.getOffset()); + model.setOrder(pb.getOrder()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setStartTime(pb.getStartTime()); + + return model; + } + + public static class 
GetEventsSerializer extends JsonSerializer { + @Override + public void serialize(GetEvents value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEventsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEventsDeserializer extends JsonDeserializer { + @Override + public GetEvents deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEventsPb pb = mapper.readValue(p, GetEventsPb.class); + return GetEvents.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsPb.java new file mode 100755 index 000000000..235dd0280 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsPb.java @@ -0,0 +1,157 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetEventsPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("event_types") + private Collection eventTypes; + + @JsonProperty("limit") + private Long limit; + + @JsonProperty("offset") + private Long offset; + + @JsonProperty("order") + private GetEventsOrder order; + + @JsonProperty("page_size") + private Long pageSize; + + @JsonProperty("page_token") + private String pageToken; + + @JsonProperty("start_time") + private Long startTime; + + public GetEventsPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public GetEventsPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public GetEventsPb setEventTypes(Collection eventTypes) { + this.eventTypes = eventTypes; + return this; + } + + public Collection getEventTypes() { + return eventTypes; + } + + public GetEventsPb setLimit(Long limit) { + this.limit = limit; + return this; + } + + public Long getLimit() { + return limit; + } + + public GetEventsPb setOffset(Long offset) { + this.offset = offset; + return this; + } + + public Long getOffset() { + return offset; + } + + public GetEventsPb setOrder(GetEventsOrder order) { + this.order = order; + return this; + } + + public GetEventsOrder getOrder() { + return order; + } + + public GetEventsPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GetEventsPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public 
String getPageToken() { + return pageToken; + } + + public GetEventsPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEventsPb that = (GetEventsPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(endTime, that.endTime) + && Objects.equals(eventTypes, that.eventTypes) + && Objects.equals(limit, that.limit) + && Objects.equals(offset, that.offset) + && Objects.equals(order, that.order) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(startTime, that.startTime); + } + + @Override + public int hashCode() { + return Objects.hash( + clusterId, endTime, eventTypes, limit, offset, order, pageSize, pageToken, startTime); + } + + @Override + public String toString() { + return new ToStringer(GetEventsPb.class) + .add("clusterId", clusterId) + .add("endTime", endTime) + .add("eventTypes", eventTypes) + .add("limit", limit) + .add("offset", offset) + .add("order", order) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("startTime", startTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java index 0ffb98a2d..6b764c6eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetEventsResponse.GetEventsResponseSerializer.class) +@JsonDeserialize(using = GetEventsResponse.GetEventsResponseDeserializer.class) public class GetEventsResponse { /** */ - @JsonProperty("events") private Collection events; /** @@ -20,21 +30,18 @@ public class GetEventsResponse { *

The parameters required to retrieve the next page of events. Omitted if there are no more * events to read. */ - @JsonProperty("next_page") private GetEvents nextPage; /** * This field represents the pagination token to retrieve the next page of results. If the value * is "", it means no further results for the request. */ - @JsonProperty("next_page_token") private String nextPageToken; /** * This field represents the pagination token to retrieve the previous page of results. If the * value is "", it means no further results for the request. */ - @JsonProperty("prev_page_token") private String prevPageToken; /** @@ -43,7 +50,6 @@ public class GetEventsResponse { * *

The total number of events filtered by the start_time, end_time, and event_types. */ - @JsonProperty("total_count") private Long totalCount; public GetEventsResponse setEvents(Collection events) { @@ -118,4 +124,46 @@ public String toString() { .add("totalCount", totalCount) .toString(); } + + GetEventsResponsePb toPb() { + GetEventsResponsePb pb = new GetEventsResponsePb(); + pb.setEvents(events); + pb.setNextPage(nextPage); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + pb.setTotalCount(totalCount); + + return pb; + } + + static GetEventsResponse fromPb(GetEventsResponsePb pb) { + GetEventsResponse model = new GetEventsResponse(); + model.setEvents(pb.getEvents()); + model.setNextPage(pb.getNextPage()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + model.setTotalCount(pb.getTotalCount()); + + return model; + } + + public static class GetEventsResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetEventsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEventsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEventsResponseDeserializer extends JsonDeserializer { + @Override + public GetEventsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEventsResponsePb pb = mapper.readValue(p, GetEventsResponsePb.class); + return GetEventsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponsePb.java new file mode 100755 index 000000000..f064d702a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetEventsResponsePb { + @JsonProperty("events") + private Collection events; + + @JsonProperty("next_page") + private GetEvents nextPage; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + @JsonProperty("total_count") + private Long totalCount; + + public GetEventsResponsePb setEvents(Collection events) { + this.events = events; + return this; + } + + public Collection getEvents() { + return events; + } + + public GetEventsResponsePb setNextPage(GetEvents nextPage) { + this.nextPage = nextPage; + return this; + } + + public GetEvents getNextPage() { + return nextPage; + } + + public GetEventsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetEventsResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + public 
GetEventsResponsePb setTotalCount(Long totalCount) { + this.totalCount = totalCount; + return this; + } + + public Long getTotalCount() { + return totalCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEventsResponsePb that = (GetEventsResponsePb) o; + return Objects.equals(events, that.events) + && Objects.equals(nextPage, that.nextPage) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken) + && Objects.equals(totalCount, that.totalCount); + } + + @Override + public int hashCode() { + return Objects.hash(events, nextPage, nextPageToken, prevPageToken, totalCount); + } + + @Override + public String toString() { + return new ToStringer(GetEventsResponsePb.class) + .add("events", events) + .add("nextPage", nextPage) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .add("totalCount", totalCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequest.java index ccc3fca1a..344abcc35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an init script */ @Generated +@JsonSerialize(using = GetGlobalInitScriptRequest.GetGlobalInitScriptRequestSerializer.class) +@JsonDeserialize(using = GetGlobalInitScriptRequest.GetGlobalInitScriptRequestDeserializer.class) public class GetGlobalInitScriptRequest { /** The ID of the global init script. */ - @JsonIgnore private String scriptId; + private String scriptId; public GetGlobalInitScriptRequest setScriptId(String scriptId) { this.scriptId = scriptId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetGlobalInitScriptRequest.class).add("scriptId", scriptId).toString(); } + + GetGlobalInitScriptRequestPb toPb() { + GetGlobalInitScriptRequestPb pb = new GetGlobalInitScriptRequestPb(); + pb.setScriptId(scriptId); + + return pb; + } + + static GetGlobalInitScriptRequest fromPb(GetGlobalInitScriptRequestPb pb) { + GetGlobalInitScriptRequest model = new GetGlobalInitScriptRequest(); + model.setScriptId(pb.getScriptId()); + + return model; + } + + public static class GetGlobalInitScriptRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetGlobalInitScriptRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetGlobalInitScriptRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetGlobalInitScriptRequestDeserializer + extends JsonDeserializer { + @Override + public GetGlobalInitScriptRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetGlobalInitScriptRequestPb pb = mapper.readValue(p, GetGlobalInitScriptRequestPb.class); + return GetGlobalInitScriptRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequestPb.java new file mode 100755 index 000000000..294198a8d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetGlobalInitScriptRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an init script */ +@Generated +class GetGlobalInitScriptRequestPb { + @JsonIgnore private String scriptId; + + public GetGlobalInitScriptRequestPb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetGlobalInitScriptRequestPb that = (GetGlobalInitScriptRequestPb) o; + return Objects.equals(scriptId, that.scriptId); + } + + @Override + public int hashCode() { + return Objects.hash(scriptId); + } + + @Override + public String toString() { + return new ToStringer(GetGlobalInitScriptRequestPb.class).add("scriptId", scriptId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java index 97feb90b6..4fc6d5c8b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = GetInstancePool.GetInstancePoolSerializer.class) +@JsonDeserialize(using = GetInstancePool.GetInstancePoolDeserializer.class) public class GetInstancePool { /** * Attributes related to instance pools running on Amazon Web Services. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private InstancePoolAwsAttributes awsAttributes; /** * Attributes related to instance pools running on Azure. If not specified at pool creation, a set * of default values will be used. */ - @JsonProperty("azure_attributes") private InstancePoolAzureAttributes azureAttributes; /** @@ -31,7 +40,6 @@ public class GetInstancePool { * *

- Currently, Databricks allows at most 45 custom tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -45,11 +53,9 @@ public class GetInstancePool { * *

- InstancePoolId: */ - @JsonProperty("default_tags") private Map defaultTags; /** Defines the specification of the disks that will be attached to all spark containers. */ - @JsonProperty("disk_spec") private DiskSpec diskSpec; /** @@ -58,14 +64,12 @@ public class GetInstancePool { * feature requires specific AWS permissions to function correctly - refer to the User Guide for * more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** * Attributes related to instance pools running on Google Cloud Platform. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private InstancePoolGcpAttributes gcpAttributes; /** @@ -75,18 +79,15 @@ public class GetInstancePool { * must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove * idle instances from the cache if min cache size could still hold. */ - @JsonProperty("idle_instance_autotermination_minutes") private Long idleInstanceAutoterminationMinutes; /** Canonical unique identifier for the pool. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** * Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 * characters. */ - @JsonProperty("instance_pool_name") private String instancePoolName; /** @@ -94,11 +95,9 @@ public class GetInstancePool { * clusters and idle instances. Clusters that require further instance provisioning will fail * during upsize requests. */ - @JsonProperty("max_capacity") private Long maxCapacity; /** Minimum number of idle instances to keep in the instance pool */ - @JsonProperty("min_idle_instances") private Long minIdleInstances; /** @@ -107,11 +106,9 @@ public class GetInstancePool { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. 
*/ - @JsonProperty("node_type_id") private String nodeTypeId; /** Custom Docker Image BYOC */ - @JsonProperty("preloaded_docker_images") private Collection preloadedDockerImages; /** @@ -119,19 +116,15 @@ public class GetInstancePool { * started with the preloaded Spark version will start faster. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("preloaded_spark_versions") private Collection preloadedSparkVersions; /** Current state of the instance pool. */ - @JsonProperty("state") private InstancePoolState state; /** Usage statistics about the instance pool. */ - @JsonProperty("stats") private InstancePoolStats stats; /** Status of failed pending instances in the pool. */ - @JsonProperty("status") private InstancePoolStatus status; public GetInstancePool setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { @@ -369,4 +362,72 @@ public String toString() { .add("status", status) .toString(); } + + GetInstancePoolPb toPb() { + GetInstancePoolPb pb = new GetInstancePoolPb(); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setCustomTags(customTags); + pb.setDefaultTags(defaultTags); + pb.setDiskSpec(diskSpec); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setGcpAttributes(gcpAttributes); + pb.setIdleInstanceAutoterminationMinutes(idleInstanceAutoterminationMinutes); + pb.setInstancePoolId(instancePoolId); + pb.setInstancePoolName(instancePoolName); + pb.setMaxCapacity(maxCapacity); + pb.setMinIdleInstances(minIdleInstances); + pb.setNodeTypeId(nodeTypeId); + pb.setPreloadedDockerImages(preloadedDockerImages); + pb.setPreloadedSparkVersions(preloadedSparkVersions); + pb.setState(state); + pb.setStats(stats); + pb.setStatus(status); + + return pb; + } + + static GetInstancePool fromPb(GetInstancePoolPb pb) { + GetInstancePool model = new GetInstancePool(); + model.setAwsAttributes(pb.getAwsAttributes()); + 
model.setAzureAttributes(pb.getAzureAttributes()); + model.setCustomTags(pb.getCustomTags()); + model.setDefaultTags(pb.getDefaultTags()); + model.setDiskSpec(pb.getDiskSpec()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setIdleInstanceAutoterminationMinutes(pb.getIdleInstanceAutoterminationMinutes()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setInstancePoolName(pb.getInstancePoolName()); + model.setMaxCapacity(pb.getMaxCapacity()); + model.setMinIdleInstances(pb.getMinIdleInstances()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setPreloadedDockerImages(pb.getPreloadedDockerImages()); + model.setPreloadedSparkVersions(pb.getPreloadedSparkVersions()); + model.setState(pb.getState()); + model.setStats(pb.getStats()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class GetInstancePoolSerializer extends JsonSerializer { + @Override + public void serialize(GetInstancePool value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetInstancePoolPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetInstancePoolDeserializer extends JsonDeserializer { + @Override + public GetInstancePool deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetInstancePoolPb pb = mapper.readValue(p, GetInstancePoolPb.class); + return GetInstancePool.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPb.java new file mode 100755 index 000000000..ac98a4425 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPb.java @@ -0,0 +1,303 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class GetInstancePoolPb { + @JsonProperty("aws_attributes") + private InstancePoolAwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private InstancePoolAzureAttributes azureAttributes; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("default_tags") + private Map defaultTags; + + @JsonProperty("disk_spec") + private DiskSpec diskSpec; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("gcp_attributes") + private InstancePoolGcpAttributes gcpAttributes; + + @JsonProperty("idle_instance_autotermination_minutes") + private Long idleInstanceAutoterminationMinutes; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("instance_pool_name") + private String instancePoolName; + + @JsonProperty("max_capacity") + private Long maxCapacity; + + @JsonProperty("min_idle_instances") + private Long minIdleInstances; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("preloaded_docker_images") + private Collection 
preloadedDockerImages; + + @JsonProperty("preloaded_spark_versions") + private Collection preloadedSparkVersions; + + @JsonProperty("state") + private InstancePoolState state; + + @JsonProperty("stats") + private InstancePoolStats stats; + + @JsonProperty("status") + private InstancePoolStatus status; + + public GetInstancePoolPb setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public InstancePoolAwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public GetInstancePoolPb setAzureAttributes(InstancePoolAzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public InstancePoolAzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public GetInstancePoolPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public GetInstancePoolPb setDefaultTags(Map defaultTags) { + this.defaultTags = defaultTags; + return this; + } + + public Map getDefaultTags() { + return defaultTags; + } + + public GetInstancePoolPb setDiskSpec(DiskSpec diskSpec) { + this.diskSpec = diskSpec; + return this; + } + + public DiskSpec getDiskSpec() { + return diskSpec; + } + + public GetInstancePoolPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public GetInstancePoolPb setGcpAttributes(InstancePoolGcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public InstancePoolGcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public GetInstancePoolPb setIdleInstanceAutoterminationMinutes( + Long idleInstanceAutoterminationMinutes) { + this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; + return this; + } + + public Long 
getIdleInstanceAutoterminationMinutes() { + return idleInstanceAutoterminationMinutes; + } + + public GetInstancePoolPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public GetInstancePoolPb setInstancePoolName(String instancePoolName) { + this.instancePoolName = instancePoolName; + return this; + } + + public String getInstancePoolName() { + return instancePoolName; + } + + public GetInstancePoolPb setMaxCapacity(Long maxCapacity) { + this.maxCapacity = maxCapacity; + return this; + } + + public Long getMaxCapacity() { + return maxCapacity; + } + + public GetInstancePoolPb setMinIdleInstances(Long minIdleInstances) { + this.minIdleInstances = minIdleInstances; + return this; + } + + public Long getMinIdleInstances() { + return minIdleInstances; + } + + public GetInstancePoolPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public GetInstancePoolPb setPreloadedDockerImages(Collection preloadedDockerImages) { + this.preloadedDockerImages = preloadedDockerImages; + return this; + } + + public Collection getPreloadedDockerImages() { + return preloadedDockerImages; + } + + public GetInstancePoolPb setPreloadedSparkVersions(Collection preloadedSparkVersions) { + this.preloadedSparkVersions = preloadedSparkVersions; + return this; + } + + public Collection getPreloadedSparkVersions() { + return preloadedSparkVersions; + } + + public GetInstancePoolPb setState(InstancePoolState state) { + this.state = state; + return this; + } + + public InstancePoolState getState() { + return state; + } + + public GetInstancePoolPb setStats(InstancePoolStats stats) { + this.stats = stats; + return this; + } + + public InstancePoolStats getStats() { + return stats; + } + + public GetInstancePoolPb setStatus(InstancePoolStatus status) { + this.status = status; 
+ return this; + } + + public InstancePoolStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetInstancePoolPb that = (GetInstancePoolPb) o; + return Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(customTags, that.customTags) + && Objects.equals(defaultTags, that.defaultTags) + && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals( + idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(instancePoolName, that.instancePoolName) + && Objects.equals(maxCapacity, that.maxCapacity) + && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) + && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) + && Objects.equals(state, that.state) + && Objects.equals(stats, that.stats) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash( + awsAttributes, + azureAttributes, + customTags, + defaultTags, + diskSpec, + enableElasticDisk, + gcpAttributes, + idleInstanceAutoterminationMinutes, + instancePoolId, + instancePoolName, + maxCapacity, + minIdleInstances, + nodeTypeId, + preloadedDockerImages, + preloadedSparkVersions, + state, + stats, + status); + } + + @Override + public String toString() { + return new ToStringer(GetInstancePoolPb.class) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("customTags", customTags) + .add("defaultTags", defaultTags) + .add("diskSpec", diskSpec) + .add("enableElasticDisk", 
enableElasticDisk) + .add("gcpAttributes", gcpAttributes) + .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) + .add("instancePoolId", instancePoolId) + .add("instancePoolName", instancePoolName) + .add("maxCapacity", maxCapacity) + .add("minIdleInstances", minIdleInstances) + .add("nodeTypeId", nodeTypeId) + .add("preloadedDockerImages", preloadedDockerImages) + .add("preloadedSparkVersions", preloadedSparkVersions) + .add("state", state) + .add("stats", stats) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequest.java index afc2a6065..f5586cbd5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get instance pool permission levels */ @Generated +@JsonSerialize( + using = + GetInstancePoolPermissionLevelsRequest.GetInstancePoolPermissionLevelsRequestSerializer + .class) +@JsonDeserialize( + using = 
+ GetInstancePoolPermissionLevelsRequest.GetInstancePoolPermissionLevelsRequestDeserializer + .class) public class GetInstancePoolPermissionLevelsRequest { /** The instance pool for which to get or manage permissions. */ - @JsonIgnore private String instancePoolId; + private String instancePoolId; public GetInstancePoolPermissionLevelsRequest setInstancePoolId(String instancePoolId) { this.instancePoolId = instancePoolId; @@ -41,4 +58,45 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + GetInstancePoolPermissionLevelsRequestPb toPb() { + GetInstancePoolPermissionLevelsRequestPb pb = new GetInstancePoolPermissionLevelsRequestPb(); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static GetInstancePoolPermissionLevelsRequest fromPb( + GetInstancePoolPermissionLevelsRequestPb pb) { + GetInstancePoolPermissionLevelsRequest model = new GetInstancePoolPermissionLevelsRequest(); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class GetInstancePoolPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetInstancePoolPermissionLevelsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetInstancePoolPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetInstancePoolPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetInstancePoolPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetInstancePoolPermissionLevelsRequestPb pb = + mapper.readValue(p, GetInstancePoolPermissionLevelsRequestPb.class); + return GetInstancePoolPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequestPb.java new file mode 100755 index 000000000..b26ce3dab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get instance pool permission levels */ +@Generated +class GetInstancePoolPermissionLevelsRequestPb { + @JsonIgnore private String instancePoolId; + + public GetInstancePoolPermissionLevelsRequestPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetInstancePoolPermissionLevelsRequestPb that = (GetInstancePoolPermissionLevelsRequestPb) o; + return Objects.equals(instancePoolId, that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(GetInstancePoolPermissionLevelsRequestPb.class) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponse.java index 9bdee18d2..f7306275a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetInstancePoolPermissionLevelsResponse.GetInstancePoolPermissionLevelsResponseSerializer + .class) +@JsonDeserialize( + using = + GetInstancePoolPermissionLevelsResponse.GetInstancePoolPermissionLevelsResponseDeserializer + .class) public class GetInstancePoolPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetInstancePoolPermissionLevelsResponse setPermissionLevels( @@ -43,4 +59,45 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetInstancePoolPermissionLevelsResponsePb toPb() { + GetInstancePoolPermissionLevelsResponsePb pb = new 
GetInstancePoolPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetInstancePoolPermissionLevelsResponse fromPb( + GetInstancePoolPermissionLevelsResponsePb pb) { + GetInstancePoolPermissionLevelsResponse model = new GetInstancePoolPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetInstancePoolPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetInstancePoolPermissionLevelsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetInstancePoolPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetInstancePoolPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetInstancePoolPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetInstancePoolPermissionLevelsResponsePb pb = + mapper.readValue(p, GetInstancePoolPermissionLevelsResponsePb.class); + return GetInstancePoolPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponsePb.java new file mode 100755 index 000000000..f2854ff36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetInstancePoolPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetInstancePoolPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetInstancePoolPermissionLevelsResponsePb that = (GetInstancePoolPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetInstancePoolPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequest.java index 7b1074d2a..aacefe394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get instance pool permissions */ @Generated +@JsonSerialize( + using = GetInstancePoolPermissionsRequest.GetInstancePoolPermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetInstancePoolPermissionsRequest.GetInstancePoolPermissionsRequestDeserializer.class) public class GetInstancePoolPermissionsRequest { /** The instance pool for which to get or manage permissions. */ - @JsonIgnore private String instancePoolId; + private String instancePoolId; public GetInstancePoolPermissionsRequest setInstancePoolId(String instancePoolId) { this.instancePoolId = instancePoolId; @@ -41,4 +54,42 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + GetInstancePoolPermissionsRequestPb toPb() { + GetInstancePoolPermissionsRequestPb pb = new GetInstancePoolPermissionsRequestPb(); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static GetInstancePoolPermissionsRequest fromPb(GetInstancePoolPermissionsRequestPb pb) { + GetInstancePoolPermissionsRequest model = new GetInstancePoolPermissionsRequest(); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class GetInstancePoolPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetInstancePoolPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetInstancePoolPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GetInstancePoolPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetInstancePoolPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetInstancePoolPermissionsRequestPb pb = + mapper.readValue(p, GetInstancePoolPermissionsRequestPb.class); + return GetInstancePoolPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequestPb.java new file mode 100755 index 000000000..cd4005650 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get instance pool permissions */ +@Generated +class GetInstancePoolPermissionsRequestPb { + @JsonIgnore private String instancePoolId; + + public GetInstancePoolPermissionsRequestPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetInstancePoolPermissionsRequestPb that = (GetInstancePoolPermissionsRequestPb) o; + return Objects.equals(instancePoolId, that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(GetInstancePoolPermissionsRequestPb.class) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequest.java index 8eef5664b..3c54ffb43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get instance pool information */ @Generated +@JsonSerialize(using = GetInstancePoolRequest.GetInstancePoolRequestSerializer.class) +@JsonDeserialize(using = GetInstancePoolRequest.GetInstancePoolRequestDeserializer.class) public class GetInstancePoolRequest { /** The canonical unique identifier for the instance pool. */ - @JsonIgnore - @QueryParam("instance_pool_id") private String instancePoolId; public GetInstancePoolRequest setInstancePoolId(String instancePoolId) { @@ -44,4 +52,41 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + GetInstancePoolRequestPb toPb() { + GetInstancePoolRequestPb pb = new GetInstancePoolRequestPb(); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static GetInstancePoolRequest fromPb(GetInstancePoolRequestPb pb) { + GetInstancePoolRequest model = new GetInstancePoolRequest(); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class GetInstancePoolRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetInstancePoolRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetInstancePoolRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetInstancePoolRequestDeserializer + extends JsonDeserializer { + @Override + public GetInstancePoolRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetInstancePoolRequestPb pb = mapper.readValue(p, GetInstancePoolRequestPb.class); + return GetInstancePoolRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequestPb.java new file mode 100755 index 000000000..7e3522e48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePoolRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get instance pool information */ +@Generated +class GetInstancePoolRequestPb { + @JsonIgnore + @QueryParam("instance_pool_id") + private String instancePoolId; + + public GetInstancePoolRequestPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetInstancePoolRequestPb that = (GetInstancePoolRequestPb) o; + return Objects.equals(instancePoolId, that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(GetInstancePoolRequestPb.class) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequest.java index 626e1d178..123fa49c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get policy family information */ @Generated +@JsonSerialize(using = GetPolicyFamilyRequest.GetPolicyFamilyRequestSerializer.class) +@JsonDeserialize(using = GetPolicyFamilyRequest.GetPolicyFamilyRequestDeserializer.class) public class GetPolicyFamilyRequest { /** The family ID about which to retrieve information. */ - @JsonIgnore private String policyFamilyId; + private String policyFamilyId; /** The version number for the family to fetch. Defaults to the latest version. 
*/ - @JsonIgnore - @QueryParam("version") private Long version; public GetPolicyFamilyRequest setPolicyFamilyId(String policyFamilyId) { @@ -58,4 +66,43 @@ public String toString() { .add("version", version) .toString(); } + + GetPolicyFamilyRequestPb toPb() { + GetPolicyFamilyRequestPb pb = new GetPolicyFamilyRequestPb(); + pb.setPolicyFamilyId(policyFamilyId); + pb.setVersion(version); + + return pb; + } + + static GetPolicyFamilyRequest fromPb(GetPolicyFamilyRequestPb pb) { + GetPolicyFamilyRequest model = new GetPolicyFamilyRequest(); + model.setPolicyFamilyId(pb.getPolicyFamilyId()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class GetPolicyFamilyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPolicyFamilyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPolicyFamilyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPolicyFamilyRequestDeserializer + extends JsonDeserializer { + @Override + public GetPolicyFamilyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPolicyFamilyRequestPb pb = mapper.readValue(p, GetPolicyFamilyRequestPb.class); + return GetPolicyFamilyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequestPb.java new file mode 100755 index 000000000..0d8c7fa8e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetPolicyFamilyRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get policy family information */ +@Generated +class GetPolicyFamilyRequestPb { + @JsonIgnore private String policyFamilyId; + + @JsonIgnore + @QueryParam("version") + private Long version; + + public GetPolicyFamilyRequestPb setPolicyFamilyId(String policyFamilyId) { + this.policyFamilyId = policyFamilyId; + return this; + } + + public String getPolicyFamilyId() { + return policyFamilyId; + } + + public GetPolicyFamilyRequestPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPolicyFamilyRequestPb that = (GetPolicyFamilyRequestPb) o; + return Objects.equals(policyFamilyId, that.policyFamilyId) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(policyFamilyId, version); + } + + @Override + public String toString() { + return new ToStringer(GetPolicyFamilyRequestPb.class) + .add("policyFamilyId", policyFamilyId) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponse.java index 1b9657a5d..dc0f3e13c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetSparkVersionsResponse.GetSparkVersionsResponseSerializer.class) +@JsonDeserialize(using = GetSparkVersionsResponse.GetSparkVersionsResponseDeserializer.class) public class GetSparkVersionsResponse { /** All the available Spark versions. */ - @JsonProperty("versions") private Collection versions; public GetSparkVersionsResponse setVersions(Collection versions) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetSparkVersionsResponse.class).add("versions", versions).toString(); } + + GetSparkVersionsResponsePb toPb() { + GetSparkVersionsResponsePb pb = new GetSparkVersionsResponsePb(); + pb.setVersions(versions); + + return pb; + } + + static GetSparkVersionsResponse fromPb(GetSparkVersionsResponsePb pb) { + GetSparkVersionsResponse model = new GetSparkVersionsResponse(); + model.setVersions(pb.getVersions()); + + return model; + } + + public static class GetSparkVersionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetSparkVersionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSparkVersionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSparkVersionsResponseDeserializer + extends JsonDeserializer { + 
@Override + public GetSparkVersionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSparkVersionsResponsePb pb = mapper.readValue(p, GetSparkVersionsResponsePb.class); + return GetSparkVersionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponsePb.java new file mode 100755 index 000000000..d566c251c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetSparkVersionsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetSparkVersionsResponsePb { + @JsonProperty("versions") + private Collection versions; + + public GetSparkVersionsResponsePb setVersions(Collection versions) { + this.versions = versions; + return this; + } + + public Collection getVersions() { + return versions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSparkVersionsResponsePb that = (GetSparkVersionsResponsePb) o; + return Objects.equals(versions, that.versions); + } + + @Override + public int hashCode() { + return Objects.hash(versions); + } + + @Override + public String toString() { + return new ToStringer(GetSparkVersionsResponsePb.class).add("versions", versions).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequest.java index 52aac9dde..195e101cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequest.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GlobalInitScriptCreateRequest.GlobalInitScriptCreateRequestSerializer.class) +@JsonDeserialize( + using = GlobalInitScriptCreateRequest.GlobalInitScriptCreateRequestDeserializer.class) public class GlobalInitScriptCreateRequest { /** Specifies whether the script is enabled. The script runs only if enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** The name of the script */ - @JsonProperty("name") private String name; /** @@ -28,11 +38,9 @@ public class GlobalInitScriptCreateRequest { * explicit position value conflicts with an existing script value, your request succeeds, but the * original script at that position and all later scripts have their positions incremented by 1. 
*/ - @JsonProperty("position") private Long position; /** The Base64-encoded content of the script. */ - @JsonProperty("script") private String script; public GlobalInitScriptCreateRequest setEnabled(Boolean enabled) { @@ -96,4 +104,48 @@ public String toString() { .add("script", script) .toString(); } + + GlobalInitScriptCreateRequestPb toPb() { + GlobalInitScriptCreateRequestPb pb = new GlobalInitScriptCreateRequestPb(); + pb.setEnabled(enabled); + pb.setName(name); + pb.setPosition(position); + pb.setScript(script); + + return pb; + } + + static GlobalInitScriptCreateRequest fromPb(GlobalInitScriptCreateRequestPb pb) { + GlobalInitScriptCreateRequest model = new GlobalInitScriptCreateRequest(); + model.setEnabled(pb.getEnabled()); + model.setName(pb.getName()); + model.setPosition(pb.getPosition()); + model.setScript(pb.getScript()); + + return model; + } + + public static class GlobalInitScriptCreateRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GlobalInitScriptCreateRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GlobalInitScriptCreateRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GlobalInitScriptCreateRequestDeserializer + extends JsonDeserializer { + @Override + public GlobalInitScriptCreateRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GlobalInitScriptCreateRequestPb pb = + mapper.readValue(p, GlobalInitScriptCreateRequestPb.class); + return GlobalInitScriptCreateRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequestPb.java new file mode 100755 index 000000000..ec897def5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptCreateRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GlobalInitScriptCreateRequestPb { + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("name") + private String name; + + @JsonProperty("position") + private Long position; + + @JsonProperty("script") + private String script; + + public GlobalInitScriptCreateRequestPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public GlobalInitScriptCreateRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GlobalInitScriptCreateRequestPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public GlobalInitScriptCreateRequestPb setScript(String script) { + this.script = script; + return this; + } + + public String getScript() { + return script; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + GlobalInitScriptCreateRequestPb that = (GlobalInitScriptCreateRequestPb) o; + return Objects.equals(enabled, that.enabled) + && Objects.equals(name, that.name) + && Objects.equals(position, that.position) + && Objects.equals(script, that.script); + } + + @Override + public int hashCode() { + return Objects.hash(enabled, name, position, script); + } + + @Override + public String toString() { + return new ToStringer(GlobalInitScriptCreateRequestPb.class) + .add("enabled", enabled) + .add("name", name) + .add("position", position) + .add("script", script) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetails.java index 06fdf3861..70500f963 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetails.java @@ -4,44 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GlobalInitScriptDetails.GlobalInitScriptDetailsSerializer.class) +@JsonDeserialize(using = GlobalInitScriptDetails.GlobalInitScriptDetailsDeserializer.class) public class 
GlobalInitScriptDetails { /** Time when the script was created, represented as a Unix timestamp in milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** The username of the user who created the script. */ - @JsonProperty("created_by") private String createdBy; /** Specifies whether the script is enabled. The script runs only if enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** The name of the script */ - @JsonProperty("name") private String name; /** * The position of a script, where 0 represents the first script to run, 1 is the second script to * run, in ascending order. */ - @JsonProperty("position") private Long position; /** The global init script ID. */ - @JsonProperty("script_id") private String scriptId; /** Time when the script was updated, represented as a Unix timestamp in milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** The username of the user who last updated the script */ - @JsonProperty("updated_by") private String updatedBy; public GlobalInitScriptDetails setCreatedAt(Long createdAt) { @@ -150,4 +153,55 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + GlobalInitScriptDetailsPb toPb() { + GlobalInitScriptDetailsPb pb = new GlobalInitScriptDetailsPb(); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEnabled(enabled); + pb.setName(name); + pb.setPosition(position); + pb.setScriptId(scriptId); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static GlobalInitScriptDetails fromPb(GlobalInitScriptDetailsPb pb) { + GlobalInitScriptDetails model = new GlobalInitScriptDetails(); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEnabled(pb.getEnabled()); + model.setName(pb.getName()); + model.setPosition(pb.getPosition()); + model.setScriptId(pb.getScriptId()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + 
public static class GlobalInitScriptDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + GlobalInitScriptDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GlobalInitScriptDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GlobalInitScriptDetailsDeserializer + extends JsonDeserializer { + @Override + public GlobalInitScriptDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GlobalInitScriptDetailsPb pb = mapper.readValue(p, GlobalInitScriptDetailsPb.class); + return GlobalInitScriptDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsPb.java new file mode 100755 index 000000000..becd9ef5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsPb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GlobalInitScriptDetailsPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("name") + private String name; + + @JsonProperty("position") + private Long position; + + @JsonProperty("script_id") + private String scriptId; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public GlobalInitScriptDetailsPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public GlobalInitScriptDetailsPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public GlobalInitScriptDetailsPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public GlobalInitScriptDetailsPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GlobalInitScriptDetailsPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public GlobalInitScriptDetailsPb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + public GlobalInitScriptDetailsPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public GlobalInitScriptDetailsPb setUpdatedBy(String updatedBy) { + this.updatedBy = 
updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GlobalInitScriptDetailsPb that = (GlobalInitScriptDetailsPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(enabled, that.enabled) + && Objects.equals(name, that.name) + && Objects.equals(position, that.position) + && Objects.equals(scriptId, that.scriptId) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, createdBy, enabled, name, position, scriptId, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(GlobalInitScriptDetailsPb.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("enabled", enabled) + .add("name", name) + .add("position", position) + .add("scriptId", scriptId) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContent.java index 31a41511e..7ce6a592e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContent.java @@ -4,48 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GlobalInitScriptDetailsWithContent.GlobalInitScriptDetailsWithContentSerializer.class) +@JsonDeserialize( + using = GlobalInitScriptDetailsWithContent.GlobalInitScriptDetailsWithContentDeserializer.class) public class GlobalInitScriptDetailsWithContent { /** Time when the script was created, represented as a Unix timestamp in milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** The username of the user who created the script. */ - @JsonProperty("created_by") private String createdBy; /** Specifies whether the script is enabled. The script runs only if enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** The name of the script */ - @JsonProperty("name") private String name; /** * The position of a script, where 0 represents the first script to run, 1 is the second script to * run, in ascending order. */ - @JsonProperty("position") private Long position; /** The Base64-encoded content of the script. */ - @JsonProperty("script") private String script; /** The global init script ID. */ - @JsonProperty("script_id") private String scriptId; /** Time when the script was updated, represented as a Unix timestamp in milliseconds. 
*/ - @JsonProperty("updated_at") private Long updatedAt; /** The username of the user who last updated the script */ - @JsonProperty("updated_by") private String updatedBy; public GlobalInitScriptDetailsWithContent setCreatedAt(Long createdAt) { @@ -165,4 +169,58 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + GlobalInitScriptDetailsWithContentPb toPb() { + GlobalInitScriptDetailsWithContentPb pb = new GlobalInitScriptDetailsWithContentPb(); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEnabled(enabled); + pb.setName(name); + pb.setPosition(position); + pb.setScript(script); + pb.setScriptId(scriptId); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static GlobalInitScriptDetailsWithContent fromPb(GlobalInitScriptDetailsWithContentPb pb) { + GlobalInitScriptDetailsWithContent model = new GlobalInitScriptDetailsWithContent(); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEnabled(pb.getEnabled()); + model.setName(pb.getName()); + model.setPosition(pb.getPosition()); + model.setScript(pb.getScript()); + model.setScriptId(pb.getScriptId()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class GlobalInitScriptDetailsWithContentSerializer + extends JsonSerializer { + @Override + public void serialize( + GlobalInitScriptDetailsWithContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GlobalInitScriptDetailsWithContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GlobalInitScriptDetailsWithContentDeserializer + extends JsonDeserializer { + @Override + public GlobalInitScriptDetailsWithContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GlobalInitScriptDetailsWithContentPb pb = + mapper.readValue(p, GlobalInitScriptDetailsWithContentPb.class); + return GlobalInitScriptDetailsWithContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContentPb.java new file mode 100755 index 000000000..0b97988e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptDetailsWithContentPb.java @@ -0,0 +1,156 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GlobalInitScriptDetailsWithContentPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("name") + private String name; + + @JsonProperty("position") + private Long position; + + @JsonProperty("script") + private String script; + + @JsonProperty("script_id") + private String scriptId; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public GlobalInitScriptDetailsWithContentPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public GlobalInitScriptDetailsWithContentPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public GlobalInitScriptDetailsWithContentPb setEnabled(Boolean enabled) { + this.enabled 
= enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public GlobalInitScriptDetailsWithContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GlobalInitScriptDetailsWithContentPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public GlobalInitScriptDetailsWithContentPb setScript(String script) { + this.script = script; + return this; + } + + public String getScript() { + return script; + } + + public GlobalInitScriptDetailsWithContentPb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + public GlobalInitScriptDetailsWithContentPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public GlobalInitScriptDetailsWithContentPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GlobalInitScriptDetailsWithContentPb that = (GlobalInitScriptDetailsWithContentPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(enabled, that.enabled) + && Objects.equals(name, that.name) + && Objects.equals(position, that.position) + && Objects.equals(script, that.script) + && Objects.equals(scriptId, that.scriptId) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, createdBy, enabled, name, position, script, scriptId, updatedAt, updatedBy); + } + + @Override + public String toString() { + return 
new ToStringer(GlobalInitScriptDetailsWithContentPb.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("enabled", enabled) + .add("name", name) + .add("position", position) + .add("script", script) + .add("scriptId", scriptId) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequest.java index 1d1e56640..b0dcffec0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GlobalInitScriptUpdateRequest.GlobalInitScriptUpdateRequestSerializer.class) +@JsonDeserialize( + using = GlobalInitScriptUpdateRequest.GlobalInitScriptUpdateRequestDeserializer.class) public class GlobalInitScriptUpdateRequest { /** Specifies whether the script is enabled. The script runs only if enabled. 
*/ - @JsonProperty("enabled") private Boolean enabled; /** The name of the script */ - @JsonProperty("name") private String name; /** @@ -30,15 +39,13 @@ public class GlobalInitScriptUpdateRequest { * the original script at that position and all later scripts have their positions incremented by * 1. */ - @JsonProperty("position") private Long position; /** The Base64-encoded content of the script. */ - @JsonProperty("script") private String script; /** The ID of the global init script. */ - @JsonIgnore private String scriptId; + private String scriptId; public GlobalInitScriptUpdateRequest setEnabled(Boolean enabled) { this.enabled = enabled; @@ -112,4 +119,50 @@ public String toString() { .add("scriptId", scriptId) .toString(); } + + GlobalInitScriptUpdateRequestPb toPb() { + GlobalInitScriptUpdateRequestPb pb = new GlobalInitScriptUpdateRequestPb(); + pb.setEnabled(enabled); + pb.setName(name); + pb.setPosition(position); + pb.setScript(script); + pb.setScriptId(scriptId); + + return pb; + } + + static GlobalInitScriptUpdateRequest fromPb(GlobalInitScriptUpdateRequestPb pb) { + GlobalInitScriptUpdateRequest model = new GlobalInitScriptUpdateRequest(); + model.setEnabled(pb.getEnabled()); + model.setName(pb.getName()); + model.setPosition(pb.getPosition()); + model.setScript(pb.getScript()); + model.setScriptId(pb.getScriptId()); + + return model; + } + + public static class GlobalInitScriptUpdateRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GlobalInitScriptUpdateRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GlobalInitScriptUpdateRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GlobalInitScriptUpdateRequestDeserializer + extends JsonDeserializer { + @Override + public GlobalInitScriptUpdateRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it 
is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GlobalInitScriptUpdateRequestPb pb = + mapper.readValue(p, GlobalInitScriptUpdateRequestPb.class); + return GlobalInitScriptUpdateRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequestPb.java new file mode 100755 index 000000000..0f7330d69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptUpdateRequestPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GlobalInitScriptUpdateRequestPb { + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("name") + private String name; + + @JsonProperty("position") + private Long position; + + @JsonProperty("script") + private String script; + + @JsonIgnore private String scriptId; + + public GlobalInitScriptUpdateRequestPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public GlobalInitScriptUpdateRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GlobalInitScriptUpdateRequestPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public GlobalInitScriptUpdateRequestPb setScript(String script) { + this.script = script; + return this; + } + + public String getScript() { + return script; + } + + public 
GlobalInitScriptUpdateRequestPb setScriptId(String scriptId) { + this.scriptId = scriptId; + return this; + } + + public String getScriptId() { + return scriptId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GlobalInitScriptUpdateRequestPb that = (GlobalInitScriptUpdateRequestPb) o; + return Objects.equals(enabled, that.enabled) + && Objects.equals(name, that.name) + && Objects.equals(position, that.position) + && Objects.equals(script, that.script) + && Objects.equals(scriptId, that.scriptId); + } + + @Override + public int hashCode() { + return Objects.hash(enabled, name, position, script, scriptId); + } + + @Override + public String toString() { + return new ToStringer(GlobalInitScriptUpdateRequestPb.class) + .add("enabled", enabled) + .add("name", name) + .add("position", position) + .add("script", script) + .add("scriptId", scriptId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java index 94689ee30..12aef73da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java @@ -21,7 +21,7 @@ public CreateResponse create(GlobalInitScriptCreateRequest request) { String path = "/api/2.0/global-init-scripts"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteGlobalInitScriptRequest request) { String path = 
String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request String path = String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GlobalInitScriptDetailsWithContent.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public void update(GlobalInitScriptUpdateRequest request) { String path = String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java index f8b84ae54..1d1769589 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = InitScriptEventDetails.InitScriptEventDetailsSerializer.class) +@JsonDeserialize(using = InitScriptEventDetails.InitScriptEventDetailsDeserializer.class) public class InitScriptEventDetails { /** The cluster scoped init scripts associated with this cluster event. */ - @JsonProperty("cluster") private Collection cluster; /** The global init scripts associated with this cluster event. */ - @JsonProperty("global") private Collection global; /** @@ -25,7 +34,6 @@ public class InitScriptEventDetails { * *

This should only be defined for the INIT_SCRIPTS_FINISHED event */ - @JsonProperty("reported_for_node") private String reportedForNode; public InitScriptEventDetails setCluster(Collection cluster) { @@ -78,4 +86,45 @@ public String toString() { .add("reportedForNode", reportedForNode) .toString(); } + + InitScriptEventDetailsPb toPb() { + InitScriptEventDetailsPb pb = new InitScriptEventDetailsPb(); + pb.setCluster(cluster); + pb.setGlobal(global); + pb.setReportedForNode(reportedForNode); + + return pb; + } + + static InitScriptEventDetails fromPb(InitScriptEventDetailsPb pb) { + InitScriptEventDetails model = new InitScriptEventDetails(); + model.setCluster(pb.getCluster()); + model.setGlobal(pb.getGlobal()); + model.setReportedForNode(pb.getReportedForNode()); + + return model; + } + + public static class InitScriptEventDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + InitScriptEventDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InitScriptEventDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InitScriptEventDetailsDeserializer + extends JsonDeserializer { + @Override + public InitScriptEventDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InitScriptEventDetailsPb pb = mapper.readValue(p, InitScriptEventDetailsPb.class); + return InitScriptEventDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetailsPb.java new file mode 100755 index 000000000..842ec9a84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetailsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InitScriptEventDetailsPb { + @JsonProperty("cluster") + private Collection cluster; + + @JsonProperty("global") + private Collection global; + + @JsonProperty("reported_for_node") + private String reportedForNode; + + public InitScriptEventDetailsPb setCluster( + Collection cluster) { + this.cluster = cluster; + return this; + } + + public Collection getCluster() { + return cluster; + } + + public InitScriptEventDetailsPb setGlobal(Collection global) { + this.global = global; + return this; + } + + public Collection getGlobal() { + return global; + } + + public InitScriptEventDetailsPb setReportedForNode(String reportedForNode) { + this.reportedForNode = reportedForNode; + return this; + } + + public String getReportedForNode() { + return reportedForNode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InitScriptEventDetailsPb that = (InitScriptEventDetailsPb) o; + return Objects.equals(cluster, that.cluster) + && Objects.equals(global, 
that.global) + && Objects.equals(reportedForNode, that.reportedForNode); + } + + @Override + public int hashCode() { + return Objects.hash(cluster, global, reportedForNode); + } + + @Override + public String toString() { + return new ToStringer(InitScriptEventDetailsPb.class) + .add("cluster", cluster) + .add("global", global) + .add("reportedForNode", reportedForNode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java index 9a696988e..e1229449e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java @@ -4,37 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Config for an individual init script Next ID: 11 */ @Generated +@JsonSerialize(using = InitScriptInfo.InitScriptInfoSerializer.class) +@JsonDeserialize(using = InitScriptInfo.InitScriptInfoDeserializer.class) public class InitScriptInfo { /** * destination needs to be provided, e.g. * `abfss://@.dfs.core.windows.net/` */ - @JsonProperty("abfss") private Adlsgen2Info abfss; /** * destination needs to be provided. e.g. 
`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } * }` */ - @JsonProperty("dbfs") private DbfsStorageInfo dbfs; /** * destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } * }` */ - @JsonProperty("file") private LocalFileInfo file; /** * destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }` */ - @JsonProperty("gcs") private GcsStorageInfo gcs; /** @@ -43,21 +50,18 @@ public class InitScriptInfo { * role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has * permission to write data to the s3 destination. */ - @JsonProperty("s3") private S3StorageInfo s3; /** * destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : * \"/Volumes/my-init.sh\" } }` */ - @JsonProperty("volumes") private VolumesStorageInfo volumes; /** * destination needs to be provided, e.g. `{ "workspace": { "destination": * "/cluster-init-scripts/setup-datadog.sh" } }` */ - @JsonProperty("workspace") private WorkspaceStorageInfo workspace; public InitScriptInfo setAbfss(Adlsgen2Info abfss) { @@ -154,4 +158,50 @@ public String toString() { .add("workspace", workspace) .toString(); } + + InitScriptInfoPb toPb() { + InitScriptInfoPb pb = new InitScriptInfoPb(); + pb.setAbfss(abfss); + pb.setDbfs(dbfs); + pb.setFile(file); + pb.setGcs(gcs); + pb.setS3(s3); + pb.setVolumes(volumes); + pb.setWorkspace(workspace); + + return pb; + } + + static InitScriptInfo fromPb(InitScriptInfoPb pb) { + InitScriptInfo model = new InitScriptInfo(); + model.setAbfss(pb.getAbfss()); + model.setDbfs(pb.getDbfs()); + model.setFile(pb.getFile()); + model.setGcs(pb.getGcs()); + model.setS3(pb.getS3()); + model.setVolumes(pb.getVolumes()); + model.setWorkspace(pb.getWorkspace()); + + return model; + } + + public static class InitScriptInfoSerializer extends JsonSerializer { + @Override + public void serialize(InitScriptInfo value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + InitScriptInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InitScriptInfoDeserializer extends JsonDeserializer { + @Override + public InitScriptInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InitScriptInfoPb pb = mapper.readValue(p, InitScriptInfoPb.class); + return InitScriptInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java index 44fd3c1f4..ff955d40e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java @@ -4,23 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = InitScriptInfoAndExecutionDetails.InitScriptInfoAndExecutionDetailsSerializer.class) +@JsonDeserialize( + using = 
InitScriptInfoAndExecutionDetails.InitScriptInfoAndExecutionDetailsDeserializer.class) public class InitScriptInfoAndExecutionDetails { /** * destination needs to be provided, e.g. * `abfss://@.dfs.core.windows.net/` */ - @JsonProperty("abfss") private Adlsgen2Info abfss; /** * destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } * }` */ - @JsonProperty("dbfs") private DbfsStorageInfo dbfs; /** @@ -28,24 +39,20 @@ public class InitScriptInfoAndExecutionDetails { * FAILED_FETCH). This field should only be used to provide *additional* information to the status * field, not duplicate it. */ - @JsonProperty("error_message") private String errorMessage; /** The number duration of the script execution in seconds */ - @JsonProperty("execution_duration_seconds") private Long executionDurationSeconds; /** * destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } * }` */ - @JsonProperty("file") private LocalFileInfo file; /** * destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }` */ - @JsonProperty("gcs") private GcsStorageInfo gcs; /** @@ -54,25 +61,21 @@ public class InitScriptInfoAndExecutionDetails { * role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has * permission to write data to the s3 destination. */ - @JsonProperty("s3") private S3StorageInfo s3; /** The current status of the script */ - @JsonProperty("status") private InitScriptExecutionDetailsInitScriptExecutionStatus status; /** * destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : * \"/Volumes/my-init.sh\" } }` */ - @JsonProperty("volumes") private VolumesStorageInfo volumes; /** * destination needs to be provided, e.g. 
`{ "workspace": { "destination": * "/cluster-init-scripts/setup-datadog.sh" } }` */ - @JsonProperty("workspace") private WorkspaceStorageInfo workspace; public InitScriptInfoAndExecutionDetails setAbfss(Adlsgen2Info abfss) { @@ -214,4 +217,60 @@ public String toString() { .add("workspace", workspace) .toString(); } + + InitScriptInfoAndExecutionDetailsPb toPb() { + InitScriptInfoAndExecutionDetailsPb pb = new InitScriptInfoAndExecutionDetailsPb(); + pb.setAbfss(abfss); + pb.setDbfs(dbfs); + pb.setErrorMessage(errorMessage); + pb.setExecutionDurationSeconds(executionDurationSeconds); + pb.setFile(file); + pb.setGcs(gcs); + pb.setS3(s3); + pb.setStatus(status); + pb.setVolumes(volumes); + pb.setWorkspace(workspace); + + return pb; + } + + static InitScriptInfoAndExecutionDetails fromPb(InitScriptInfoAndExecutionDetailsPb pb) { + InitScriptInfoAndExecutionDetails model = new InitScriptInfoAndExecutionDetails(); + model.setAbfss(pb.getAbfss()); + model.setDbfs(pb.getDbfs()); + model.setErrorMessage(pb.getErrorMessage()); + model.setExecutionDurationSeconds(pb.getExecutionDurationSeconds()); + model.setFile(pb.getFile()); + model.setGcs(pb.getGcs()); + model.setS3(pb.getS3()); + model.setStatus(pb.getStatus()); + model.setVolumes(pb.getVolumes()); + model.setWorkspace(pb.getWorkspace()); + + return model; + } + + public static class InitScriptInfoAndExecutionDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + InitScriptInfoAndExecutionDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InitScriptInfoAndExecutionDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InitScriptInfoAndExecutionDetailsDeserializer + extends JsonDeserializer { + @Override + public InitScriptInfoAndExecutionDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InitScriptInfoAndExecutionDetailsPb pb = + mapper.readValue(p, InitScriptInfoAndExecutionDetailsPb.class); + return InitScriptInfoAndExecutionDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetailsPb.java new file mode 100755 index 000000000..55d631491 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetailsPb.java @@ -0,0 +1,181 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InitScriptInfoAndExecutionDetailsPb { + @JsonProperty("abfss") + private Adlsgen2Info abfss; + + @JsonProperty("dbfs") + private DbfsStorageInfo dbfs; + + @JsonProperty("error_message") + private String errorMessage; + + @JsonProperty("execution_duration_seconds") + private Long executionDurationSeconds; + + @JsonProperty("file") + private LocalFileInfo file; + + @JsonProperty("gcs") + private GcsStorageInfo gcs; + + @JsonProperty("s3") + private S3StorageInfo s3; + + @JsonProperty("status") + private InitScriptExecutionDetailsInitScriptExecutionStatus status; + + @JsonProperty("volumes") + private VolumesStorageInfo volumes; + + @JsonProperty("workspace") + private WorkspaceStorageInfo workspace; + + public InitScriptInfoAndExecutionDetailsPb setAbfss(Adlsgen2Info abfss) { + this.abfss = abfss; + return this; + } + + public Adlsgen2Info getAbfss() { + return abfss; + } + + public InitScriptInfoAndExecutionDetailsPb setDbfs(DbfsStorageInfo dbfs) { + this.dbfs = dbfs; + return this; + } + + public 
DbfsStorageInfo getDbfs() { + return dbfs; + } + + public InitScriptInfoAndExecutionDetailsPb setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public InitScriptInfoAndExecutionDetailsPb setExecutionDurationSeconds( + Long executionDurationSeconds) { + this.executionDurationSeconds = executionDurationSeconds; + return this; + } + + public Long getExecutionDurationSeconds() { + return executionDurationSeconds; + } + + public InitScriptInfoAndExecutionDetailsPb setFile(LocalFileInfo file) { + this.file = file; + return this; + } + + public LocalFileInfo getFile() { + return file; + } + + public InitScriptInfoAndExecutionDetailsPb setGcs(GcsStorageInfo gcs) { + this.gcs = gcs; + return this; + } + + public GcsStorageInfo getGcs() { + return gcs; + } + + public InitScriptInfoAndExecutionDetailsPb setS3(S3StorageInfo s3) { + this.s3 = s3; + return this; + } + + public S3StorageInfo getS3() { + return s3; + } + + public InitScriptInfoAndExecutionDetailsPb setStatus( + InitScriptExecutionDetailsInitScriptExecutionStatus status) { + this.status = status; + return this; + } + + public InitScriptExecutionDetailsInitScriptExecutionStatus getStatus() { + return status; + } + + public InitScriptInfoAndExecutionDetailsPb setVolumes(VolumesStorageInfo volumes) { + this.volumes = volumes; + return this; + } + + public VolumesStorageInfo getVolumes() { + return volumes; + } + + public InitScriptInfoAndExecutionDetailsPb setWorkspace(WorkspaceStorageInfo workspace) { + this.workspace = workspace; + return this; + } + + public WorkspaceStorageInfo getWorkspace() { + return workspace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InitScriptInfoAndExecutionDetailsPb that = (InitScriptInfoAndExecutionDetailsPb) o; + return Objects.equals(abfss, that.abfss) + && 
Objects.equals(dbfs, that.dbfs) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(executionDurationSeconds, that.executionDurationSeconds) + && Objects.equals(file, that.file) + && Objects.equals(gcs, that.gcs) + && Objects.equals(s3, that.s3) + && Objects.equals(status, that.status) + && Objects.equals(volumes, that.volumes) + && Objects.equals(workspace, that.workspace); + } + + @Override + public int hashCode() { + return Objects.hash( + abfss, + dbfs, + errorMessage, + executionDurationSeconds, + file, + gcs, + s3, + status, + volumes, + workspace); + } + + @Override + public String toString() { + return new ToStringer(InitScriptInfoAndExecutionDetailsPb.class) + .add("abfss", abfss) + .add("dbfs", dbfs) + .add("errorMessage", errorMessage) + .add("executionDurationSeconds", executionDurationSeconds) + .add("file", file) + .add("gcs", gcs) + .add("s3", s3) + .add("status", status) + .add("volumes", volumes) + .add("workspace", workspace) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoPb.java new file mode 100755 index 000000000..122f8492c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Config for an individual init script Next ID: 11 */ +@Generated +class InitScriptInfoPb { + @JsonProperty("abfss") + private Adlsgen2Info abfss; + + @JsonProperty("dbfs") + private DbfsStorageInfo dbfs; + + @JsonProperty("file") + private LocalFileInfo file; + + @JsonProperty("gcs") + private GcsStorageInfo gcs; + + @JsonProperty("s3") + private S3StorageInfo s3; + + @JsonProperty("volumes") + private VolumesStorageInfo volumes; + + @JsonProperty("workspace") + private WorkspaceStorageInfo workspace; + + public InitScriptInfoPb setAbfss(Adlsgen2Info abfss) { + this.abfss = abfss; + return this; + } + + public Adlsgen2Info getAbfss() { + return abfss; + } + + public InitScriptInfoPb setDbfs(DbfsStorageInfo dbfs) { + this.dbfs = dbfs; + return this; + } + + public DbfsStorageInfo getDbfs() { + return dbfs; + } + + public InitScriptInfoPb setFile(LocalFileInfo file) { + this.file = file; + return this; + } + + public LocalFileInfo getFile() { + return file; + } + + public InitScriptInfoPb setGcs(GcsStorageInfo gcs) { + this.gcs = gcs; + return this; + } + + public GcsStorageInfo getGcs() { + return gcs; + } + + public InitScriptInfoPb setS3(S3StorageInfo s3) { + this.s3 = s3; + return this; + } + + public S3StorageInfo getS3() { + return s3; + } + + public InitScriptInfoPb setVolumes(VolumesStorageInfo volumes) { + this.volumes = volumes; + return this; + } + + public VolumesStorageInfo getVolumes() { + return volumes; + } + + public InitScriptInfoPb setWorkspace(WorkspaceStorageInfo workspace) { + this.workspace = workspace; + return this; + } + + public WorkspaceStorageInfo getWorkspace() { + return workspace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + InitScriptInfoPb that = (InitScriptInfoPb) o; + return Objects.equals(abfss, that.abfss) + && Objects.equals(dbfs, that.dbfs) + && Objects.equals(file, that.file) + && Objects.equals(gcs, that.gcs) + && Objects.equals(s3, that.s3) + && Objects.equals(volumes, that.volumes) + && Objects.equals(workspace, that.workspace); + } + + @Override + public int hashCode() { + return Objects.hash(abfss, dbfs, file, gcs, s3, volumes, workspace); + } + + @Override + public String toString() { + return new ToStringer(InitScriptInfoPb.class) + .add("abfss", abfss) + .add("dbfs", dbfs) + .add("file", file) + .add("gcs", gcs) + .add("s3", s3) + .add("volumes", volumes) + .add("workspace", workspace) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java index 3f070a0bd..7d175b568 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
InstallLibraries.InstallLibrariesSerializer.class) +@JsonDeserialize(using = InstallLibraries.InstallLibrariesDeserializer.class) public class InstallLibraries { /** Unique identifier for the cluster on which to install these libraries. */ - @JsonProperty("cluster_id") private String clusterId; /** The libraries to install. */ - @JsonProperty("libraries") private Collection libraries; public InstallLibraries setClusterId(String clusterId) { @@ -56,4 +65,40 @@ public String toString() { .add("libraries", libraries) .toString(); } + + InstallLibrariesPb toPb() { + InstallLibrariesPb pb = new InstallLibrariesPb(); + pb.setClusterId(clusterId); + pb.setLibraries(libraries); + + return pb; + } + + static InstallLibraries fromPb(InstallLibrariesPb pb) { + InstallLibraries model = new InstallLibraries(); + model.setClusterId(pb.getClusterId()); + model.setLibraries(pb.getLibraries()); + + return model; + } + + public static class InstallLibrariesSerializer extends JsonSerializer { + @Override + public void serialize(InstallLibraries value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstallLibrariesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstallLibrariesDeserializer extends JsonDeserializer { + @Override + public InstallLibraries deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstallLibrariesPb pb = mapper.readValue(p, InstallLibrariesPb.class); + return InstallLibraries.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesPb.java new file mode 100755 index 000000000..0bc3c5da7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstallLibrariesPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("libraries") + private Collection libraries; + + public InstallLibrariesPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public InstallLibrariesPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstallLibrariesPb that = (InstallLibrariesPb) o; + return Objects.equals(clusterId, that.clusterId) && Objects.equals(libraries, that.libraries); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, libraries); + } + + @Override + public String toString() { + return new ToStringer(InstallLibrariesPb.class) + .add("clusterId", clusterId) + .add("libraries", libraries) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java index 58d55bb76..27f6389f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = InstallLibrariesResponse.InstallLibrariesResponseSerializer.class) +@JsonDeserialize(using = InstallLibrariesResponse.InstallLibrariesResponseDeserializer.class) public class InstallLibrariesResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(InstallLibrariesResponse.class).toString(); } + + InstallLibrariesResponsePb toPb() { + InstallLibrariesResponsePb pb = new InstallLibrariesResponsePb(); + + return pb; + } + + static InstallLibrariesResponse fromPb(InstallLibrariesResponsePb pb) { + InstallLibrariesResponse model = new InstallLibrariesResponse(); + + return model; + } + + public static class InstallLibrariesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + InstallLibrariesResponse value, JsonGenerator gen, SerializerProvider 
provider) + throws IOException { + InstallLibrariesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstallLibrariesResponseDeserializer + extends JsonDeserializer { + @Override + public InstallLibrariesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstallLibrariesResponsePb pb = mapper.readValue(p, InstallLibrariesResponsePb.class); + return InstallLibrariesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponsePb.java new file mode 100755 index 000000000..948cdfad1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class InstallLibrariesResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(InstallLibrariesResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java index 94ea72be1..4e646d653 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = InstancePoolAccessControlRequest.InstancePoolAccessControlRequestSerializer.class) +@JsonDeserialize( + using = 
InstancePoolAccessControlRequest.InstancePoolAccessControlRequestDeserializer.class) public class InstancePoolAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public InstancePoolAccessControlRequest setGroupName(String groupName) { @@ -87,4 +96,48 @@ public String toString() { .add("userName", userName) .toString(); } + + InstancePoolAccessControlRequestPb toPb() { + InstancePoolAccessControlRequestPb pb = new InstancePoolAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static InstancePoolAccessControlRequest fromPb(InstancePoolAccessControlRequestPb pb) { + InstancePoolAccessControlRequest model = new InstancePoolAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class InstancePoolAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public InstancePoolAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in 
the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolAccessControlRequestPb pb = + mapper.readValue(p, InstancePoolAccessControlRequestPb.class); + return InstancePoolAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequestPb.java new file mode 100755 index 000000000..1da6b97a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InstancePoolAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private InstancePoolPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public InstancePoolAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public InstancePoolAccessControlRequestPb setPermissionLevel( + InstancePoolPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public InstancePoolPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public InstancePoolAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String 
getServicePrincipalName() { + return servicePrincipalName; + } + + public InstancePoolAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolAccessControlRequestPb that = (InstancePoolAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java index d40d6d8c5..aa5fc8325 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java @@ -4,30 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = InstancePoolAccessControlResponse.InstancePoolAccessControlResponseSerializer.class) +@JsonDeserialize( + using = InstancePoolAccessControlResponse.InstancePoolAccessControlResponseDeserializer.class) public class InstancePoolAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public InstancePoolAccessControlResponse setAllPermissions( @@ -103,4 +111,50 @@ public String toString() { .add("userName", userName) .toString(); } + + InstancePoolAccessControlResponsePb toPb() { + InstancePoolAccessControlResponsePb pb = new InstancePoolAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static InstancePoolAccessControlResponse fromPb(InstancePoolAccessControlResponsePb pb) { + InstancePoolAccessControlResponse model = new InstancePoolAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + 
model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class InstancePoolAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public InstancePoolAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolAccessControlResponsePb pb = + mapper.readValue(p, InstancePoolAccessControlResponsePb.class); + return InstancePoolAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponsePb.java new file mode 100755 index 000000000..367c699be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstancePoolAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public InstancePoolAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public InstancePoolAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public InstancePoolAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public InstancePoolAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public InstancePoolAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolAccessControlResponsePb that = (InstancePoolAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && 
Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java index f2fd58676..36a582bd0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = InstancePoolAndStats.InstancePoolAndStatsSerializer.class) 
+@JsonDeserialize(using = InstancePoolAndStats.InstancePoolAndStatsDeserializer.class) public class InstancePoolAndStats { /** * Attributes related to instance pools running on Amazon Web Services. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private InstancePoolAwsAttributes awsAttributes; /** * Attributes related to instance pools running on Azure. If not specified at pool creation, a set * of default values will be used. */ - @JsonProperty("azure_attributes") private InstancePoolAzureAttributes azureAttributes; /** @@ -31,7 +40,6 @@ public class InstancePoolAndStats { * *

- Currently, Databricks allows at most 45 custom tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -45,11 +53,9 @@ public class InstancePoolAndStats { * *

- InstancePoolId: */ - @JsonProperty("default_tags") private Map defaultTags; /** Defines the specification of the disks that will be attached to all spark containers. */ - @JsonProperty("disk_spec") private DiskSpec diskSpec; /** @@ -58,14 +64,12 @@ public class InstancePoolAndStats { * feature requires specific AWS permissions to function correctly - refer to the User Guide for * more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** * Attributes related to instance pools running on Google Cloud Platform. If not specified at pool * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private InstancePoolGcpAttributes gcpAttributes; /** @@ -75,18 +79,15 @@ public class InstancePoolAndStats { * must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove * idle instances from the cache if min cache size could still hold. */ - @JsonProperty("idle_instance_autotermination_minutes") private Long idleInstanceAutoterminationMinutes; /** Canonical unique identifier for the pool. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** * Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 * characters. */ - @JsonProperty("instance_pool_name") private String instancePoolName; /** @@ -94,11 +95,9 @@ public class InstancePoolAndStats { * clusters and idle instances. Clusters that require further instance provisioning will fail * during upsize requests. */ - @JsonProperty("max_capacity") private Long maxCapacity; /** Minimum number of idle instances to keep in the instance pool */ - @JsonProperty("min_idle_instances") private Long minIdleInstances; /** @@ -107,11 +106,9 @@ public class InstancePoolAndStats { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. 
*/ - @JsonProperty("node_type_id") private String nodeTypeId; /** Custom Docker Image BYOC */ - @JsonProperty("preloaded_docker_images") private Collection preloadedDockerImages; /** @@ -119,19 +116,15 @@ public class InstancePoolAndStats { * started with the preloaded Spark version will start faster. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("preloaded_spark_versions") private Collection preloadedSparkVersions; /** Current state of the instance pool. */ - @JsonProperty("state") private InstancePoolState state; /** Usage statistics about the instance pool. */ - @JsonProperty("stats") private InstancePoolStats stats; /** Status of failed pending instances in the pool. */ - @JsonProperty("status") private InstancePoolStatus status; public InstancePoolAndStats setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { @@ -370,4 +363,74 @@ public String toString() { .add("status", status) .toString(); } + + InstancePoolAndStatsPb toPb() { + InstancePoolAndStatsPb pb = new InstancePoolAndStatsPb(); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setCustomTags(customTags); + pb.setDefaultTags(defaultTags); + pb.setDiskSpec(diskSpec); + pb.setEnableElasticDisk(enableElasticDisk); + pb.setGcpAttributes(gcpAttributes); + pb.setIdleInstanceAutoterminationMinutes(idleInstanceAutoterminationMinutes); + pb.setInstancePoolId(instancePoolId); + pb.setInstancePoolName(instancePoolName); + pb.setMaxCapacity(maxCapacity); + pb.setMinIdleInstances(minIdleInstances); + pb.setNodeTypeId(nodeTypeId); + pb.setPreloadedDockerImages(preloadedDockerImages); + pb.setPreloadedSparkVersions(preloadedSparkVersions); + pb.setState(state); + pb.setStats(stats); + pb.setStatus(status); + + return pb; + } + + static InstancePoolAndStats fromPb(InstancePoolAndStatsPb pb) { + InstancePoolAndStats model = new InstancePoolAndStats(); + model.setAwsAttributes(pb.getAwsAttributes()); 
+ model.setAzureAttributes(pb.getAzureAttributes()); + model.setCustomTags(pb.getCustomTags()); + model.setDefaultTags(pb.getDefaultTags()); + model.setDiskSpec(pb.getDiskSpec()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setIdleInstanceAutoterminationMinutes(pb.getIdleInstanceAutoterminationMinutes()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setInstancePoolName(pb.getInstancePoolName()); + model.setMaxCapacity(pb.getMaxCapacity()); + model.setMinIdleInstances(pb.getMinIdleInstances()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setPreloadedDockerImages(pb.getPreloadedDockerImages()); + model.setPreloadedSparkVersions(pb.getPreloadedSparkVersions()); + model.setState(pb.getState()); + model.setStats(pb.getStats()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class InstancePoolAndStatsSerializer extends JsonSerializer { + @Override + public void serialize( + InstancePoolAndStats value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolAndStatsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolAndStatsDeserializer + extends JsonDeserializer { + @Override + public InstancePoolAndStats deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolAndStatsPb pb = mapper.readValue(p, InstancePoolAndStatsPb.class); + return InstancePoolAndStats.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStatsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStatsPb.java new file mode 100755 index 000000000..42a41c568 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStatsPb.java @@ -0,0 +1,305 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class InstancePoolAndStatsPb { + @JsonProperty("aws_attributes") + private InstancePoolAwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private InstancePoolAzureAttributes azureAttributes; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("default_tags") + private Map defaultTags; + + @JsonProperty("disk_spec") + private DiskSpec diskSpec; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("gcp_attributes") + private InstancePoolGcpAttributes gcpAttributes; + + @JsonProperty("idle_instance_autotermination_minutes") + private Long idleInstanceAutoterminationMinutes; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("instance_pool_name") + private String instancePoolName; + + @JsonProperty("max_capacity") + private Long maxCapacity; + + @JsonProperty("min_idle_instances") + private Long minIdleInstances; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + 
@JsonProperty("preloaded_docker_images") + private Collection preloadedDockerImages; + + @JsonProperty("preloaded_spark_versions") + private Collection preloadedSparkVersions; + + @JsonProperty("state") + private InstancePoolState state; + + @JsonProperty("stats") + private InstancePoolStats stats; + + @JsonProperty("status") + private InstancePoolStatus status; + + public InstancePoolAndStatsPb setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public InstancePoolAwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public InstancePoolAndStatsPb setAzureAttributes(InstancePoolAzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public InstancePoolAzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public InstancePoolAndStatsPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public InstancePoolAndStatsPb setDefaultTags(Map defaultTags) { + this.defaultTags = defaultTags; + return this; + } + + public Map getDefaultTags() { + return defaultTags; + } + + public InstancePoolAndStatsPb setDiskSpec(DiskSpec diskSpec) { + this.diskSpec = diskSpec; + return this; + } + + public DiskSpec getDiskSpec() { + return diskSpec; + } + + public InstancePoolAndStatsPb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public InstancePoolAndStatsPb setGcpAttributes(InstancePoolGcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public InstancePoolGcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public InstancePoolAndStatsPb setIdleInstanceAutoterminationMinutes( + Long idleInstanceAutoterminationMinutes) { + 
this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; + return this; + } + + public Long getIdleInstanceAutoterminationMinutes() { + return idleInstanceAutoterminationMinutes; + } + + public InstancePoolAndStatsPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public InstancePoolAndStatsPb setInstancePoolName(String instancePoolName) { + this.instancePoolName = instancePoolName; + return this; + } + + public String getInstancePoolName() { + return instancePoolName; + } + + public InstancePoolAndStatsPb setMaxCapacity(Long maxCapacity) { + this.maxCapacity = maxCapacity; + return this; + } + + public Long getMaxCapacity() { + return maxCapacity; + } + + public InstancePoolAndStatsPb setMinIdleInstances(Long minIdleInstances) { + this.minIdleInstances = minIdleInstances; + return this; + } + + public Long getMinIdleInstances() { + return minIdleInstances; + } + + public InstancePoolAndStatsPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public InstancePoolAndStatsPb setPreloadedDockerImages( + Collection preloadedDockerImages) { + this.preloadedDockerImages = preloadedDockerImages; + return this; + } + + public Collection getPreloadedDockerImages() { + return preloadedDockerImages; + } + + public InstancePoolAndStatsPb setPreloadedSparkVersions( + Collection preloadedSparkVersions) { + this.preloadedSparkVersions = preloadedSparkVersions; + return this; + } + + public Collection getPreloadedSparkVersions() { + return preloadedSparkVersions; + } + + public InstancePoolAndStatsPb setState(InstancePoolState state) { + this.state = state; + return this; + } + + public InstancePoolState getState() { + return state; + } + + public InstancePoolAndStatsPb setStats(InstancePoolStats stats) { + this.stats = stats; + return 
this; + } + + public InstancePoolStats getStats() { + return stats; + } + + public InstancePoolAndStatsPb setStatus(InstancePoolStatus status) { + this.status = status; + return this; + } + + public InstancePoolStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolAndStatsPb that = (InstancePoolAndStatsPb) o; + return Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(customTags, that.customTags) + && Objects.equals(defaultTags, that.defaultTags) + && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals( + idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(instancePoolName, that.instancePoolName) + && Objects.equals(maxCapacity, that.maxCapacity) + && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) + && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) + && Objects.equals(state, that.state) + && Objects.equals(stats, that.stats) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash( + awsAttributes, + azureAttributes, + customTags, + defaultTags, + diskSpec, + enableElasticDisk, + gcpAttributes, + idleInstanceAutoterminationMinutes, + instancePoolId, + instancePoolName, + maxCapacity, + minIdleInstances, + nodeTypeId, + preloadedDockerImages, + preloadedSparkVersions, + state, + stats, + status); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolAndStatsPb.class) + .add("awsAttributes", 
awsAttributes) + .add("azureAttributes", azureAttributes) + .add("customTags", customTags) + .add("defaultTags", defaultTags) + .add("diskSpec", diskSpec) + .add("enableElasticDisk", enableElasticDisk) + .add("gcpAttributes", gcpAttributes) + .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) + .add("instancePoolId", instancePoolId) + .add("instancePoolName", instancePoolName) + .add("maxCapacity", maxCapacity) + .add("minIdleInstances", minIdleInstances) + .add("nodeTypeId", nodeTypeId) + .add("preloadedDockerImages", preloadedDockerImages) + .add("preloadedSparkVersions", preloadedSparkVersions) + .add("state", state) + .add("stats", stats) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java index 2520eca50..42c1f113d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during instance pool creation which are related to Amazon 
Web Services. */ @Generated +@JsonSerialize(using = InstancePoolAwsAttributes.InstancePoolAwsAttributesSerializer.class) +@JsonDeserialize(using = InstancePoolAwsAttributes.InstancePoolAwsAttributesDeserializer.class) public class InstancePoolAwsAttributes { /** Availability type used for the spot nodes. */ - @JsonProperty("availability") private InstancePoolAwsAttributesAvailability availability; /** @@ -24,7 +34,6 @@ public class InstancePoolAwsAttributes { * matches this field will be considered. Note that, for safety, we enforce this field to be no * more than 10000. */ - @JsonProperty("spot_bid_price_percent") private Long spotBidPricePercent; /** @@ -35,7 +44,6 @@ public class InstancePoolAwsAttributes { * and if not specified, a default zone will be used. The list of available zones as well as the * default value can be found by using the `List Zones` method. */ - @JsonProperty("zone_id") private String zoneId; public InstancePoolAwsAttributes setAvailability( @@ -89,4 +97,45 @@ public String toString() { .add("zoneId", zoneId) .toString(); } + + InstancePoolAwsAttributesPb toPb() { + InstancePoolAwsAttributesPb pb = new InstancePoolAwsAttributesPb(); + pb.setAvailability(availability); + pb.setSpotBidPricePercent(spotBidPricePercent); + pb.setZoneId(zoneId); + + return pb; + } + + static InstancePoolAwsAttributes fromPb(InstancePoolAwsAttributesPb pb) { + InstancePoolAwsAttributes model = new InstancePoolAwsAttributes(); + model.setAvailability(pb.getAvailability()); + model.setSpotBidPricePercent(pb.getSpotBidPricePercent()); + model.setZoneId(pb.getZoneId()); + + return model; + } + + public static class InstancePoolAwsAttributesSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolAwsAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolAwsAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
InstancePoolAwsAttributesDeserializer + extends JsonDeserializer { + @Override + public InstancePoolAwsAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolAwsAttributesPb pb = mapper.readValue(p, InstancePoolAwsAttributesPb.class); + return InstancePoolAwsAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesPb.java new file mode 100755 index 000000000..2deb898e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during instance pool creation which are related to Amazon Web Services. 
*/ +@Generated +class InstancePoolAwsAttributesPb { + @JsonProperty("availability") + private InstancePoolAwsAttributesAvailability availability; + + @JsonProperty("spot_bid_price_percent") + private Long spotBidPricePercent; + + @JsonProperty("zone_id") + private String zoneId; + + public InstancePoolAwsAttributesPb setAvailability( + InstancePoolAwsAttributesAvailability availability) { + this.availability = availability; + return this; + } + + public InstancePoolAwsAttributesAvailability getAvailability() { + return availability; + } + + public InstancePoolAwsAttributesPb setSpotBidPricePercent(Long spotBidPricePercent) { + this.spotBidPricePercent = spotBidPricePercent; + return this; + } + + public Long getSpotBidPricePercent() { + return spotBidPricePercent; + } + + public InstancePoolAwsAttributesPb setZoneId(String zoneId) { + this.zoneId = zoneId; + return this; + } + + public String getZoneId() { + return zoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolAwsAttributesPb that = (InstancePoolAwsAttributesPb) o; + return Objects.equals(availability, that.availability) + && Objects.equals(spotBidPricePercent, that.spotBidPricePercent) + && Objects.equals(zoneId, that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash(availability, spotBidPricePercent, zoneId); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolAwsAttributesPb.class) + .add("availability", availability) + .add("spotBidPricePercent", spotBidPricePercent) + .add("zoneId", zoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java index da52c8f92..20e851ea8 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during instance pool creation which are related to Azure. */ @Generated +@JsonSerialize(using = InstancePoolAzureAttributes.InstancePoolAzureAttributesSerializer.class) +@JsonDeserialize(using = InstancePoolAzureAttributes.InstancePoolAzureAttributesDeserializer.class) public class InstancePoolAzureAttributes { /** Availability type used for the spot nodes. */ - @JsonProperty("availability") private InstancePoolAzureAttributesAvailability availability; /** @@ -20,7 +30,6 @@ public class InstancePoolAzureAttributes { * won't be evicted based on price. The price for the VM will be the current price for spot or the * price for a standard VM, which ever is less, as long as there is capacity and quota available. 
*/ - @JsonProperty("spot_bid_max_price") private Double spotBidMaxPrice; public InstancePoolAzureAttributes setAvailability( @@ -63,4 +72,43 @@ public String toString() { .add("spotBidMaxPrice", spotBidMaxPrice) .toString(); } + + InstancePoolAzureAttributesPb toPb() { + InstancePoolAzureAttributesPb pb = new InstancePoolAzureAttributesPb(); + pb.setAvailability(availability); + pb.setSpotBidMaxPrice(spotBidMaxPrice); + + return pb; + } + + static InstancePoolAzureAttributes fromPb(InstancePoolAzureAttributesPb pb) { + InstancePoolAzureAttributes model = new InstancePoolAzureAttributes(); + model.setAvailability(pb.getAvailability()); + model.setSpotBidMaxPrice(pb.getSpotBidMaxPrice()); + + return model; + } + + public static class InstancePoolAzureAttributesSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolAzureAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolAzureAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolAzureAttributesDeserializer + extends JsonDeserializer { + @Override + public InstancePoolAzureAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolAzureAttributesPb pb = mapper.readValue(p, InstancePoolAzureAttributesPb.class); + return InstancePoolAzureAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesPb.java new file mode 100755 index 000000000..d33d3b446 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during instance pool creation which are related to Azure. 
*/ +@Generated +class InstancePoolAzureAttributesPb { + @JsonProperty("availability") + private InstancePoolAzureAttributesAvailability availability; + + @JsonProperty("spot_bid_max_price") + private Double spotBidMaxPrice; + + public InstancePoolAzureAttributesPb setAvailability( + InstancePoolAzureAttributesAvailability availability) { + this.availability = availability; + return this; + } + + public InstancePoolAzureAttributesAvailability getAvailability() { + return availability; + } + + public InstancePoolAzureAttributesPb setSpotBidMaxPrice(Double spotBidMaxPrice) { + this.spotBidMaxPrice = spotBidMaxPrice; + return this; + } + + public Double getSpotBidMaxPrice() { + return spotBidMaxPrice; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolAzureAttributesPb that = (InstancePoolAzureAttributesPb) o; + return Objects.equals(availability, that.availability) + && Objects.equals(spotBidMaxPrice, that.spotBidMaxPrice); + } + + @Override + public int hashCode() { + return Objects.hash(availability, spotBidMaxPrice); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolAzureAttributesPb.class) + .add("availability", availability) + .add("spotBidMaxPrice", spotBidMaxPrice) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java index a97e496ad..2d9f54cff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes set during instance pool creation which are related to GCP. */ @Generated +@JsonSerialize(using = InstancePoolGcpAttributes.InstancePoolGcpAttributesSerializer.class) +@JsonDeserialize(using = InstancePoolGcpAttributes.InstancePoolGcpAttributesDeserializer.class) public class InstancePoolGcpAttributes { /** * This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or * preemptible VMs with a fallback to on-demand VMs if the former is unavailable. */ - @JsonProperty("gcp_availability") private GcpAvailability gcpAvailability; /** @@ -25,7 +35,6 @@ public class InstancePoolGcpAttributes { *

[GCP documentation]: * https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds */ - @JsonProperty("local_ssd_count") private Long localSsdCount; /** @@ -42,7 +51,6 @@ public class InstancePoolGcpAttributes { * *

If empty, Databricks picks an availability zone to schedule the cluster on. */ - @JsonProperty("zone_id") private String zoneId; public InstancePoolGcpAttributes setGcpAvailability(GcpAvailability gcpAvailability) { @@ -95,4 +103,45 @@ public String toString() { .add("zoneId", zoneId) .toString(); } + + InstancePoolGcpAttributesPb toPb() { + InstancePoolGcpAttributesPb pb = new InstancePoolGcpAttributesPb(); + pb.setGcpAvailability(gcpAvailability); + pb.setLocalSsdCount(localSsdCount); + pb.setZoneId(zoneId); + + return pb; + } + + static InstancePoolGcpAttributes fromPb(InstancePoolGcpAttributesPb pb) { + InstancePoolGcpAttributes model = new InstancePoolGcpAttributes(); + model.setGcpAvailability(pb.getGcpAvailability()); + model.setLocalSsdCount(pb.getLocalSsdCount()); + model.setZoneId(pb.getZoneId()); + + return model; + } + + public static class InstancePoolGcpAttributesSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolGcpAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolGcpAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolGcpAttributesDeserializer + extends JsonDeserializer { + @Override + public InstancePoolGcpAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolGcpAttributesPb pb = mapper.readValue(p, InstancePoolGcpAttributesPb.class); + return InstancePoolGcpAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributesPb.java new file mode 100755 index 000000000..306b86996 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributesPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Attributes set during instance pool creation which are related to GCP. */ +@Generated +class InstancePoolGcpAttributesPb { + @JsonProperty("gcp_availability") + private GcpAvailability gcpAvailability; + + @JsonProperty("local_ssd_count") + private Long localSsdCount; + + @JsonProperty("zone_id") + private String zoneId; + + public InstancePoolGcpAttributesPb setGcpAvailability(GcpAvailability gcpAvailability) { + this.gcpAvailability = gcpAvailability; + return this; + } + + public GcpAvailability getGcpAvailability() { + return gcpAvailability; + } + + public InstancePoolGcpAttributesPb setLocalSsdCount(Long localSsdCount) { + this.localSsdCount = localSsdCount; + return this; + } + + public Long getLocalSsdCount() { + return localSsdCount; + } + + public InstancePoolGcpAttributesPb setZoneId(String zoneId) { + this.zoneId = zoneId; + return this; + } + + public String getZoneId() { + return zoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
InstancePoolGcpAttributesPb that = (InstancePoolGcpAttributesPb) o; + return Objects.equals(gcpAvailability, that.gcpAvailability) + && Objects.equals(localSsdCount, that.localSsdCount) + && Objects.equals(zoneId, that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash(gcpAvailability, localSsdCount, zoneId); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolGcpAttributesPb.class) + .add("gcpAvailability", gcpAvailability) + .add("localSsdCount", localSsdCount) + .add("zoneId", zoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java index 7d00ebac0..ede0bb491 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = InstancePoolPermission.InstancePoolPermissionSerializer.class) +@JsonDeserialize(using = InstancePoolPermission.InstancePoolPermissionDeserializer.class) public class 
InstancePoolPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; public InstancePoolPermission setInherited(Boolean inherited) { @@ -72,4 +80,45 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + InstancePoolPermissionPb toPb() { + InstancePoolPermissionPb pb = new InstancePoolPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static InstancePoolPermission fromPb(InstancePoolPermissionPb pb) { + InstancePoolPermission model = new InstancePoolPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class InstancePoolPermissionSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolPermissionDeserializer + extends JsonDeserializer { + @Override + public InstancePoolPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolPermissionPb pb = mapper.readValue(p, InstancePoolPermissionPb.class); + return InstancePoolPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionPb.java new file mode 100755 index 000000000..3706ffb08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstancePoolPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private InstancePoolPermissionLevel permissionLevel; + + public InstancePoolPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public InstancePoolPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public InstancePoolPermissionPb setPermissionLevel(InstancePoolPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public InstancePoolPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + InstancePoolPermissionPb that = (InstancePoolPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissions.java index 1e886702e..60af0dd2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = InstancePoolPermissions.InstancePoolPermissionsSerializer.class) +@JsonDeserialize(using = 
InstancePoolPermissions.InstancePoolPermissionsDeserializer.class) public class InstancePoolPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public InstancePoolPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + InstancePoolPermissionsPb toPb() { + InstancePoolPermissionsPb pb = new InstancePoolPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static InstancePoolPermissions fromPb(InstancePoolPermissionsPb pb) { + InstancePoolPermissions model = new InstancePoolPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class InstancePoolPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolPermissionsDeserializer + extends JsonDeserializer { + @Override + public InstancePoolPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolPermissionsPb pb = mapper.readValue(p, InstancePoolPermissionsPb.class); + return InstancePoolPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java index 9b2ede0ca..638f000cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = InstancePoolPermissionsDescription.InstancePoolPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = InstancePoolPermissionsDescription.InstancePoolPermissionsDescriptionDeserializer.class) public class InstancePoolPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; public InstancePoolPermissionsDescription setDescription(String description) { @@ -57,4 +68,44 @@ 
public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + InstancePoolPermissionsDescriptionPb toPb() { + InstancePoolPermissionsDescriptionPb pb = new InstancePoolPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static InstancePoolPermissionsDescription fromPb(InstancePoolPermissionsDescriptionPb pb) { + InstancePoolPermissionsDescription model = new InstancePoolPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class InstancePoolPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public InstancePoolPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolPermissionsDescriptionPb pb = + mapper.readValue(p, InstancePoolPermissionsDescriptionPb.class); + return InstancePoolPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescriptionPb.java new file mode 100755 index 000000000..c6ac836d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InstancePoolPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private InstancePoolPermissionLevel permissionLevel; + + public InstancePoolPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public InstancePoolPermissionsDescriptionPb setPermissionLevel( + InstancePoolPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public InstancePoolPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolPermissionsDescriptionPb that = (InstancePoolPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } 
+ + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsPb.java new file mode 100755 index 000000000..dfaf8477d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstancePoolPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public InstancePoolPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public InstancePoolPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public InstancePoolPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + InstancePoolPermissionsPb that = (InstancePoolPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequest.java index e812333ac..5a64d9b8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequest.java @@ -4,19 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = 
InstancePoolPermissionsRequest.InstancePoolPermissionsRequestSerializer.class) +@JsonDeserialize( + using = InstancePoolPermissionsRequest.InstancePoolPermissionsRequestDeserializer.class) public class InstancePoolPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The instance pool for which to get or manage permissions. */ - @JsonIgnore private String instancePoolId; + private String instancePoolId; public InstancePoolPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +69,44 @@ public String toString() { .add("instancePoolId", instancePoolId) .toString(); } + + InstancePoolPermissionsRequestPb toPb() { + InstancePoolPermissionsRequestPb pb = new InstancePoolPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setInstancePoolId(instancePoolId); + + return pb; + } + + static InstancePoolPermissionsRequest fromPb(InstancePoolPermissionsRequestPb pb) { + InstancePoolPermissionsRequest model = new InstancePoolPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setInstancePoolId(pb.getInstancePoolId()); + + return model; + } + + public static class InstancePoolPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + InstancePoolPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public InstancePoolPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolPermissionsRequestPb pb = + mapper.readValue(p, InstancePoolPermissionsRequestPb.class); + return InstancePoolPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequestPb.java new file mode 100755 index 000000000..e3e20e016 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstancePoolPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String instancePoolId; + + public InstancePoolPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public InstancePoolPermissionsRequestPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolPermissionsRequestPb that = (InstancePoolPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(instancePoolId, 
that.instancePoolId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, instancePoolId); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("instancePoolId", instancePoolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStats.java index cf01f3c19..85a00d790 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStats.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = InstancePoolStats.InstancePoolStatsSerializer.class) +@JsonDeserialize(using = InstancePoolStats.InstancePoolStatsDeserializer.class) public class InstancePoolStats { /** Number of active instances in the pool that are NOT part of a cluster. */ - @JsonProperty("idle_count") private Long idleCount; /** Number of pending instances in the pool that are NOT part of a cluster. 
*/ - @JsonProperty("pending_idle_count") private Long pendingIdleCount; /** Number of pending instances in the pool that are part of a cluster. */ - @JsonProperty("pending_used_count") private Long pendingUsedCount; /** Number of active instances in the pool that are part of a cluster. */ - @JsonProperty("used_count") private Long usedCount; public InstancePoolStats setIdleCount(Long idleCount) { @@ -86,4 +93,44 @@ public String toString() { .add("usedCount", usedCount) .toString(); } + + InstancePoolStatsPb toPb() { + InstancePoolStatsPb pb = new InstancePoolStatsPb(); + pb.setIdleCount(idleCount); + pb.setPendingIdleCount(pendingIdleCount); + pb.setPendingUsedCount(pendingUsedCount); + pb.setUsedCount(usedCount); + + return pb; + } + + static InstancePoolStats fromPb(InstancePoolStatsPb pb) { + InstancePoolStats model = new InstancePoolStats(); + model.setIdleCount(pb.getIdleCount()); + model.setPendingIdleCount(pb.getPendingIdleCount()); + model.setPendingUsedCount(pb.getPendingUsedCount()); + model.setUsedCount(pb.getUsedCount()); + + return model; + } + + public static class InstancePoolStatsSerializer extends JsonSerializer { + @Override + public void serialize(InstancePoolStats value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolStatsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolStatsDeserializer extends JsonDeserializer { + @Override + public InstancePoolStats deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolStatsPb pb = mapper.readValue(p, InstancePoolStatsPb.class); + return InstancePoolStats.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatsPb.java new file mode 100755 index 000000000..75097496e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatsPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InstancePoolStatsPb { + @JsonProperty("idle_count") + private Long idleCount; + + @JsonProperty("pending_idle_count") + private Long pendingIdleCount; + + @JsonProperty("pending_used_count") + private Long pendingUsedCount; + + @JsonProperty("used_count") + private Long usedCount; + + public InstancePoolStatsPb setIdleCount(Long idleCount) { + this.idleCount = idleCount; + return this; + } + + public Long getIdleCount() { + return idleCount; + } + + public InstancePoolStatsPb setPendingIdleCount(Long pendingIdleCount) { + this.pendingIdleCount = pendingIdleCount; + return this; + } + + public Long getPendingIdleCount() { + return pendingIdleCount; + } + + public InstancePoolStatsPb setPendingUsedCount(Long pendingUsedCount) { + this.pendingUsedCount = pendingUsedCount; + return this; + } + + public Long getPendingUsedCount() { + return pendingUsedCount; + } + + public InstancePoolStatsPb setUsedCount(Long usedCount) { + this.usedCount = usedCount; + return this; + } + + public Long getUsedCount() { + return usedCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return 
true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolStatsPb that = (InstancePoolStatsPb) o; + return Objects.equals(idleCount, that.idleCount) + && Objects.equals(pendingIdleCount, that.pendingIdleCount) + && Objects.equals(pendingUsedCount, that.pendingUsedCount) + && Objects.equals(usedCount, that.usedCount); + } + + @Override + public int hashCode() { + return Objects.hash(idleCount, pendingIdleCount, pendingUsedCount, usedCount); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolStatsPb.class) + .add("idleCount", idleCount) + .add("pendingIdleCount", pendingIdleCount) + .add("pendingUsedCount", pendingUsedCount) + .add("usedCount", usedCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatus.java index 568239d21..3d75b0ef1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatus.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
InstancePoolStatus.InstancePoolStatusSerializer.class) +@JsonDeserialize(using = InstancePoolStatus.InstancePoolStatusDeserializer.class) public class InstancePoolStatus { /** * List of error messages for the failed pending instances. The pending_instance_errors follows * FIFO with maximum length of the min_idle of the pool. The pending_instance_errors is emptied * once the number of exiting available instances reaches the min_idle of the pool. */ - @JsonProperty("pending_instance_errors") private Collection pendingInstanceErrors; public InstancePoolStatus setPendingInstanceErrors( @@ -47,4 +57,38 @@ public String toString() { .add("pendingInstanceErrors", pendingInstanceErrors) .toString(); } + + InstancePoolStatusPb toPb() { + InstancePoolStatusPb pb = new InstancePoolStatusPb(); + pb.setPendingInstanceErrors(pendingInstanceErrors); + + return pb; + } + + static InstancePoolStatus fromPb(InstancePoolStatusPb pb) { + InstancePoolStatus model = new InstancePoolStatus(); + model.setPendingInstanceErrors(pb.getPendingInstanceErrors()); + + return model; + } + + public static class InstancePoolStatusSerializer extends JsonSerializer { + @Override + public void serialize(InstancePoolStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstancePoolStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstancePoolStatusDeserializer extends JsonDeserializer { + @Override + public InstancePoolStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstancePoolStatusPb pb = mapper.readValue(p, InstancePoolStatusPb.class); + return InstancePoolStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatusPb.java new file mode 100755 index 000000000..155eb4c05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolStatusPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstancePoolStatusPb { + @JsonProperty("pending_instance_errors") + private Collection pendingInstanceErrors; + + public InstancePoolStatusPb setPendingInstanceErrors( + Collection pendingInstanceErrors) { + this.pendingInstanceErrors = pendingInstanceErrors; + return this; + } + + public Collection getPendingInstanceErrors() { + return pendingInstanceErrors; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstancePoolStatusPb that = (InstancePoolStatusPb) o; + return Objects.equals(pendingInstanceErrors, that.pendingInstanceErrors); + } + + @Override + public int hashCode() { + return Objects.hash(pendingInstanceErrors); + } + + @Override + public String toString() { + return new ToStringer(InstancePoolStatusPb.class) + .add("pendingInstanceErrors", pendingInstanceErrors) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java index b80dd7710..31faa0135 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java @@ -21,7 +21,7 @@ public CreateInstancePoolResponse create(CreateInstancePool request) { String path = "/api/2.0/instance-pools/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateInstancePoolResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteInstancePool request) { String path = "/api/2.0/instance-pools/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteInstancePoolResponse.class); @@ -49,7 +49,7 @@ public void edit(EditInstancePool request) { String path = "/api/2.0/instance-pools/edit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditInstancePoolResponse.class); @@ -63,7 +63,7 @@ public GetInstancePool get(GetInstancePoolRequest request) { String path = "/api/2.0/instance-pools/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetInstancePool.class); } catch (IOException e) 
{ @@ -79,7 +79,7 @@ public GetInstancePoolPermissionLevelsResponse getPermissionLevels( "/api/2.0/permissions/instance-pools/%s/permissionLevels", request.getInstancePoolId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetInstancePoolPermissionLevelsResponse.class); } catch (IOException e) { @@ -93,7 +93,7 @@ public InstancePoolPermissions getPermissions(GetInstancePoolPermissionsRequest String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, InstancePoolPermissions.class); } catch (IOException e) { @@ -119,7 +119,7 @@ public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest req String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, InstancePoolPermissions.class); @@ -134,7 +134,7 @@ public InstancePoolPermissions updatePermissions(InstancePoolPermissionsRequest String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, InstancePoolPermissions.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfile.java index 4c2efe75b..ed146a554 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfile.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = InstanceProfile.InstanceProfileSerializer.class) +@JsonDeserialize(using = InstanceProfile.InstanceProfileDeserializer.class) public class InstanceProfile { /** * The AWS IAM role ARN of the role associated with the instance profile. This field is required @@ -18,11 +29,9 @@ public class InstanceProfile { * *

[Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html */ - @JsonProperty("iam_role_arn") private String iamRoleArn; /** The AWS ARN of the instance profile to register with Databricks. This field is required. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** @@ -31,7 +40,6 @@ public class InstanceProfile { * could assume a wide range of roles. Therefore it should always be used with authorization. This * field is optional, the default value is `false`. */ - @JsonProperty("is_meta_instance_profile") private Boolean isMetaInstanceProfile; public InstanceProfile setIamRoleArn(String iamRoleArn) { @@ -84,4 +92,42 @@ public String toString() { .add("isMetaInstanceProfile", isMetaInstanceProfile) .toString(); } + + InstanceProfilePb toPb() { + InstanceProfilePb pb = new InstanceProfilePb(); + pb.setIamRoleArn(iamRoleArn); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setIsMetaInstanceProfile(isMetaInstanceProfile); + + return pb; + } + + static InstanceProfile fromPb(InstanceProfilePb pb) { + InstanceProfile model = new InstanceProfile(); + model.setIamRoleArn(pb.getIamRoleArn()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setIsMetaInstanceProfile(pb.getIsMetaInstanceProfile()); + + return model; + } + + public static class InstanceProfileSerializer extends JsonSerializer { + @Override + public void serialize(InstanceProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstanceProfilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstanceProfileDeserializer extends JsonDeserializer { + @Override + public InstanceProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstanceProfilePb pb = mapper.readValue(p, InstanceProfilePb.class); + return InstanceProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilePb.java new file mode 100755 index 000000000..f5cfaa38a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InstanceProfilePb { + @JsonProperty("iam_role_arn") + private String iamRoleArn; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("is_meta_instance_profile") + private Boolean isMetaInstanceProfile; + + public InstanceProfilePb setIamRoleArn(String iamRoleArn) { + this.iamRoleArn = iamRoleArn; + return this; + } + + public String getIamRoleArn() { + return iamRoleArn; + } + + public InstanceProfilePb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public InstanceProfilePb setIsMetaInstanceProfile(Boolean isMetaInstanceProfile) { + this.isMetaInstanceProfile = isMetaInstanceProfile; + return this; + } + + public Boolean getIsMetaInstanceProfile() { + return isMetaInstanceProfile; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstanceProfilePb that = (InstanceProfilePb) o; + return Objects.equals(iamRoleArn, 
that.iamRoleArn) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(isMetaInstanceProfile, that.isMetaInstanceProfile); + } + + @Override + public int hashCode() { + return Objects.hash(iamRoleArn, instanceProfileArn, isMetaInstanceProfile); + } + + @Override + public String toString() { + return new ToStringer(InstanceProfilePb.class) + .add("iamRoleArn", iamRoleArn) + .add("instanceProfileArn", instanceProfileArn) + .add("isMetaInstanceProfile", isMetaInstanceProfile) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java index de759151d..0fee53290 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java @@ -21,7 +21,7 @@ public void add(AddInstanceProfile request) { String path = "/api/2.0/instance-profiles/add"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, AddResponse.class); @@ -35,7 +35,7 @@ public void edit(InstanceProfile request) { String path = "/api/2.0/instance-profiles/edit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditResponse.class); @@ -61,7 +61,7 @@ public void remove(RemoveInstanceProfile request) { String path = "/api/2.0/instance-profiles/remove"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, RemoveResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java index 2b8b647f2..8e670b4e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java @@ -33,7 +33,7 @@ public ClusterLibraryStatuses clusterStatus(ClusterStatus request) { String path = "/api/2.0/libraries/cluster-status"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ClusterLibraryStatuses.class); } catch (IOException e) { @@ -46,7 +46,7 @@ public void install(InstallLibraries request) { String path = "/api/2.0/libraries/install"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, InstallLibrariesResponse.class); @@ -60,7 +60,7 @@ public void uninstall(UninstallLibraries request) { String path = "/api/2.0/libraries/uninstall"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UninstallLibrariesResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java index f1be94bae..9d3ba0904 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Library.LibrarySerializer.class) +@JsonDeserialize(using = Library.LibraryDeserializer.class) public class Library { /** Specification of a CRAN library to be installed as part of the library */ - @JsonProperty("cran") private RCranLibrary cran; /** * Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is * not supported in Databricks Runtime 14.0 and above. */ - @JsonProperty("egg") private String egg; /** @@ -27,18 +36,15 @@ public class Library { * is used, please make sure the cluster has read access on the library. You may need to launch * the cluster with an IAM role to access the S3 URI. */ - @JsonProperty("jar") private String jar; /** * Specification of a maven library to be installed. For example: `{ "coordinates": * "org.jsoup:jsoup:1.7.2" }` */ - @JsonProperty("maven") private MavenLibrary maven; /** Specification of a PyPi library to be installed. 
For example: `{ "package": "simplejson" }` */ - @JsonProperty("pypi") private PythonPyPiLibrary pypi; /** @@ -46,7 +52,6 @@ public class Library { * paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }` * or `{ "requirements" : "/Volumes/path/to/requirements.txt" }` */ - @JsonProperty("requirements") private String requirements; /** @@ -56,7 +61,6 @@ public class Library { * is used, please make sure the cluster has read access on the library. You may need to launch * the cluster with an IAM role to access the S3 URI. */ - @JsonProperty("whl") private String whl; public Library setCran(RCranLibrary cran) { @@ -153,4 +157,49 @@ public String toString() { .add("whl", whl) .toString(); } + + LibraryPb toPb() { + LibraryPb pb = new LibraryPb(); + pb.setCran(cran); + pb.setEgg(egg); + pb.setJar(jar); + pb.setMaven(maven); + pb.setPypi(pypi); + pb.setRequirements(requirements); + pb.setWhl(whl); + + return pb; + } + + static Library fromPb(LibraryPb pb) { + Library model = new Library(); + model.setCran(pb.getCran()); + model.setEgg(pb.getEgg()); + model.setJar(pb.getJar()); + model.setMaven(pb.getMaven()); + model.setPypi(pb.getPypi()); + model.setRequirements(pb.getRequirements()); + model.setWhl(pb.getWhl()); + + return model; + } + + public static class LibrarySerializer extends JsonSerializer { + @Override + public void serialize(Library value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LibraryDeserializer extends JsonDeserializer { + @Override + public Library deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LibraryPb pb = mapper.readValue(p, LibraryPb.class); + return Library.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java index 8a4a0b6c0..d6f9ee2aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java @@ -4,27 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The status of the library on a specific cluster. */ @Generated +@JsonSerialize(using = LibraryFullStatus.LibraryFullStatusSerializer.class) +@JsonDeserialize(using = LibraryFullStatus.LibraryFullStatusDeserializer.class) public class LibraryFullStatus { /** Whether the library was set to be installed on all clusters via the libraries UI. */ - @JsonProperty("is_library_for_all_clusters") private Boolean isLibraryForAllClusters; /** Unique identifier for the library. */ - @JsonProperty("library") private Library library; /** All the info and warning messages that have occurred so far for this library. 
*/ - @JsonProperty("messages") private Collection messages; /** Status of installing the library on the cluster. */ - @JsonProperty("status") private LibraryInstallStatus status; public LibraryFullStatus setIsLibraryForAllClusters(Boolean isLibraryForAllClusters) { @@ -88,4 +95,44 @@ public String toString() { .add("status", status) .toString(); } + + LibraryFullStatusPb toPb() { + LibraryFullStatusPb pb = new LibraryFullStatusPb(); + pb.setIsLibraryForAllClusters(isLibraryForAllClusters); + pb.setLibrary(library); + pb.setMessages(messages); + pb.setStatus(status); + + return pb; + } + + static LibraryFullStatus fromPb(LibraryFullStatusPb pb) { + LibraryFullStatus model = new LibraryFullStatus(); + model.setIsLibraryForAllClusters(pb.getIsLibraryForAllClusters()); + model.setLibrary(pb.getLibrary()); + model.setMessages(pb.getMessages()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class LibraryFullStatusSerializer extends JsonSerializer { + @Override + public void serialize(LibraryFullStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LibraryFullStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LibraryFullStatusDeserializer extends JsonDeserializer { + @Override + public LibraryFullStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LibraryFullStatusPb pb = mapper.readValue(p, LibraryFullStatusPb.class); + return LibraryFullStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusPb.java new file mode 100755 index 000000000..0f058fa96 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The status of the library on a specific cluster. */ +@Generated +class LibraryFullStatusPb { + @JsonProperty("is_library_for_all_clusters") + private Boolean isLibraryForAllClusters; + + @JsonProperty("library") + private Library library; + + @JsonProperty("messages") + private Collection messages; + + @JsonProperty("status") + private LibraryInstallStatus status; + + public LibraryFullStatusPb setIsLibraryForAllClusters(Boolean isLibraryForAllClusters) { + this.isLibraryForAllClusters = isLibraryForAllClusters; + return this; + } + + public Boolean getIsLibraryForAllClusters() { + return isLibraryForAllClusters; + } + + public LibraryFullStatusPb setLibrary(Library library) { + this.library = library; + return this; + } + + public Library getLibrary() { + return library; + } + + public LibraryFullStatusPb setMessages(Collection messages) { + this.messages = messages; + return this; + } + + public Collection getMessages() { + return messages; + } + + public LibraryFullStatusPb setStatus(LibraryInstallStatus status) { + this.status = status; + return this; + } + + public 
LibraryInstallStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LibraryFullStatusPb that = (LibraryFullStatusPb) o; + return Objects.equals(isLibraryForAllClusters, that.isLibraryForAllClusters) + && Objects.equals(library, that.library) + && Objects.equals(messages, that.messages) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(isLibraryForAllClusters, library, messages, status); + } + + @Override + public String toString() { + return new ToStringer(LibraryFullStatusPb.class) + .add("isLibraryForAllClusters", isLibraryForAllClusters) + .add("library", library) + .add("messages", messages) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryPb.java new file mode 100755 index 000000000..c8fe72a7a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryPb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LibraryPb { + @JsonProperty("cran") + private RCranLibrary cran; + + @JsonProperty("egg") + private String egg; + + @JsonProperty("jar") + private String jar; + + @JsonProperty("maven") + private MavenLibrary maven; + + @JsonProperty("pypi") + private PythonPyPiLibrary pypi; + + @JsonProperty("requirements") + private String requirements; + + @JsonProperty("whl") + private String whl; + + public LibraryPb setCran(RCranLibrary cran) { + this.cran = cran; + return this; + } + + public RCranLibrary getCran() { + return cran; + } + + public LibraryPb setEgg(String egg) { + this.egg = egg; + return this; + } + + public String getEgg() { + return egg; + } + + public LibraryPb setJar(String jar) { + this.jar = jar; + return this; + } + + public String getJar() { + return jar; + } + + public LibraryPb setMaven(MavenLibrary maven) { + this.maven = maven; + return this; + } + + public MavenLibrary getMaven() { + return maven; + } + + public LibraryPb setPypi(PythonPyPiLibrary pypi) { + this.pypi = pypi; + return this; + } + + public PythonPyPiLibrary getPypi() { + return pypi; + } + + public LibraryPb setRequirements(String requirements) { + this.requirements = requirements; + return this; + } + + public String getRequirements() { + return requirements; + } + + public LibraryPb setWhl(String whl) { + this.whl = whl; + return this; + } + + public String getWhl() { + return whl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LibraryPb that = (LibraryPb) o; + return Objects.equals(cran, that.cran) + && Objects.equals(egg, that.egg) + && Objects.equals(jar, that.jar) + && Objects.equals(maven, that.maven) + && Objects.equals(pypi, 
that.pypi) + && Objects.equals(requirements, that.requirements) + && Objects.equals(whl, that.whl); + } + + @Override + public int hashCode() { + return Objects.hash(cran, egg, jar, maven, pypi, requirements, whl); + } + + @Override + public String toString() { + return new ToStringer(LibraryPb.class) + .add("cran", cran) + .add("egg", egg) + .add("jar", jar) + .add("maven", maven) + .add("pypi", pypi) + .add("requirements", requirements) + .add("whl", whl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponse.java index acfffa35b..f841937f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponse.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListAllClusterLibraryStatusesResponse.ListAllClusterLibraryStatusesResponseSerializer.class) +@JsonDeserialize( + using = + 
ListAllClusterLibraryStatusesResponse.ListAllClusterLibraryStatusesResponseDeserializer + .class) public class ListAllClusterLibraryStatusesResponse { /** A list of cluster statuses. */ - @JsonProperty("statuses") private Collection statuses; public ListAllClusterLibraryStatusesResponse setStatuses( @@ -43,4 +58,42 @@ public String toString() { .add("statuses", statuses) .toString(); } + + ListAllClusterLibraryStatusesResponsePb toPb() { + ListAllClusterLibraryStatusesResponsePb pb = new ListAllClusterLibraryStatusesResponsePb(); + pb.setStatuses(statuses); + + return pb; + } + + static ListAllClusterLibraryStatusesResponse fromPb(ListAllClusterLibraryStatusesResponsePb pb) { + ListAllClusterLibraryStatusesResponse model = new ListAllClusterLibraryStatusesResponse(); + model.setStatuses(pb.getStatuses()); + + return model; + } + + public static class ListAllClusterLibraryStatusesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAllClusterLibraryStatusesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAllClusterLibraryStatusesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAllClusterLibraryStatusesResponseDeserializer + extends JsonDeserializer { + @Override + public ListAllClusterLibraryStatusesResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAllClusterLibraryStatusesResponsePb pb = + mapper.readValue(p, ListAllClusterLibraryStatusesResponsePb.class); + return ListAllClusterLibraryStatusesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponsePb.java new file mode 100755 index 000000000..7a29ebc13 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAllClusterLibraryStatusesResponsePb { + @JsonProperty("statuses") + private Collection statuses; + + public ListAllClusterLibraryStatusesResponsePb setStatuses( + Collection statuses) { + this.statuses = statuses; + return this; + } + + public Collection getStatuses() { + return statuses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllClusterLibraryStatusesResponsePb that = (ListAllClusterLibraryStatusesResponsePb) o; + return Objects.equals(statuses, that.statuses); + } + + @Override + public int hashCode() { + return Objects.hash(statuses); + } + + @Override + public String toString() { + return new ToStringer(ListAllClusterLibraryStatusesResponsePb.class) + .add("statuses", statuses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java index aa7b0a6dd..799190c01 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAvailableZonesResponse.ListAvailableZonesResponseSerializer.class) +@JsonDeserialize(using = ListAvailableZonesResponse.ListAvailableZonesResponseDeserializer.class) public class ListAvailableZonesResponse { /** The availability zone if no ``zone_id`` is provided in the cluster creation request. */ - @JsonProperty("default_zone") private String defaultZone; /** The list of available zones (e.g., ['us-west-2c', 'us-east-2']). 
*/ - @JsonProperty("zones") private Collection zones; public ListAvailableZonesResponse setDefaultZone(String defaultZone) { @@ -56,4 +65,43 @@ public String toString() { .add("zones", zones) .toString(); } + + ListAvailableZonesResponsePb toPb() { + ListAvailableZonesResponsePb pb = new ListAvailableZonesResponsePb(); + pb.setDefaultZone(defaultZone); + pb.setZones(zones); + + return pb; + } + + static ListAvailableZonesResponse fromPb(ListAvailableZonesResponsePb pb) { + ListAvailableZonesResponse model = new ListAvailableZonesResponse(); + model.setDefaultZone(pb.getDefaultZone()); + model.setZones(pb.getZones()); + + return model; + } + + public static class ListAvailableZonesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAvailableZonesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAvailableZonesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAvailableZonesResponseDeserializer + extends JsonDeserializer { + @Override + public ListAvailableZonesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAvailableZonesResponsePb pb = mapper.readValue(p, ListAvailableZonesResponsePb.class); + return ListAvailableZonesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponsePb.java new file mode 100755 index 000000000..7e125b873 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAvailableZonesResponsePb { + @JsonProperty("default_zone") + private String defaultZone; + + @JsonProperty("zones") + private Collection zones; + + public ListAvailableZonesResponsePb setDefaultZone(String defaultZone) { + this.defaultZone = defaultZone; + return this; + } + + public String getDefaultZone() { + return defaultZone; + } + + public ListAvailableZonesResponsePb setZones(Collection zones) { + this.zones = zones; + return this; + } + + public Collection getZones() { + return zones; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAvailableZonesResponsePb that = (ListAvailableZonesResponsePb) o; + return Objects.equals(defaultZone, that.defaultZone) && Objects.equals(zones, that.zones); + } + + @Override + public int hashCode() { + return Objects.hash(defaultZone, zones); + } + + @Override + public String toString() { + return new ToStringer(ListAvailableZonesResponsePb.class) + .add("defaultZone", defaultZone) + .add("zones", zones) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java index 1347905b7..d475a0c76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java @@ -3,33 +3,38 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List cluster policy compliance */ @Generated +@JsonSerialize(using = ListClusterCompliancesRequest.ListClusterCompliancesRequestSerializer.class) +@JsonDeserialize( + using = ListClusterCompliancesRequest.ListClusterCompliancesRequestDeserializer.class) public class ListClusterCompliancesRequest { /** * Use this field to specify the maximum number of results to be returned by the server. The * server may further constrain the maximum number of results returned in a single page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * A page token that can be used to navigate to the next page or previous page as returned by * `next_page_token` or `prev_page_token`. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Canonical unique identifier for the cluster policy. 
*/ - @JsonIgnore - @QueryParam("policy_id") private String policyId; public ListClusterCompliancesRequest setPageSize(Long pageSize) { @@ -82,4 +87,46 @@ public String toString() { .add("policyId", policyId) .toString(); } + + ListClusterCompliancesRequestPb toPb() { + ListClusterCompliancesRequestPb pb = new ListClusterCompliancesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setPolicyId(policyId); + + return pb; + } + + static ListClusterCompliancesRequest fromPb(ListClusterCompliancesRequestPb pb) { + ListClusterCompliancesRequest model = new ListClusterCompliancesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class ListClusterCompliancesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListClusterCompliancesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClusterCompliancesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClusterCompliancesRequestDeserializer + extends JsonDeserializer { + @Override + public ListClusterCompliancesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClusterCompliancesRequestPb pb = + mapper.readValue(p, ListClusterCompliancesRequestPb.class); + return ListClusterCompliancesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequestPb.java new file mode 100755 index 000000000..578f9d467 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List cluster policy compliance */ +@Generated +class ListClusterCompliancesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("policy_id") + private String policyId; + + public ListClusterCompliancesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListClusterCompliancesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListClusterCompliancesRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClusterCompliancesRequestPb that 
= (ListClusterCompliancesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, policyId); + } + + @Override + public String toString() { + return new ToStringer(ListClusterCompliancesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponse.java index 71e218156..4ca938d1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponse.java @@ -4,28 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListClusterCompliancesResponse.ListClusterCompliancesResponseSerializer.class) +@JsonDeserialize( + using = ListClusterCompliancesResponse.ListClusterCompliancesResponseDeserializer.class) public class 
ListClusterCompliancesResponse { /** A list of clusters and their policy compliance statuses. */ - @JsonProperty("clusters") private Collection clusters; /** * This field represents the pagination token to retrieve the next page of results. If the value * is "", it means no further results for the request. */ - @JsonProperty("next_page_token") private String nextPageToken; /** * This field represents the pagination token to retrieve the previous page of results. If the * value is "", it means no further results for the request. */ - @JsonProperty("prev_page_token") private String prevPageToken; public ListClusterCompliancesResponse setClusters(Collection clusters) { @@ -78,4 +88,46 @@ public String toString() { .add("prevPageToken", prevPageToken) .toString(); } + + ListClusterCompliancesResponsePb toPb() { + ListClusterCompliancesResponsePb pb = new ListClusterCompliancesResponsePb(); + pb.setClusters(clusters); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + + return pb; + } + + static ListClusterCompliancesResponse fromPb(ListClusterCompliancesResponsePb pb) { + ListClusterCompliancesResponse model = new ListClusterCompliancesResponse(); + model.setClusters(pb.getClusters()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + + return model; + } + + public static class ListClusterCompliancesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListClusterCompliancesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClusterCompliancesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClusterCompliancesResponseDeserializer + extends JsonDeserializer { + @Override + public ListClusterCompliancesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClusterCompliancesResponsePb pb = + mapper.readValue(p, ListClusterCompliancesResponsePb.class); + return ListClusterCompliancesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponsePb.java new file mode 100755 index 000000000..ae2891f4e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListClusterCompliancesResponsePb { + @JsonProperty("clusters") + private Collection clusters; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + public ListClusterCompliancesResponsePb setClusters(Collection clusters) { + this.clusters = clusters; + return this; + } + + public Collection getClusters() { + return clusters; + } + + public ListClusterCompliancesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListClusterCompliancesResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ListClusterCompliancesResponsePb that = (ListClusterCompliancesResponsePb) o; + return Objects.equals(clusters, that.clusters) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(clusters, nextPageToken, prevPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListClusterCompliancesResponsePb.class) + .add("clusters", clusters) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java index fa8edb3dc..51bbf4108 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java @@ -3,28 +3,34 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List cluster policies */ @Generated +@JsonSerialize(using = 
ListClusterPoliciesRequest.ListClusterPoliciesRequestSerializer.class) +@JsonDeserialize(using = ListClusterPoliciesRequest.ListClusterPoliciesRequestDeserializer.class) public class ListClusterPoliciesRequest { /** * The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy * creation time. * `POLICY_NAME` - Sort result list by policy name. */ - @JsonIgnore - @QueryParam("sort_column") private ListSortColumn sortColumn; /** * The order in which the policies get listed. * `DESC` - Sort result list in descending order. * * `ASC` - Sort result list in ascending order. */ - @JsonIgnore - @QueryParam("sort_order") private ListSortOrder sortOrder; public ListClusterPoliciesRequest setSortColumn(ListSortColumn sortColumn) { @@ -65,4 +71,43 @@ public String toString() { .add("sortOrder", sortOrder) .toString(); } + + ListClusterPoliciesRequestPb toPb() { + ListClusterPoliciesRequestPb pb = new ListClusterPoliciesRequestPb(); + pb.setSortColumn(sortColumn); + pb.setSortOrder(sortOrder); + + return pb; + } + + static ListClusterPoliciesRequest fromPb(ListClusterPoliciesRequestPb pb) { + ListClusterPoliciesRequest model = new ListClusterPoliciesRequest(); + model.setSortColumn(pb.getSortColumn()); + model.setSortOrder(pb.getSortOrder()); + + return model; + } + + public static class ListClusterPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListClusterPoliciesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClusterPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClusterPoliciesRequestDeserializer + extends JsonDeserializer { + @Override + public ListClusterPoliciesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClusterPoliciesRequestPb pb = mapper.readValue(p, ListClusterPoliciesRequestPb.class); + return ListClusterPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequestPb.java new file mode 100755 index 000000000..ecb3cc9bd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List cluster policies */ +@Generated +class ListClusterPoliciesRequestPb { + @JsonIgnore + @QueryParam("sort_column") + private ListSortColumn sortColumn; + + @JsonIgnore + @QueryParam("sort_order") + private ListSortOrder sortOrder; + + public ListClusterPoliciesRequestPb setSortColumn(ListSortColumn sortColumn) { + this.sortColumn = sortColumn; + return this; + } + + public ListSortColumn getSortColumn() { + return sortColumn; + } + + public ListClusterPoliciesRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClusterPoliciesRequestPb that = (ListClusterPoliciesRequestPb) o; + return Objects.equals(sortColumn, that.sortColumn) && Objects.equals(sortOrder, that.sortOrder); + } + + @Override + public int hashCode() { + return Objects.hash(sortColumn, 
sortOrder); + } + + @Override + public String toString() { + return new ToStringer(ListClusterPoliciesRequestPb.class) + .add("sortColumn", sortColumn) + .add("sortOrder", sortOrder) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterBy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterBy.java index 92e0e4e28..5a0ce12ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterBy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterBy.java @@ -3,32 +3,34 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListClustersFilterBy.ListClustersFilterBySerializer.class) +@JsonDeserialize(using = ListClustersFilterBy.ListClustersFilterByDeserializer.class) public class ListClustersFilterBy { /** The source of cluster creation. */ - @JsonProperty("cluster_sources") - @QueryParam("cluster_sources") private Collection clusterSources; /** The current state of the clusters. 
*/ - @JsonProperty("cluster_states") - @QueryParam("cluster_states") private Collection clusterStates; /** Whether the clusters are pinned or not. */ - @JsonProperty("is_pinned") - @QueryParam("is_pinned") private Boolean isPinned; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") - @QueryParam("policy_id") private String policyId; public ListClustersFilterBy setClusterSources(Collection clusterSources) { @@ -92,4 +94,46 @@ public String toString() { .add("policyId", policyId) .toString(); } + + ListClustersFilterByPb toPb() { + ListClustersFilterByPb pb = new ListClustersFilterByPb(); + pb.setClusterSources(clusterSources); + pb.setClusterStates(clusterStates); + pb.setIsPinned(isPinned); + pb.setPolicyId(policyId); + + return pb; + } + + static ListClustersFilterBy fromPb(ListClustersFilterByPb pb) { + ListClustersFilterBy model = new ListClustersFilterBy(); + model.setClusterSources(pb.getClusterSources()); + model.setClusterStates(pb.getClusterStates()); + model.setIsPinned(pb.getIsPinned()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class ListClustersFilterBySerializer extends JsonSerializer { + @Override + public void serialize( + ListClustersFilterBy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClustersFilterByPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClustersFilterByDeserializer + extends JsonDeserializer { + @Override + public ListClustersFilterBy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClustersFilterByPb pb = mapper.readValue(p, ListClustersFilterByPb.class); + return ListClustersFilterBy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterByPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterByPb.java new file mode 100755 index 000000000..d87a80e43 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersFilterByPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListClustersFilterByPb { + @JsonProperty("cluster_sources") + @QueryParam("cluster_sources") + private Collection clusterSources; + + @JsonProperty("cluster_states") + @QueryParam("cluster_states") + private Collection clusterStates; + + @JsonProperty("is_pinned") + @QueryParam("is_pinned") + private Boolean isPinned; + + @JsonProperty("policy_id") + @QueryParam("policy_id") + private String policyId; + + public ListClustersFilterByPb setClusterSources(Collection clusterSources) { + this.clusterSources = clusterSources; + return this; + } + + public Collection getClusterSources() { + return clusterSources; + } + + public ListClustersFilterByPb setClusterStates(Collection clusterStates) { + this.clusterStates = clusterStates; + return this; + } + + public Collection getClusterStates() { + return clusterStates; + } + + public ListClustersFilterByPb setIsPinned(Boolean isPinned) { + this.isPinned = isPinned; + return this; + } + + public Boolean getIsPinned() { + return isPinned; + } + + public 
ListClustersFilterByPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClustersFilterByPb that = (ListClustersFilterByPb) o; + return Objects.equals(clusterSources, that.clusterSources) + && Objects.equals(clusterStates, that.clusterStates) + && Objects.equals(isPinned, that.isPinned) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterSources, clusterStates, isPinned, policyId); + } + + @Override + public String toString() { + return new ToStringer(ListClustersFilterByPb.class) + .add("clusterSources", clusterSources) + .add("clusterStates", clusterStates) + .add("isPinned", isPinned) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java index 7139f7763..67461baac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java @@ -3,38 +3,40 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List clusters */ @Generated +@JsonSerialize(using = ListClustersRequest.ListClustersRequestSerializer.class) +@JsonDeserialize(using = ListClustersRequest.ListClustersRequestDeserializer.class) public class ListClustersRequest { /** Filters to apply to the list of clusters. */ - @JsonIgnore - @QueryParam("filter_by") private ListClustersFilterBy filterBy; /** * Use this field to specify the maximum number of results to be returned by the server. The * server may further constrain the maximum number of results returned in a single page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * Use next_page_token or prev_page_token returned from the previous request to list the next or * previous page of clusters respectively. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Sort the list of clusters by a specific criteria. 
*/ - @JsonIgnore - @QueryParam("sort_by") private ListClustersSortBy sortBy; public ListClustersRequest setFilterBy(ListClustersFilterBy filterBy) { @@ -98,4 +100,45 @@ public String toString() { .add("sortBy", sortBy) .toString(); } + + ListClustersRequestPb toPb() { + ListClustersRequestPb pb = new ListClustersRequestPb(); + pb.setFilterBy(filterBy); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setSortBy(sortBy); + + return pb; + } + + static ListClustersRequest fromPb(ListClustersRequestPb pb) { + ListClustersRequest model = new ListClustersRequest(); + model.setFilterBy(pb.getFilterBy()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setSortBy(pb.getSortBy()); + + return model; + } + + public static class ListClustersRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListClustersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClustersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClustersRequestDeserializer + extends JsonDeserializer { + @Override + public ListClustersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClustersRequestPb pb = mapper.readValue(p, ListClustersRequestPb.class); + return ListClustersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequestPb.java new file mode 100755 index 000000000..8da65456a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List clusters */ +@Generated +class ListClustersRequestPb { + @JsonIgnore + @QueryParam("filter_by") + private ListClustersFilterBy filterBy; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("sort_by") + private ListClustersSortBy sortBy; + + public ListClustersRequestPb setFilterBy(ListClustersFilterBy filterBy) { + this.filterBy = filterBy; + return this; + } + + public ListClustersFilterBy getFilterBy() { + return filterBy; + } + + public ListClustersRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListClustersRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListClustersRequestPb setSortBy(ListClustersSortBy sortBy) { + this.sortBy = sortBy; + return this; + } + + public ListClustersSortBy getSortBy() { + return sortBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClustersRequestPb that = (ListClustersRequestPb) o; + return Objects.equals(filterBy, that.filterBy) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(sortBy, that.sortBy); + } + + @Override + public int hashCode() { + return Objects.hash(filterBy, pageSize, pageToken, sortBy); + } + + @Override + public String toString() { + return new ToStringer(ListClustersRequestPb.class) + .add("filterBy", filterBy) + .add("pageSize", pageSize) + 
.add("pageToken", pageToken) + .add("sortBy", sortBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java index bbbbe0fb1..c6c41fac9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java @@ -4,28 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListClustersResponse.ListClustersResponseSerializer.class) +@JsonDeserialize(using = ListClustersResponse.ListClustersResponseDeserializer.class) public class ListClustersResponse { /** */ - @JsonProperty("clusters") private Collection clusters; /** * This field represents the pagination token to retrieve the next page of results. If the value * is "", it means no further results for the request. */ - @JsonProperty("next_page_token") private String nextPageToken; /** * This field represents the pagination token to retrieve the previous page of results. If the * value is "", it means no further results for the request. 
*/ - @JsonProperty("prev_page_token") private String prevPageToken; public ListClustersResponse setClusters(Collection clusters) { @@ -78,4 +86,44 @@ public String toString() { .add("prevPageToken", prevPageToken) .toString(); } + + ListClustersResponsePb toPb() { + ListClustersResponsePb pb = new ListClustersResponsePb(); + pb.setClusters(clusters); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + + return pb; + } + + static ListClustersResponse fromPb(ListClustersResponsePb pb) { + ListClustersResponse model = new ListClustersResponse(); + model.setClusters(pb.getClusters()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + + return model; + } + + public static class ListClustersResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListClustersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClustersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClustersResponseDeserializer + extends JsonDeserializer { + @Override + public ListClustersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClustersResponsePb pb = mapper.readValue(p, ListClustersResponsePb.class); + return ListClustersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponsePb.java new file mode 100755 index 000000000..7f66e5ea6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListClustersResponsePb { + @JsonProperty("clusters") + private Collection clusters; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + public ListClustersResponsePb setClusters(Collection clusters) { + this.clusters = clusters; + return this; + } + + public Collection getClusters() { + return clusters; + } + + public ListClustersResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListClustersResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClustersResponsePb that = (ListClustersResponsePb) o; + return Objects.equals(clusters, that.clusters) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(clusters, nextPageToken, prevPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListClustersResponsePb.class) + .add("clusters", clusters) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java index cb977be58..630ea45e9 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java @@ -3,24 +3,30 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ListClustersSortBy.ListClustersSortBySerializer.class) +@JsonDeserialize(using = ListClustersSortBy.ListClustersSortByDeserializer.class) public class ListClustersSortBy { /** The direction to sort by. */ - @JsonProperty("direction") - @QueryParam("direction") private ListClustersSortByDirection direction; /** * The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest * precedence: cluster state, pinned or unpinned, then cluster name. 
*/ - @JsonProperty("field") - @QueryParam("field") private ListClustersSortByField field; public ListClustersSortBy setDirection(ListClustersSortByDirection direction) { @@ -61,4 +67,40 @@ public String toString() { .add("field", field) .toString(); } + + ListClustersSortByPb toPb() { + ListClustersSortByPb pb = new ListClustersSortByPb(); + pb.setDirection(direction); + pb.setField(field); + + return pb; + } + + static ListClustersSortBy fromPb(ListClustersSortByPb pb) { + ListClustersSortBy model = new ListClustersSortBy(); + model.setDirection(pb.getDirection()); + model.setField(pb.getField()); + + return model; + } + + public static class ListClustersSortBySerializer extends JsonSerializer { + @Override + public void serialize(ListClustersSortBy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListClustersSortByPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListClustersSortByDeserializer extends JsonDeserializer { + @Override + public ListClustersSortBy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListClustersSortByPb pb = mapper.readValue(p, ListClustersSortByPb.class); + return ListClustersSortBy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByPb.java new file mode 100755 index 000000000..4a8d41e59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListClustersSortByPb { + @JsonProperty("direction") + @QueryParam("direction") + private ListClustersSortByDirection direction; + + @JsonProperty("field") + @QueryParam("field") + private ListClustersSortByField field; + + public ListClustersSortByPb setDirection(ListClustersSortByDirection direction) { + this.direction = direction; + return this; + } + + public ListClustersSortByDirection getDirection() { + return direction; + } + + public ListClustersSortByPb setField(ListClustersSortByField field) { + this.field = field; + return this; + } + + public ListClustersSortByField getField() { + return field; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListClustersSortByPb that = (ListClustersSortByPb) o; + return Objects.equals(direction, that.direction) && Objects.equals(field, that.field); + } + + @Override + public int hashCode() { + return Objects.hash(direction, field); + } + + @Override + public String toString() { + return new ToStringer(ListClustersSortByPb.class) + .add("direction", direction) + .add("field", field) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java index e155d95a4..e7f8abee5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; 
import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListGlobalInitScriptsResponse.ListGlobalInitScriptsResponseSerializer.class) +@JsonDeserialize( + using = ListGlobalInitScriptsResponse.ListGlobalInitScriptsResponseDeserializer.class) public class ListGlobalInitScriptsResponse { /** */ - @JsonProperty("scripts") private Collection scripts; public ListGlobalInitScriptsResponse setScripts(Collection scripts) { @@ -40,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(ListGlobalInitScriptsResponse.class).add("scripts", scripts).toString(); } + + ListGlobalInitScriptsResponsePb toPb() { + ListGlobalInitScriptsResponsePb pb = new ListGlobalInitScriptsResponsePb(); + pb.setScripts(scripts); + + return pb; + } + + static ListGlobalInitScriptsResponse fromPb(ListGlobalInitScriptsResponsePb pb) { + ListGlobalInitScriptsResponse model = new ListGlobalInitScriptsResponse(); + model.setScripts(pb.getScripts()); + + return model; + } + + public static class ListGlobalInitScriptsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListGlobalInitScriptsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListGlobalInitScriptsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + 
+ public static class ListGlobalInitScriptsResponseDeserializer + extends JsonDeserializer { + @Override + public ListGlobalInitScriptsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListGlobalInitScriptsResponsePb pb = + mapper.readValue(p, ListGlobalInitScriptsResponsePb.class); + return ListGlobalInitScriptsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponsePb.java new file mode 100755 index 000000000..6ed58af62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListGlobalInitScriptsResponsePb { + @JsonProperty("scripts") + private Collection scripts; + + public ListGlobalInitScriptsResponsePb setScripts(Collection scripts) { + this.scripts = scripts; + return this; + } + + public Collection getScripts() { + return scripts; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGlobalInitScriptsResponsePb that = (ListGlobalInitScriptsResponsePb) o; + return Objects.equals(scripts, that.scripts); + } + + @Override + public int hashCode() { + return Objects.hash(scripts); + } + + @Override + public String toString() { + return new ToStringer(ListGlobalInitScriptsResponsePb.class).add("scripts", scripts).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java index d31e13594..fdc5b832e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListInstancePools.ListInstancePoolsSerializer.class) +@JsonDeserialize(using = ListInstancePools.ListInstancePoolsDeserializer.class) public class ListInstancePools { /** */ - @JsonProperty("instance_pools") private Collection instancePools; public ListInstancePools setInstancePools(Collection instancePools) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListInstancePools.class).add("instancePools", instancePools).toString(); } + + ListInstancePoolsPb toPb() { + ListInstancePoolsPb pb = new ListInstancePoolsPb(); + pb.setInstancePools(instancePools); + + return pb; + } + + static ListInstancePools fromPb(ListInstancePoolsPb pb) { + ListInstancePools model = new ListInstancePools(); + model.setInstancePools(pb.getInstancePools()); + + return model; + } + + public static class ListInstancePoolsSerializer extends JsonSerializer { + @Override + public void serialize(ListInstancePools value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListInstancePoolsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListInstancePoolsDeserializer extends JsonDeserializer { + @Override + public ListInstancePools deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListInstancePoolsPb pb = mapper.readValue(p, ListInstancePoolsPb.class); + return ListInstancePools.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePoolsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePoolsPb.java new file mode 100755 index 000000000..a28ee6095 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePoolsPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListInstancePoolsPb { + @JsonProperty("instance_pools") + private Collection instancePools; + + public ListInstancePoolsPb setInstancePools(Collection instancePools) { + this.instancePools = instancePools; + return this; + } + + public Collection getInstancePools() { + return instancePools; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstancePoolsPb that = (ListInstancePoolsPb) o; + return Objects.equals(instancePools, that.instancePools); + } + + @Override + public int hashCode() { + return Objects.hash(instancePools); + } + + @Override + public String toString() { + return new ToStringer(ListInstancePoolsPb.class).add("instancePools", instancePools).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java index 5e697f13d..db0fa067b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListInstanceProfilesResponse.ListInstanceProfilesResponseSerializer.class) +@JsonDeserialize( + using = ListInstanceProfilesResponse.ListInstanceProfilesResponseDeserializer.class) public class ListInstanceProfilesResponse { /** A list of instance profiles that the user can access. 
*/ - @JsonProperty("instance_profiles") private Collection instanceProfiles; public ListInstanceProfilesResponse setInstanceProfiles( @@ -43,4 +54,41 @@ public String toString() { .add("instanceProfiles", instanceProfiles) .toString(); } + + ListInstanceProfilesResponsePb toPb() { + ListInstanceProfilesResponsePb pb = new ListInstanceProfilesResponsePb(); + pb.setInstanceProfiles(instanceProfiles); + + return pb; + } + + static ListInstanceProfilesResponse fromPb(ListInstanceProfilesResponsePb pb) { + ListInstanceProfilesResponse model = new ListInstanceProfilesResponse(); + model.setInstanceProfiles(pb.getInstanceProfiles()); + + return model; + } + + public static class ListInstanceProfilesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListInstanceProfilesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListInstanceProfilesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListInstanceProfilesResponseDeserializer + extends JsonDeserializer { + @Override + public ListInstanceProfilesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListInstanceProfilesResponsePb pb = mapper.readValue(p, ListInstanceProfilesResponsePb.class); + return ListInstanceProfilesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponsePb.java new file mode 100755 index 000000000..1017d3945 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListInstanceProfilesResponsePb { + @JsonProperty("instance_profiles") + private Collection instanceProfiles; + + public ListInstanceProfilesResponsePb setInstanceProfiles( + Collection instanceProfiles) { + this.instanceProfiles = instanceProfiles; + return this; + } + + public Collection getInstanceProfiles() { + return instanceProfiles; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstanceProfilesResponsePb that = (ListInstanceProfilesResponsePb) o; + return Objects.equals(instanceProfiles, that.instanceProfiles); + } + + @Override + public int hashCode() { + return Objects.hash(instanceProfiles); + } + + @Override + public String toString() { + return new ToStringer(ListInstanceProfilesResponsePb.class) + .add("instanceProfiles", instanceProfiles) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponse.java index 201260e1c..10482672c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListNodeTypesResponse.ListNodeTypesResponseSerializer.class) +@JsonDeserialize(using = ListNodeTypesResponse.ListNodeTypesResponseDeserializer.class) public class ListNodeTypesResponse { /** The list of available Spark node types. */ - @JsonProperty("node_types") private Collection nodeTypes; public ListNodeTypesResponse setNodeTypes(Collection nodeTypes) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListNodeTypesResponse.class).add("nodeTypes", nodeTypes).toString(); } + + ListNodeTypesResponsePb toPb() { + ListNodeTypesResponsePb pb = new ListNodeTypesResponsePb(); + pb.setNodeTypes(nodeTypes); + + return pb; + } + + static ListNodeTypesResponse fromPb(ListNodeTypesResponsePb pb) { + ListNodeTypesResponse model = new ListNodeTypesResponse(); + model.setNodeTypes(pb.getNodeTypes()); + + return model; + } + + public static class ListNodeTypesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNodeTypesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNodeTypesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNodeTypesResponseDeserializer + extends JsonDeserializer { + @Override + public ListNodeTypesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNodeTypesResponsePb pb = mapper.readValue(p, ListNodeTypesResponsePb.class); + return ListNodeTypesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponsePb.java new file mode 100755 index 000000000..47fb86718 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListNodeTypesResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListNodeTypesResponsePb { + @JsonProperty("node_types") + private Collection nodeTypes; + + public ListNodeTypesResponsePb setNodeTypes(Collection nodeTypes) { + this.nodeTypes = nodeTypes; + return this; + } + + public Collection getNodeTypes() { + return nodeTypes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNodeTypesResponsePb that = (ListNodeTypesResponsePb) o; + return Objects.equals(nodeTypes, that.nodeTypes); + } + + @Override + public int hashCode() { + return Objects.hash(nodeTypes); + } + + @Override + public String toString() { + return new ToStringer(ListNodeTypesResponsePb.class).add("nodeTypes", nodeTypes).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponse.java index 6962101a6..33bea5ef4 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListPoliciesResponse.ListPoliciesResponseSerializer.class) +@JsonDeserialize(using = ListPoliciesResponse.ListPoliciesResponseDeserializer.class) public class ListPoliciesResponse { /** List of policies. 
*/ - @JsonProperty("policies") private Collection policies; public ListPoliciesResponse setPolicies(Collection policies) { @@ -40,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(ListPoliciesResponse.class).add("policies", policies).toString(); } + + ListPoliciesResponsePb toPb() { + ListPoliciesResponsePb pb = new ListPoliciesResponsePb(); + pb.setPolicies(policies); + + return pb; + } + + static ListPoliciesResponse fromPb(ListPoliciesResponsePb pb) { + ListPoliciesResponse model = new ListPoliciesResponse(); + model.setPolicies(pb.getPolicies()); + + return model; + } + + public static class ListPoliciesResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListPoliciesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPoliciesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPoliciesResponseDeserializer + extends JsonDeserializer { + @Override + public ListPoliciesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPoliciesResponsePb pb = mapper.readValue(p, ListPoliciesResponsePb.class); + return ListPoliciesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponsePb.java new file mode 100755 index 000000000..10125e477 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPoliciesResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListPoliciesResponsePb { + @JsonProperty("policies") + private Collection policies; + + public ListPoliciesResponsePb setPolicies(Collection policies) { + this.policies = policies; + return this; + } + + public Collection getPolicies() { + return policies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPoliciesResponsePb that = (ListPoliciesResponsePb) o; + return Objects.equals(policies, that.policies); + } + + @Override + public int hashCode() { + return Objects.hash(policies); + } + + @Override + public String toString() { + return new ToStringer(ListPoliciesResponsePb.class).add("policies", policies).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java index f267ac127..e77f1ac84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List policy families */ @Generated +@JsonSerialize(using = ListPolicyFamiliesRequest.ListPolicyFamiliesRequestSerializer.class) +@JsonDeserialize(using = ListPolicyFamiliesRequest.ListPolicyFamiliesRequestDeserializer.class) public class ListPolicyFamiliesRequest { /** Maximum number of policy families to return. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** A token that can be used to get the next page of results. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListPolicyFamiliesRequest setMaxResults(Long maxResults) { @@ -59,4 +65,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListPolicyFamiliesRequestPb toPb() { + ListPolicyFamiliesRequestPb pb = new ListPolicyFamiliesRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListPolicyFamiliesRequest fromPb(ListPolicyFamiliesRequestPb pb) { + ListPolicyFamiliesRequest model = new ListPolicyFamiliesRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListPolicyFamiliesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPolicyFamiliesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPolicyFamiliesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPolicyFamiliesRequestDeserializer + extends JsonDeserializer { + @Override + public ListPolicyFamiliesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in 
the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPolicyFamiliesRequestPb pb = mapper.readValue(p, ListPolicyFamiliesRequestPb.class); + return ListPolicyFamiliesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequestPb.java new file mode 100755 index 000000000..31474edc3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List policy families */ +@Generated +class ListPolicyFamiliesRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListPolicyFamiliesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListPolicyFamiliesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPolicyFamiliesRequestPb that = (ListPolicyFamiliesRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + 
@Override + public String toString() { + return new ToStringer(ListPolicyFamiliesRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponse.java index f8528cd78..a6b698043 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListPolicyFamiliesResponse.ListPolicyFamiliesResponseSerializer.class) +@JsonDeserialize(using = ListPolicyFamiliesResponse.ListPolicyFamiliesResponseDeserializer.class) public class ListPolicyFamiliesResponse { /** * A token that can be used to get the next page of results. If not present, there are no more * results to show. */ - @JsonProperty("next_page_token") private String nextPageToken; /** List of policy families. 
*/ - @JsonProperty("policy_families") private Collection policyFamilies; public ListPolicyFamiliesResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("policyFamilies", policyFamilies) .toString(); } + + ListPolicyFamiliesResponsePb toPb() { + ListPolicyFamiliesResponsePb pb = new ListPolicyFamiliesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPolicyFamilies(policyFamilies); + + return pb; + } + + static ListPolicyFamiliesResponse fromPb(ListPolicyFamiliesResponsePb pb) { + ListPolicyFamiliesResponse model = new ListPolicyFamiliesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPolicyFamilies(pb.getPolicyFamilies()); + + return model; + } + + public static class ListPolicyFamiliesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPolicyFamiliesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPolicyFamiliesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPolicyFamiliesResponseDeserializer + extends JsonDeserializer { + @Override + public ListPolicyFamiliesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPolicyFamiliesResponsePb pb = mapper.readValue(p, ListPolicyFamiliesResponsePb.class); + return ListPolicyFamiliesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponsePb.java new file mode 100755 index 000000000..65231cdcc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListPolicyFamiliesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("policy_families") + private Collection policyFamilies; + + public ListPolicyFamiliesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListPolicyFamiliesResponsePb setPolicyFamilies(Collection policyFamilies) { + this.policyFamilies = policyFamilies; + return this; + } + + public Collection getPolicyFamilies() { + return policyFamilies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPolicyFamiliesResponsePb that = (ListPolicyFamiliesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(policyFamilies, that.policyFamilies); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, policyFamilies); + } + + 
@Override + public String toString() { + return new ToStringer(ListPolicyFamiliesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("policyFamilies", policyFamilies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java index 1e5531b5f..63652da16 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LocalFileInfo.LocalFileInfoSerializer.class) +@JsonDeserialize(using = LocalFileInfo.LocalFileInfoDeserializer.class) public class LocalFileInfo { /** local file destination, e.g. 
`file:/my/local/file.sh` */ - @JsonProperty("destination") private String destination; public LocalFileInfo setDestination(String destination) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(LocalFileInfo.class).add("destination", destination).toString(); } + + LocalFileInfoPb toPb() { + LocalFileInfoPb pb = new LocalFileInfoPb(); + pb.setDestination(destination); + + return pb; + } + + static LocalFileInfo fromPb(LocalFileInfoPb pb) { + LocalFileInfo model = new LocalFileInfo(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class LocalFileInfoSerializer extends JsonSerializer { + @Override + public void serialize(LocalFileInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LocalFileInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LocalFileInfoDeserializer extends JsonDeserializer { + @Override + public LocalFileInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LocalFileInfoPb pb = mapper.readValue(p, LocalFileInfoPb.class); + return LocalFileInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfoPb.java new file mode 100755 index 000000000..72f82624f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfoPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LocalFileInfoPb { + @JsonProperty("destination") + private String destination; + + public LocalFileInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LocalFileInfoPb that = (LocalFileInfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(LocalFileInfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java index af2e30072..78012c424 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogAnalyticsInfo.LogAnalyticsInfoSerializer.class) +@JsonDeserialize(using = LogAnalyticsInfo.LogAnalyticsInfoDeserializer.class) public class LogAnalyticsInfo { /** */ - @JsonProperty("log_analytics_primary_key") private String logAnalyticsPrimaryKey; /** */ - @JsonProperty("log_analytics_workspace_id") private String logAnalyticsWorkspaceId; public LogAnalyticsInfo setLogAnalyticsPrimaryKey(String logAnalyticsPrimaryKey) { @@ -56,4 +65,40 @@ public String toString() { .add("logAnalyticsWorkspaceId", logAnalyticsWorkspaceId) .toString(); } + + LogAnalyticsInfoPb toPb() { + LogAnalyticsInfoPb pb = new LogAnalyticsInfoPb(); + pb.setLogAnalyticsPrimaryKey(logAnalyticsPrimaryKey); + pb.setLogAnalyticsWorkspaceId(logAnalyticsWorkspaceId); + + return pb; + } + + static LogAnalyticsInfo fromPb(LogAnalyticsInfoPb pb) { + LogAnalyticsInfo model = new LogAnalyticsInfo(); + model.setLogAnalyticsPrimaryKey(pb.getLogAnalyticsPrimaryKey()); + model.setLogAnalyticsWorkspaceId(pb.getLogAnalyticsWorkspaceId()); + + return model; + } + + public static class LogAnalyticsInfoSerializer extends JsonSerializer { + @Override + public void serialize(LogAnalyticsInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogAnalyticsInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogAnalyticsInfoDeserializer extends JsonDeserializer { + @Override + public LogAnalyticsInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogAnalyticsInfoPb pb = mapper.readValue(p, LogAnalyticsInfoPb.class); + return LogAnalyticsInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfoPb.java new file mode 100755 index 000000000..ade3560ed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfoPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LogAnalyticsInfoPb { + @JsonProperty("log_analytics_primary_key") + private String logAnalyticsPrimaryKey; + + @JsonProperty("log_analytics_workspace_id") + private String logAnalyticsWorkspaceId; + + public LogAnalyticsInfoPb setLogAnalyticsPrimaryKey(String logAnalyticsPrimaryKey) { + this.logAnalyticsPrimaryKey = logAnalyticsPrimaryKey; + return this; + } + + public String getLogAnalyticsPrimaryKey() { + return logAnalyticsPrimaryKey; + } + + public LogAnalyticsInfoPb setLogAnalyticsWorkspaceId(String logAnalyticsWorkspaceId) { + this.logAnalyticsWorkspaceId = logAnalyticsWorkspaceId; + return this; + } + + public String getLogAnalyticsWorkspaceId() { + return logAnalyticsWorkspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogAnalyticsInfoPb that = (LogAnalyticsInfoPb) o; + return Objects.equals(logAnalyticsPrimaryKey, that.logAnalyticsPrimaryKey) + && Objects.equals(logAnalyticsWorkspaceId, that.logAnalyticsWorkspaceId); + } + + @Override + public int hashCode() { + return 
Objects.hash(logAnalyticsPrimaryKey, logAnalyticsWorkspaceId); + } + + @Override + public String toString() { + return new ToStringer(LogAnalyticsInfoPb.class) + .add("logAnalyticsPrimaryKey", logAnalyticsPrimaryKey) + .add("logAnalyticsWorkspaceId", logAnalyticsWorkspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java index 9bd528bc4..ab3999cb6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java @@ -4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The log delivery status */ @Generated +@JsonSerialize(using = LogSyncStatus.LogSyncStatusSerializer.class) +@JsonDeserialize(using = LogSyncStatus.LogSyncStatusDeserializer.class) public class LogSyncStatus { /** * The timestamp of last attempt. If the last attempt fails, `last_exception` will contain the * exception in the last attempt. */ - @JsonProperty("last_attempted") private Long lastAttempted; /** * The exception thrown in the last attempt, it would be null (omitted in the response) if there * is no exception in last attempted. 
*/ - @JsonProperty("last_exception") private String lastException; public LogSyncStatus setLastAttempted(Long lastAttempted) { @@ -63,4 +72,39 @@ public String toString() { .add("lastException", lastException) .toString(); } + + LogSyncStatusPb toPb() { + LogSyncStatusPb pb = new LogSyncStatusPb(); + pb.setLastAttempted(lastAttempted); + pb.setLastException(lastException); + + return pb; + } + + static LogSyncStatus fromPb(LogSyncStatusPb pb) { + LogSyncStatus model = new LogSyncStatus(); + model.setLastAttempted(pb.getLastAttempted()); + model.setLastException(pb.getLastException()); + + return model; + } + + public static class LogSyncStatusSerializer extends JsonSerializer { + @Override + public void serialize(LogSyncStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogSyncStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogSyncStatusDeserializer extends JsonDeserializer { + @Override + public LogSyncStatus deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogSyncStatusPb pb = mapper.readValue(p, LogSyncStatusPb.class); + return LogSyncStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatusPb.java new file mode 100755 index 000000000..7f9bbebc6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatusPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The log delivery status */ +@Generated +class LogSyncStatusPb { + @JsonProperty("last_attempted") + private Long lastAttempted; + + @JsonProperty("last_exception") + private String lastException; + + public LogSyncStatusPb setLastAttempted(Long lastAttempted) { + this.lastAttempted = lastAttempted; + return this; + } + + public Long getLastAttempted() { + return lastAttempted; + } + + public LogSyncStatusPb setLastException(String lastException) { + this.lastException = lastException; + return this; + } + + public String getLastException() { + return lastException; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogSyncStatusPb that = (LogSyncStatusPb) o; + return Objects.equals(lastAttempted, that.lastAttempted) + && Objects.equals(lastException, that.lastException); + } + + @Override + public int hashCode() { + return Objects.hash(lastAttempted, lastException); + } + + @Override + public String toString() { + return new ToStringer(LogSyncStatusPb.class) + .add("lastAttempted", lastAttempted) + .add("lastException", lastException) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java index a33b0e227..ee14518ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = MavenLibrary.MavenLibrarySerializer.class) +@JsonDeserialize(using = MavenLibrary.MavenLibraryDeserializer.class) public class MavenLibrary { /** Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2". */ - @JsonProperty("coordinates") private String coordinates; /** @@ -20,14 +30,12 @@ public class MavenLibrary { *

Maven dependency exclusions: * https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html. */ - @JsonProperty("exclusions") private Collection exclusions; /** * Maven repo to install the Maven package from. If omitted, both Maven Central Repository and * Spark Packages are searched. */ - @JsonProperty("repo") private String repo; public MavenLibrary setCoordinates(String coordinates) { @@ -80,4 +88,41 @@ public String toString() { .add("repo", repo) .toString(); } + + MavenLibraryPb toPb() { + MavenLibraryPb pb = new MavenLibraryPb(); + pb.setCoordinates(coordinates); + pb.setExclusions(exclusions); + pb.setRepo(repo); + + return pb; + } + + static MavenLibrary fromPb(MavenLibraryPb pb) { + MavenLibrary model = new MavenLibrary(); + model.setCoordinates(pb.getCoordinates()); + model.setExclusions(pb.getExclusions()); + model.setRepo(pb.getRepo()); + + return model; + } + + public static class MavenLibrarySerializer extends JsonSerializer { + @Override + public void serialize(MavenLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MavenLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MavenLibraryDeserializer extends JsonDeserializer { + @Override + public MavenLibrary deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MavenLibraryPb pb = mapper.readValue(p, MavenLibraryPb.class); + return MavenLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibraryPb.java new file mode 100755 index 000000000..02d4c54ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibraryPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class MavenLibraryPb { + @JsonProperty("coordinates") + private String coordinates; + + @JsonProperty("exclusions") + private Collection exclusions; + + @JsonProperty("repo") + private String repo; + + public MavenLibraryPb setCoordinates(String coordinates) { + this.coordinates = coordinates; + return this; + } + + public String getCoordinates() { + return coordinates; + } + + public MavenLibraryPb setExclusions(Collection exclusions) { + this.exclusions = exclusions; + return this; + } + + public Collection getExclusions() { + return exclusions; + } + + public MavenLibraryPb setRepo(String repo) { + this.repo = repo; + return this; + } + + public String getRepo() { + return repo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MavenLibraryPb that = (MavenLibraryPb) o; + return Objects.equals(coordinates, that.coordinates) + && Objects.equals(exclusions, that.exclusions) + && Objects.equals(repo, that.repo); + } + + @Override + public int hashCode() { + return Objects.hash(coordinates, exclusions, repo); + 
} + + @Override + public String toString() { + return new ToStringer(MavenLibraryPb.class) + .add("coordinates", coordinates) + .add("exclusions", exclusions) + .add("repo", repo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java index 184999dbd..28511cd8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,25 +22,22 @@ * in the future (which is likely) */ @Generated +@JsonSerialize(using = NodeInstanceType.NodeInstanceTypeSerializer.class) +@JsonDeserialize(using = NodeInstanceType.NodeInstanceTypeDeserializer.class) public class NodeInstanceType { /** Unique identifier across instance types */ - @JsonProperty("instance_type_id") private String instanceTypeId; /** Size of the individual local disks attached to this instance (i.e. per local disk). */ - @JsonProperty("local_disk_size_gb") private Long localDiskSizeGb; /** Number of local disks that are present on this instance. 
*/ - @JsonProperty("local_disks") private Long localDisks; /** Size of the individual local nvme disks attached to this instance (i.e. per local disk). */ - @JsonProperty("local_nvme_disk_size_gb") private Long localNvmeDiskSizeGb; /** Number of local nvme disks that are present on this instance. */ - @JsonProperty("local_nvme_disks") private Long localNvmeDisks; public NodeInstanceType setInstanceTypeId(String instanceTypeId) { @@ -107,4 +113,46 @@ public String toString() { .add("localNvmeDisks", localNvmeDisks) .toString(); } + + NodeInstanceTypePb toPb() { + NodeInstanceTypePb pb = new NodeInstanceTypePb(); + pb.setInstanceTypeId(instanceTypeId); + pb.setLocalDiskSizeGb(localDiskSizeGb); + pb.setLocalDisks(localDisks); + pb.setLocalNvmeDiskSizeGb(localNvmeDiskSizeGb); + pb.setLocalNvmeDisks(localNvmeDisks); + + return pb; + } + + static NodeInstanceType fromPb(NodeInstanceTypePb pb) { + NodeInstanceType model = new NodeInstanceType(); + model.setInstanceTypeId(pb.getInstanceTypeId()); + model.setLocalDiskSizeGb(pb.getLocalDiskSizeGb()); + model.setLocalDisks(pb.getLocalDisks()); + model.setLocalNvmeDiskSizeGb(pb.getLocalNvmeDiskSizeGb()); + model.setLocalNvmeDisks(pb.getLocalNvmeDisks()); + + return model; + } + + public static class NodeInstanceTypeSerializer extends JsonSerializer { + @Override + public void serialize(NodeInstanceType value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NodeInstanceTypePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NodeInstanceTypeDeserializer extends JsonDeserializer { + @Override + public NodeInstanceType deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NodeInstanceTypePb pb = mapper.readValue(p, NodeInstanceTypePb.class); + return NodeInstanceType.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceTypePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceTypePb.java new file mode 100755 index 000000000..4ca6436bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceTypePb.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * This structure embodies the machine type that hosts spark containers Note: this should be an + * internal data structure for now It is defined in proto in case we want to send it over the wire + * in the future (which is likely) + */ +@Generated +class NodeInstanceTypePb { + @JsonProperty("instance_type_id") + private String instanceTypeId; + + @JsonProperty("local_disk_size_gb") + private Long localDiskSizeGb; + + @JsonProperty("local_disks") + private Long localDisks; + + @JsonProperty("local_nvme_disk_size_gb") + private Long localNvmeDiskSizeGb; + + @JsonProperty("local_nvme_disks") + private Long localNvmeDisks; + + public NodeInstanceTypePb setInstanceTypeId(String instanceTypeId) { + this.instanceTypeId = instanceTypeId; + return this; + } + + public String getInstanceTypeId() { + return instanceTypeId; + } + + public NodeInstanceTypePb setLocalDiskSizeGb(Long localDiskSizeGb) { + this.localDiskSizeGb = localDiskSizeGb; + return this; + } + + public Long getLocalDiskSizeGb() { + return localDiskSizeGb; + } + + public NodeInstanceTypePb setLocalDisks(Long localDisks) { + this.localDisks = localDisks; + 
return this; + } + + public Long getLocalDisks() { + return localDisks; + } + + public NodeInstanceTypePb setLocalNvmeDiskSizeGb(Long localNvmeDiskSizeGb) { + this.localNvmeDiskSizeGb = localNvmeDiskSizeGb; + return this; + } + + public Long getLocalNvmeDiskSizeGb() { + return localNvmeDiskSizeGb; + } + + public NodeInstanceTypePb setLocalNvmeDisks(Long localNvmeDisks) { + this.localNvmeDisks = localNvmeDisks; + return this; + } + + public Long getLocalNvmeDisks() { + return localNvmeDisks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NodeInstanceTypePb that = (NodeInstanceTypePb) o; + return Objects.equals(instanceTypeId, that.instanceTypeId) + && Objects.equals(localDiskSizeGb, that.localDiskSizeGb) + && Objects.equals(localDisks, that.localDisks) + && Objects.equals(localNvmeDiskSizeGb, that.localNvmeDiskSizeGb) + && Objects.equals(localNvmeDisks, that.localNvmeDisks); + } + + @Override + public int hashCode() { + return Objects.hash( + instanceTypeId, localDiskSizeGb, localDisks, localNvmeDiskSizeGb, localNvmeDisks); + } + + @Override + public String toString() { + return new ToStringer(NodeInstanceTypePb.class) + .add("instanceTypeId", instanceTypeId) + .add("localDiskSizeGb", localDiskSizeGb) + .add("localDisks", localDisks) + .add("localNvmeDiskSizeGb", localNvmeDiskSizeGb) + .add("localNvmeDisks", localNvmeDisks) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java index 77a487607..f7342fb9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,66 +21,55 @@ * type on which it will be hosted. */ @Generated +@JsonSerialize(using = NodeType.NodeTypeSerializer.class) +@JsonDeserialize(using = NodeType.NodeTypeDeserializer.class) public class NodeType { /** * A descriptive category for this node type. Examples include "Memory Optimized" and "Compute * Optimized". */ - @JsonProperty("category") private String category; /** A string description associated with this node type, e.g., "r3.xlarge". */ - @JsonProperty("description") private String description; /** * An optional hint at the display order of node types in the UI. Within a node type category, * lowest numbers come first. */ - @JsonProperty("display_order") private Long displayOrder; /** An identifier for the type of hardware that this node runs on, e.g., "r3.2xlarge" in AWS. */ - @JsonProperty("instance_type_id") private String instanceTypeId; /** Whether the node type is deprecated. Non-deprecated node types offer greater performance. */ - @JsonProperty("is_deprecated") private Boolean isDeprecated; /** * AWS specific, whether this instance supports encryption in transit, used for hipaa and pci * workloads. */ - @JsonProperty("is_encrypted_in_transit") private Boolean isEncryptedInTransit; /** Whether this is an Arm-based instance. 
*/ - @JsonProperty("is_graviton") private Boolean isGraviton; /** Whether this node is hidden from presentation in the UI. */ - @JsonProperty("is_hidden") private Boolean isHidden; /** Whether this node comes with IO cache enabled by default. */ - @JsonProperty("is_io_cache_enabled") private Boolean isIoCacheEnabled; /** Memory (in MB) available for this node type. */ - @JsonProperty("memory_mb") private Long memoryMb; /** A collection of node type info reported by the cloud provider */ - @JsonProperty("node_info") private CloudProviderNodeInfo nodeInfo; /** The NodeInstanceType object corresponding to instance_type_id */ - @JsonProperty("node_instance_type") private NodeInstanceType nodeInstanceType; /** Unique identifier for this node type. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -79,34 +77,27 @@ public class NodeType { * cores, if the the number of cores on a machine instance is not divisible by the number of Spark * nodes on that machine. */ - @JsonProperty("num_cores") private Double numCores; /** Number of GPUs available for this node type. */ - @JsonProperty("num_gpus") private Long numGpus; /** */ - @JsonProperty("photon_driver_capable") private Boolean photonDriverCapable; /** */ - @JsonProperty("photon_worker_capable") private Boolean photonWorkerCapable; /** Whether this node type support cluster tags. */ - @JsonProperty("support_cluster_tags") private Boolean supportClusterTags; /** * Whether this node type support EBS volumes. EBS volumes is disabled for node types that we * could place multiple corresponding containers on the same hosting instance. */ - @JsonProperty("support_ebs_volumes") private Boolean supportEbsVolumes; /** Whether this node type supports port forwarding. 
*/ - @JsonProperty("support_port_forwarding") private Boolean supportPortForwarding; public NodeType setCategory(String category) { @@ -366,4 +357,75 @@ public String toString() { .add("supportPortForwarding", supportPortForwarding) .toString(); } + + NodeTypePb toPb() { + NodeTypePb pb = new NodeTypePb(); + pb.setCategory(category); + pb.setDescription(description); + pb.setDisplayOrder(displayOrder); + pb.setInstanceTypeId(instanceTypeId); + pb.setIsDeprecated(isDeprecated); + pb.setIsEncryptedInTransit(isEncryptedInTransit); + pb.setIsGraviton(isGraviton); + pb.setIsHidden(isHidden); + pb.setIsIoCacheEnabled(isIoCacheEnabled); + pb.setMemoryMb(memoryMb); + pb.setNodeInfo(nodeInfo); + pb.setNodeInstanceType(nodeInstanceType); + pb.setNodeTypeId(nodeTypeId); + pb.setNumCores(numCores); + pb.setNumGpus(numGpus); + pb.setPhotonDriverCapable(photonDriverCapable); + pb.setPhotonWorkerCapable(photonWorkerCapable); + pb.setSupportClusterTags(supportClusterTags); + pb.setSupportEbsVolumes(supportEbsVolumes); + pb.setSupportPortForwarding(supportPortForwarding); + + return pb; + } + + static NodeType fromPb(NodeTypePb pb) { + NodeType model = new NodeType(); + model.setCategory(pb.getCategory()); + model.setDescription(pb.getDescription()); + model.setDisplayOrder(pb.getDisplayOrder()); + model.setInstanceTypeId(pb.getInstanceTypeId()); + model.setIsDeprecated(pb.getIsDeprecated()); + model.setIsEncryptedInTransit(pb.getIsEncryptedInTransit()); + model.setIsGraviton(pb.getIsGraviton()); + model.setIsHidden(pb.getIsHidden()); + model.setIsIoCacheEnabled(pb.getIsIoCacheEnabled()); + model.setMemoryMb(pb.getMemoryMb()); + model.setNodeInfo(pb.getNodeInfo()); + model.setNodeInstanceType(pb.getNodeInstanceType()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumCores(pb.getNumCores()); + model.setNumGpus(pb.getNumGpus()); + model.setPhotonDriverCapable(pb.getPhotonDriverCapable()); + model.setPhotonWorkerCapable(pb.getPhotonWorkerCapable()); + 
model.setSupportClusterTags(pb.getSupportClusterTags()); + model.setSupportEbsVolumes(pb.getSupportEbsVolumes()); + model.setSupportPortForwarding(pb.getSupportPortForwarding()); + + return model; + } + + public static class NodeTypeSerializer extends JsonSerializer { + @Override + public void serialize(NodeType value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NodeTypePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NodeTypeDeserializer extends JsonDeserializer { + @Override + public NodeType deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NodeTypePb pb = mapper.readValue(p, NodeTypePb.class); + return NodeType.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypePb.java new file mode 100755 index 000000000..d0172db3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypePb.java @@ -0,0 +1,333 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A description of a Spark node type including both the dimensions of the node and the instance + * type on which it will be hosted. 
+ */ +@Generated +class NodeTypePb { + @JsonProperty("category") + private String category; + + @JsonProperty("description") + private String description; + + @JsonProperty("display_order") + private Long displayOrder; + + @JsonProperty("instance_type_id") + private String instanceTypeId; + + @JsonProperty("is_deprecated") + private Boolean isDeprecated; + + @JsonProperty("is_encrypted_in_transit") + private Boolean isEncryptedInTransit; + + @JsonProperty("is_graviton") + private Boolean isGraviton; + + @JsonProperty("is_hidden") + private Boolean isHidden; + + @JsonProperty("is_io_cache_enabled") + private Boolean isIoCacheEnabled; + + @JsonProperty("memory_mb") + private Long memoryMb; + + @JsonProperty("node_info") + private CloudProviderNodeInfo nodeInfo; + + @JsonProperty("node_instance_type") + private NodeInstanceType nodeInstanceType; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_cores") + private Double numCores; + + @JsonProperty("num_gpus") + private Long numGpus; + + @JsonProperty("photon_driver_capable") + private Boolean photonDriverCapable; + + @JsonProperty("photon_worker_capable") + private Boolean photonWorkerCapable; + + @JsonProperty("support_cluster_tags") + private Boolean supportClusterTags; + + @JsonProperty("support_ebs_volumes") + private Boolean supportEbsVolumes; + + @JsonProperty("support_port_forwarding") + private Boolean supportPortForwarding; + + public NodeTypePb setCategory(String category) { + this.category = category; + return this; + } + + public String getCategory() { + return category; + } + + public NodeTypePb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public NodeTypePb setDisplayOrder(Long displayOrder) { + this.displayOrder = displayOrder; + return this; + } + + public Long getDisplayOrder() { + return displayOrder; + } + + public NodeTypePb setInstanceTypeId(String 
instanceTypeId) { + this.instanceTypeId = instanceTypeId; + return this; + } + + public String getInstanceTypeId() { + return instanceTypeId; + } + + public NodeTypePb setIsDeprecated(Boolean isDeprecated) { + this.isDeprecated = isDeprecated; + return this; + } + + public Boolean getIsDeprecated() { + return isDeprecated; + } + + public NodeTypePb setIsEncryptedInTransit(Boolean isEncryptedInTransit) { + this.isEncryptedInTransit = isEncryptedInTransit; + return this; + } + + public Boolean getIsEncryptedInTransit() { + return isEncryptedInTransit; + } + + public NodeTypePb setIsGraviton(Boolean isGraviton) { + this.isGraviton = isGraviton; + return this; + } + + public Boolean getIsGraviton() { + return isGraviton; + } + + public NodeTypePb setIsHidden(Boolean isHidden) { + this.isHidden = isHidden; + return this; + } + + public Boolean getIsHidden() { + return isHidden; + } + + public NodeTypePb setIsIoCacheEnabled(Boolean isIoCacheEnabled) { + this.isIoCacheEnabled = isIoCacheEnabled; + return this; + } + + public Boolean getIsIoCacheEnabled() { + return isIoCacheEnabled; + } + + public NodeTypePb setMemoryMb(Long memoryMb) { + this.memoryMb = memoryMb; + return this; + } + + public Long getMemoryMb() { + return memoryMb; + } + + public NodeTypePb setNodeInfo(CloudProviderNodeInfo nodeInfo) { + this.nodeInfo = nodeInfo; + return this; + } + + public CloudProviderNodeInfo getNodeInfo() { + return nodeInfo; + } + + public NodeTypePb setNodeInstanceType(NodeInstanceType nodeInstanceType) { + this.nodeInstanceType = nodeInstanceType; + return this; + } + + public NodeInstanceType getNodeInstanceType() { + return nodeInstanceType; + } + + public NodeTypePb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public NodeTypePb setNumCores(Double numCores) { + this.numCores = numCores; + return this; + } + + public Double getNumCores() { + return numCores; + } + + public 
NodeTypePb setNumGpus(Long numGpus) { + this.numGpus = numGpus; + return this; + } + + public Long getNumGpus() { + return numGpus; + } + + public NodeTypePb setPhotonDriverCapable(Boolean photonDriverCapable) { + this.photonDriverCapable = photonDriverCapable; + return this; + } + + public Boolean getPhotonDriverCapable() { + return photonDriverCapable; + } + + public NodeTypePb setPhotonWorkerCapable(Boolean photonWorkerCapable) { + this.photonWorkerCapable = photonWorkerCapable; + return this; + } + + public Boolean getPhotonWorkerCapable() { + return photonWorkerCapable; + } + + public NodeTypePb setSupportClusterTags(Boolean supportClusterTags) { + this.supportClusterTags = supportClusterTags; + return this; + } + + public Boolean getSupportClusterTags() { + return supportClusterTags; + } + + public NodeTypePb setSupportEbsVolumes(Boolean supportEbsVolumes) { + this.supportEbsVolumes = supportEbsVolumes; + return this; + } + + public Boolean getSupportEbsVolumes() { + return supportEbsVolumes; + } + + public NodeTypePb setSupportPortForwarding(Boolean supportPortForwarding) { + this.supportPortForwarding = supportPortForwarding; + return this; + } + + public Boolean getSupportPortForwarding() { + return supportPortForwarding; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NodeTypePb that = (NodeTypePb) o; + return Objects.equals(category, that.category) + && Objects.equals(description, that.description) + && Objects.equals(displayOrder, that.displayOrder) + && Objects.equals(instanceTypeId, that.instanceTypeId) + && Objects.equals(isDeprecated, that.isDeprecated) + && Objects.equals(isEncryptedInTransit, that.isEncryptedInTransit) + && Objects.equals(isGraviton, that.isGraviton) + && Objects.equals(isHidden, that.isHidden) + && Objects.equals(isIoCacheEnabled, that.isIoCacheEnabled) + && Objects.equals(memoryMb, that.memoryMb) + && Objects.equals(nodeInfo, 
that.nodeInfo) + && Objects.equals(nodeInstanceType, that.nodeInstanceType) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(numCores, that.numCores) + && Objects.equals(numGpus, that.numGpus) + && Objects.equals(photonDriverCapable, that.photonDriverCapable) + && Objects.equals(photonWorkerCapable, that.photonWorkerCapable) + && Objects.equals(supportClusterTags, that.supportClusterTags) + && Objects.equals(supportEbsVolumes, that.supportEbsVolumes) + && Objects.equals(supportPortForwarding, that.supportPortForwarding); + } + + @Override + public int hashCode() { + return Objects.hash( + category, + description, + displayOrder, + instanceTypeId, + isDeprecated, + isEncryptedInTransit, + isGraviton, + isHidden, + isIoCacheEnabled, + memoryMb, + nodeInfo, + nodeInstanceType, + nodeTypeId, + numCores, + numGpus, + photonDriverCapable, + photonWorkerCapable, + supportClusterTags, + supportEbsVolumes, + supportPortForwarding); + } + + @Override + public String toString() { + return new ToStringer(NodeTypePb.class) + .add("category", category) + .add("description", description) + .add("displayOrder", displayOrder) + .add("instanceTypeId", instanceTypeId) + .add("isDeprecated", isDeprecated) + .add("isEncryptedInTransit", isEncryptedInTransit) + .add("isGraviton", isGraviton) + .add("isHidden", isHidden) + .add("isIoCacheEnabled", isIoCacheEnabled) + .add("memoryMb", memoryMb) + .add("nodeInfo", nodeInfo) + .add("nodeInstanceType", nodeInstanceType) + .add("nodeTypeId", nodeTypeId) + .add("numCores", numCores) + .add("numGpus", numGpus) + .add("photonDriverCapable", photonDriverCapable) + .add("photonWorkerCapable", photonWorkerCapable) + .add("supportClusterTags", supportClusterTags) + .add("supportEbsVolumes", supportEbsVolumes) + .add("supportPortForwarding", supportPortForwarding) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java index 7e21345dd..8e525ce08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Error message of a failed pending instances */ @Generated +@JsonSerialize(using = PendingInstanceError.PendingInstanceErrorSerializer.class) +@JsonDeserialize(using = PendingInstanceError.PendingInstanceErrorDeserializer.class) public class PendingInstanceError { /** */ - @JsonProperty("instance_id") private String instanceId; /** */ - @JsonProperty("message") private String message; public PendingInstanceError setInstanceId(String instanceId) { @@ -56,4 +65,42 @@ public String toString() { .add("message", message) .toString(); } + + PendingInstanceErrorPb toPb() { + PendingInstanceErrorPb pb = new PendingInstanceErrorPb(); + pb.setInstanceId(instanceId); + pb.setMessage(message); + + return pb; + } + + static PendingInstanceError fromPb(PendingInstanceErrorPb pb) { + PendingInstanceError model = new PendingInstanceError(); + model.setInstanceId(pb.getInstanceId()); + model.setMessage(pb.getMessage()); + + return 
model; + } + + public static class PendingInstanceErrorSerializer extends JsonSerializer { + @Override + public void serialize( + PendingInstanceError value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PendingInstanceErrorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PendingInstanceErrorDeserializer + extends JsonDeserializer { + @Override + public PendingInstanceError deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PendingInstanceErrorPb pb = mapper.readValue(p, PendingInstanceErrorPb.class); + return PendingInstanceError.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceErrorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceErrorPb.java new file mode 100755 index 000000000..6393db77d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceErrorPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Error message of a failed pending instances */ +@Generated +class PendingInstanceErrorPb { + @JsonProperty("instance_id") + private String instanceId; + + @JsonProperty("message") + private String message; + + public PendingInstanceErrorPb setInstanceId(String instanceId) { + this.instanceId = instanceId; + return this; + } + + public String getInstanceId() { + return instanceId; + } + + public PendingInstanceErrorPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PendingInstanceErrorPb that = (PendingInstanceErrorPb) o; + return Objects.equals(instanceId, that.instanceId) && Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(instanceId, message); + } + + @Override + public String toString() { + return new ToStringer(PendingInstanceErrorPb.class) + .add("instanceId", instanceId) + .add("message", message) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java index 193d265d4..78453534b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PermanentDeleteCluster.PermanentDeleteClusterSerializer.class) +@JsonDeserialize(using = PermanentDeleteCluster.PermanentDeleteClusterDeserializer.class) public class PermanentDeleteCluster { /** The cluster to be deleted. */ - @JsonProperty("cluster_id") private String clusterId; public PermanentDeleteCluster setClusterId(String clusterId) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(PermanentDeleteCluster.class).add("clusterId", clusterId).toString(); } + + PermanentDeleteClusterPb toPb() { + PermanentDeleteClusterPb pb = new PermanentDeleteClusterPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static PermanentDeleteCluster fromPb(PermanentDeleteClusterPb pb) { + PermanentDeleteCluster model = new PermanentDeleteCluster(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class PermanentDeleteClusterSerializer + extends JsonSerializer { + @Override + public void serialize( + PermanentDeleteCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermanentDeleteClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermanentDeleteClusterDeserializer + extends JsonDeserializer { + @Override + public PermanentDeleteCluster deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermanentDeleteClusterPb pb = mapper.readValue(p, PermanentDeleteClusterPb.class); + return PermanentDeleteCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterPb.java new file mode 100755 index 000000000..5919ec2bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PermanentDeleteClusterPb { + @JsonProperty("cluster_id") + private String clusterId; + + public PermanentDeleteClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermanentDeleteClusterPb that = (PermanentDeleteClusterPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(PermanentDeleteClusterPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java index 4a82a0542..12090226b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = PermanentDeleteClusterResponse.PermanentDeleteClusterResponseSerializer.class) +@JsonDeserialize( + using = PermanentDeleteClusterResponse.PermanentDeleteClusterResponseDeserializer.class) public class PermanentDeleteClusterResponse { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(PermanentDeleteClusterResponse.class).toString(); } + + PermanentDeleteClusterResponsePb toPb() { + PermanentDeleteClusterResponsePb pb = new PermanentDeleteClusterResponsePb(); + + return pb; + } + + static PermanentDeleteClusterResponse fromPb(PermanentDeleteClusterResponsePb pb) { + PermanentDeleteClusterResponse model = new PermanentDeleteClusterResponse(); + + return model; + } + + public static class PermanentDeleteClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + PermanentDeleteClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermanentDeleteClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } 
+ } + + public static class PermanentDeleteClusterResponseDeserializer + extends JsonDeserializer<PermanentDeleteClusterResponse> { + @Override + public PermanentDeleteClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermanentDeleteClusterResponsePb pb = + mapper.readValue(p, PermanentDeleteClusterResponsePb.class); + return PermanentDeleteClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponsePb.java new file mode 100755 index 000000000..da29d6b28 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PermanentDeleteClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PermanentDeleteClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java index acdbf8013..cae3d0173 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PinCluster.PinClusterSerializer.class) +@JsonDeserialize(using = PinCluster.PinClusterDeserializer.class) public class PinCluster { /** */ - @JsonProperty("cluster_id") private String clusterId; public PinCluster setClusterId(String clusterId) { @@ 
-39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(PinCluster.class).add("clusterId", clusterId).toString(); } + + PinClusterPb toPb() { + PinClusterPb pb = new PinClusterPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static PinCluster fromPb(PinClusterPb pb) { + PinCluster model = new PinCluster(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class PinClusterSerializer extends JsonSerializer<PinCluster> { + @Override + public void serialize(PinCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PinClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PinClusterDeserializer extends JsonDeserializer<PinCluster> { + @Override + public PinCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PinClusterPb pb = mapper.readValue(p, PinClusterPb.class); + return PinCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterPb.java new file mode 100755 index 000000000..aa465d52b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PinClusterPb { + @JsonProperty("cluster_id") + private String clusterId; + + public PinClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PinClusterPb that = (PinClusterPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(PinClusterPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java index ad4b7f691..1c99cd5f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PinClusterResponse.PinClusterResponseSerializer.class) +@JsonDeserialize(using = PinClusterResponse.PinClusterResponseDeserializer.class) public class PinClusterResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(PinClusterResponse.class).toString(); } + + PinClusterResponsePb toPb() { + PinClusterResponsePb pb = new PinClusterResponsePb(); + + return pb; + } + + static PinClusterResponse fromPb(PinClusterResponsePb pb) { + PinClusterResponse model = new PinClusterResponse(); + + return model; + } + + public static class PinClusterResponseSerializer extends JsonSerializer<PinClusterResponse> { + @Override + public void serialize(PinClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PinClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PinClusterResponseDeserializer extends JsonDeserializer<PinClusterResponse> { + @Override + public PinClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PinClusterResponsePb pb = mapper.readValue(p, PinClusterResponsePb.class); + return PinClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponsePb.java new file mode 100755 index 000000000..32e6af3bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PinClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PinClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java index e81025059..74c41049c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Describes a Cluster Policy entity. */ @Generated +@JsonSerialize(using = Policy.PolicySerializer.class) +@JsonDeserialize(using = Policy.PolicyDeserializer.class) public class Policy { /** Creation time. The timestamp (in millisecond) when this Cluster Policy was created. 
*/ - @JsonProperty("created_at_timestamp") private Long createdAtTimestamp; /** * Creator user name. The field won't be included in the response if the user has already been * deleted. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** @@ -28,39 +37,33 @@ public class Policy { *

[Databricks Cluster Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("definition") private String definition; /** Additional human-readable description of the cluster policy. */ - @JsonProperty("description") private String description; /** * If true, policy is a default policy created and managed by Databricks. Default policies cannot * be deleted, and their policy families cannot be changed. */ - @JsonProperty("is_default") private Boolean isDefault; /** * A list of libraries to be installed on the next cluster restart that uses this policy. The * maximum number of libraries is 500. */ - @JsonProperty("libraries") private Collection libraries; /** * Max number of clusters per user that can be active using this policy. If not present, there is * no max limit. */ - @JsonProperty("max_clusters_per_user") private Long maxClustersPerUser; /** * Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and * 100 characters. */ - @JsonProperty("name") private String name; /** @@ -73,7 +76,6 @@ public class Policy { *

[Databricks Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; /** @@ -83,11 +85,9 @@ public class Policy { *

Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to * customize the policy definition. */ - @JsonProperty("policy_family_id") private String policyFamilyId; /** Canonical unique identifier for the Cluster Policy. */ - @JsonProperty("policy_id") private String policyId; public Policy setCreatedAtTimestamp(Long createdAtTimestamp) { @@ -239,4 +239,57 @@ public String toString() { .add("policyId", policyId) .toString(); } + + PolicyPb toPb() { + PolicyPb pb = new PolicyPb(); + pb.setCreatedAtTimestamp(createdAtTimestamp); + pb.setCreatorUserName(creatorUserName); + pb.setDefinition(definition); + pb.setDescription(description); + pb.setIsDefault(isDefault); + pb.setLibraries(libraries); + pb.setMaxClustersPerUser(maxClustersPerUser); + pb.setName(name); + pb.setPolicyFamilyDefinitionOverrides(policyFamilyDefinitionOverrides); + pb.setPolicyFamilyId(policyFamilyId); + pb.setPolicyId(policyId); + + return pb; + } + + static Policy fromPb(PolicyPb pb) { + Policy model = new Policy(); + model.setCreatedAtTimestamp(pb.getCreatedAtTimestamp()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setDefinition(pb.getDefinition()); + model.setDescription(pb.getDescription()); + model.setIsDefault(pb.getIsDefault()); + model.setLibraries(pb.getLibraries()); + model.setMaxClustersPerUser(pb.getMaxClustersPerUser()); + model.setName(pb.getName()); + model.setPolicyFamilyDefinitionOverrides(pb.getPolicyFamilyDefinitionOverrides()); + model.setPolicyFamilyId(pb.getPolicyFamilyId()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class PolicySerializer extends JsonSerializer { + @Override + public void serialize(Policy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PolicyDeserializer extends JsonDeserializer { + @Override + public Policy deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PolicyPb pb = mapper.readValue(p, PolicyPb.class); + return Policy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java index 3fe7f6593..d8ed5a5a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java @@ -22,7 +22,7 @@ public EnforceClusterComplianceResponse enforceCompliance( String path = "/api/2.0/policies/clusters/enforce-compliance"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnforceClusterComplianceResponse.class); @@ -36,7 +36,7 @@ public GetClusterComplianceResponse getCompliance(GetClusterComplianceRequest re String path = "/api/2.0/policies/clusters/get-compliance"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetClusterComplianceResponse.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public ListClusterCompliancesResponse listCompliance(ListClusterCompliancesReque String path = "/api/2.0/policies/clusters/list-compliance"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return 
apiClient.execute(req, ListClusterCompliancesResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java index 90a79ba2c..b7e575fe7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java @@ -21,7 +21,7 @@ public PolicyFamily get(GetPolicyFamilyRequest request) { String path = String.format("/api/2.0/policy-families/%s", request.getPolicyFamilyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PolicyFamily.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public ListPolicyFamiliesResponse list(ListPolicyFamiliesRequest request) { String path = "/api/2.0/policy-families"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListPolicyFamiliesResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java index 8b1cc84cb..88a7096a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PolicyFamily.PolicyFamilySerializer.class) +@JsonDeserialize(using = PolicyFamily.PolicyFamilyDeserializer.class) public class PolicyFamily { /** * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -15,19 +26,15 @@ public class PolicyFamily { *

[Databricks Cluster Policy Definition Language]: * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ - @JsonProperty("definition") private String definition; /** Human-readable description of the purpose of the policy family. */ - @JsonProperty("description") private String description; /** Name of the policy family. */ - @JsonProperty("name") private String name; /** Unique identifier for the policy family. */ - @JsonProperty("policy_family_id") private String policyFamilyId; public PolicyFamily setDefinition(String definition) { @@ -91,4 +98,43 @@ public String toString() { .add("policyFamilyId", policyFamilyId) .toString(); } + + PolicyFamilyPb toPb() { + PolicyFamilyPb pb = new PolicyFamilyPb(); + pb.setDefinition(definition); + pb.setDescription(description); + pb.setName(name); + pb.setPolicyFamilyId(policyFamilyId); + + return pb; + } + + static PolicyFamily fromPb(PolicyFamilyPb pb) { + PolicyFamily model = new PolicyFamily(); + model.setDefinition(pb.getDefinition()); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setPolicyFamilyId(pb.getPolicyFamilyId()); + + return model; + } + + public static class PolicyFamilySerializer extends JsonSerializer { + @Override + public void serialize(PolicyFamily value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PolicyFamilyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PolicyFamilyDeserializer extends JsonDeserializer { + @Override + public PolicyFamily deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PolicyFamilyPb pb = mapper.readValue(p, PolicyFamilyPb.class); + return PolicyFamily.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamilyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamilyPb.java new file mode 100755 index 000000000..420cb44ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamilyPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PolicyFamilyPb { + @JsonProperty("definition") + private String definition; + + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("policy_family_id") + private String policyFamilyId; + + public PolicyFamilyPb setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public PolicyFamilyPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PolicyFamilyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PolicyFamilyPb setPolicyFamilyId(String policyFamilyId) { + this.policyFamilyId = policyFamilyId; + return this; + } + + public String getPolicyFamilyId() { + return policyFamilyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PolicyFamilyPb that = (PolicyFamilyPb) o; + return 
Objects.equals(definition, that.definition) + && Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(policyFamilyId, that.policyFamilyId); + } + + @Override + public int hashCode() { + return Objects.hash(definition, description, name, policyFamilyId); + } + + @Override + public String toString() { + return new ToStringer(PolicyFamilyPb.class) + .add("definition", definition) + .add("description", description) + .add("name", name) + .add("policyFamilyId", policyFamilyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyPb.java new file mode 100755 index 000000000..bc4badda3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyPb.java @@ -0,0 +1,196 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Describes a Cluster Policy entity. 
*/ +@Generated +class PolicyPb { + @JsonProperty("created_at_timestamp") + private Long createdAtTimestamp; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("definition") + private String definition; + + @JsonProperty("description") + private String description; + + @JsonProperty("is_default") + private Boolean isDefault; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("max_clusters_per_user") + private Long maxClustersPerUser; + + @JsonProperty("name") + private String name; + + @JsonProperty("policy_family_definition_overrides") + private String policyFamilyDefinitionOverrides; + + @JsonProperty("policy_family_id") + private String policyFamilyId; + + @JsonProperty("policy_id") + private String policyId; + + public PolicyPb setCreatedAtTimestamp(Long createdAtTimestamp) { + this.createdAtTimestamp = createdAtTimestamp; + return this; + } + + public Long getCreatedAtTimestamp() { + return createdAtTimestamp; + } + + public PolicyPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public PolicyPb setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public PolicyPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PolicyPb setIsDefault(Boolean isDefault) { + this.isDefault = isDefault; + return this; + } + + public Boolean getIsDefault() { + return isDefault; + } + + public PolicyPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public PolicyPb setMaxClustersPerUser(Long maxClustersPerUser) { + this.maxClustersPerUser = maxClustersPerUser; + return 
this; + } + + public Long getMaxClustersPerUser() { + return maxClustersPerUser; + } + + public PolicyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PolicyPb setPolicyFamilyDefinitionOverrides(String policyFamilyDefinitionOverrides) { + this.policyFamilyDefinitionOverrides = policyFamilyDefinitionOverrides; + return this; + } + + public String getPolicyFamilyDefinitionOverrides() { + return policyFamilyDefinitionOverrides; + } + + public PolicyPb setPolicyFamilyId(String policyFamilyId) { + this.policyFamilyId = policyFamilyId; + return this; + } + + public String getPolicyFamilyId() { + return policyFamilyId; + } + + public PolicyPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PolicyPb that = (PolicyPb) o; + return Objects.equals(createdAtTimestamp, that.createdAtTimestamp) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(definition, that.definition) + && Objects.equals(description, that.description) + && Objects.equals(isDefault, that.isDefault) + && Objects.equals(libraries, that.libraries) + && Objects.equals(maxClustersPerUser, that.maxClustersPerUser) + && Objects.equals(name, that.name) + && Objects.equals(policyFamilyDefinitionOverrides, that.policyFamilyDefinitionOverrides) + && Objects.equals(policyFamilyId, that.policyFamilyId) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAtTimestamp, + creatorUserName, + definition, + description, + isDefault, + libraries, + maxClustersPerUser, + name, + policyFamilyDefinitionOverrides, + policyFamilyId, + policyId); + } + + @Override + public String toString() { + return new ToStringer(PolicyPb.class) + 
.add("createdAtTimestamp", createdAtTimestamp) + .add("creatorUserName", creatorUserName) + .add("definition", definition) + .add("description", description) + .add("isDefault", isDefault) + .add("libraries", libraries) + .add("maxClustersPerUser", maxClustersPerUser) + .add("name", name) + .add("policyFamilyDefinitionOverrides", policyFamilyDefinitionOverrides) + .add("policyFamilyId", policyFamilyId) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java index c5896f10d..043f1d288 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PythonPyPiLibrary.PythonPyPiLibrarySerializer.class) +@JsonDeserialize(using = PythonPyPiLibrary.PythonPyPiLibraryDeserializer.class) public class PythonPyPiLibrary { /** * The name of the pypi package to install. An optional exact version specification is also * supported. Examples: "simplejson" and "simplejson==3.8.0". 
*/ - @JsonProperty("package") private String packageValue; /** * The repository where the package can be found. If not specified, the default pip index is used. */ - @JsonProperty("repo") private String repo; public PythonPyPiLibrary setPackage(String packageValue) { @@ -60,4 +69,40 @@ public String toString() { .add("repo", repo) .toString(); } + + PythonPyPiLibraryPb toPb() { + PythonPyPiLibraryPb pb = new PythonPyPiLibraryPb(); + pb.setPackage(packageValue); + pb.setRepo(repo); + + return pb; + } + + static PythonPyPiLibrary fromPb(PythonPyPiLibraryPb pb) { + PythonPyPiLibrary model = new PythonPyPiLibrary(); + model.setPackage(pb.getPackage()); + model.setRepo(pb.getRepo()); + + return model; + } + + public static class PythonPyPiLibrarySerializer extends JsonSerializer { + @Override + public void serialize(PythonPyPiLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PythonPyPiLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PythonPyPiLibraryDeserializer extends JsonDeserializer { + @Override + public PythonPyPiLibrary deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PythonPyPiLibraryPb pb = mapper.readValue(p, PythonPyPiLibraryPb.class); + return PythonPyPiLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibraryPb.java new file mode 100755 index 000000000..98ae95ee3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibraryPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PythonPyPiLibraryPb { + @JsonProperty("package") + private String packageValue; + + @JsonProperty("repo") + private String repo; + + public PythonPyPiLibraryPb setPackage(String packageValue) { + this.packageValue = packageValue; + return this; + } + + public String getPackage() { + return packageValue; + } + + public PythonPyPiLibraryPb setRepo(String repo) { + this.repo = repo; + return this; + } + + public String getRepo() { + return repo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PythonPyPiLibraryPb that = (PythonPyPiLibraryPb) o; + return Objects.equals(packageValue, that.packageValue) && Objects.equals(repo, that.repo); + } + + @Override + public int hashCode() { + return Objects.hash(packageValue, repo); + } + + @Override + public String toString() { + return new ToStringer(PythonPyPiLibraryPb.class) + .add("packageValue", packageValue) + .add("repo", repo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java index 6970cce62..961fce0e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RCranLibrary.RCranLibrarySerializer.class) +@JsonDeserialize(using = RCranLibrary.RCranLibraryDeserializer.class) public class RCranLibrary { /** The name of the CRAN package to install. */ - @JsonProperty("package") private String packageValue; /** * The repository where the package can be found. If not specified, the default CRAN repo is used. */ - @JsonProperty("repo") private String repo; public RCranLibrary setPackage(String packageValue) { @@ -57,4 +66,39 @@ public String toString() { .add("repo", repo) .toString(); } + + RCranLibraryPb toPb() { + RCranLibraryPb pb = new RCranLibraryPb(); + pb.setPackage(packageValue); + pb.setRepo(repo); + + return pb; + } + + static RCranLibrary fromPb(RCranLibraryPb pb) { + RCranLibrary model = new RCranLibrary(); + model.setPackage(pb.getPackage()); + model.setRepo(pb.getRepo()); + + return model; + } + + public static class RCranLibrarySerializer extends JsonSerializer { + @Override + public void serialize(RCranLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RCranLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RCranLibraryDeserializer extends JsonDeserializer { + @Override + public RCranLibrary deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RCranLibraryPb pb = mapper.readValue(p, RCranLibraryPb.class); + return RCranLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibraryPb.java new file mode 100755 index 000000000..2c3dd4fcf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibraryPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RCranLibraryPb { + @JsonProperty("package") + private String packageValue; + + @JsonProperty("repo") + private String repo; + + public RCranLibraryPb setPackage(String packageValue) { + this.packageValue = packageValue; + return this; + } + + public String getPackage() { + return packageValue; + } + + public RCranLibraryPb setRepo(String repo) { + this.repo = repo; + return this; + } + + public String getRepo() { + return repo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RCranLibraryPb that = (RCranLibraryPb) o; + return Objects.equals(packageValue, that.packageValue) && Objects.equals(repo, that.repo); + } + + @Override + public int hashCode() { + return Objects.hash(packageValue, repo); + } + + @Override + public String toString() { + return new ToStringer(RCranLibraryPb.class) + .add("packageValue", packageValue) + .add("repo", repo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java index 0c2a7fc7a..47d81cbb1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RemoveInstanceProfile.RemoveInstanceProfileSerializer.class) +@JsonDeserialize(using = RemoveInstanceProfile.RemoveInstanceProfileDeserializer.class) public class RemoveInstanceProfile { /** The ARN of the instance profile to remove. This field is required. 
*/ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; public RemoveInstanceProfile setInstanceProfileArn(String instanceProfileArn) { @@ -41,4 +51,41 @@ public String toString() { .add("instanceProfileArn", instanceProfileArn) .toString(); } + + RemoveInstanceProfilePb toPb() { + RemoveInstanceProfilePb pb = new RemoveInstanceProfilePb(); + pb.setInstanceProfileArn(instanceProfileArn); + + return pb; + } + + static RemoveInstanceProfile fromPb(RemoveInstanceProfilePb pb) { + RemoveInstanceProfile model = new RemoveInstanceProfile(); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + + return model; + } + + public static class RemoveInstanceProfileSerializer + extends JsonSerializer { + @Override + public void serialize( + RemoveInstanceProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RemoveInstanceProfilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RemoveInstanceProfileDeserializer + extends JsonDeserializer { + @Override + public RemoveInstanceProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RemoveInstanceProfilePb pb = mapper.readValue(p, RemoveInstanceProfilePb.class); + return RemoveInstanceProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfilePb.java new file mode 100755 index 000000000..78fc75409 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfilePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RemoveInstanceProfilePb { + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + public RemoveInstanceProfilePb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RemoveInstanceProfilePb that = (RemoveInstanceProfilePb) o; + return Objects.equals(instanceProfileArn, that.instanceProfileArn); + } + + @Override + public int hashCode() { + return Objects.hash(instanceProfileArn); + } + + @Override + public String toString() { + return new ToStringer(RemoveInstanceProfilePb.class) + .add("instanceProfileArn", instanceProfileArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java index 7c8b2e615..ed15978aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RemoveResponse.RemoveResponseSerializer.class) +@JsonDeserialize(using = RemoveResponse.RemoveResponseDeserializer.class) public class RemoveResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(RemoveResponse.class).toString(); } + + RemoveResponsePb toPb() { + RemoveResponsePb pb = new RemoveResponsePb(); + + return pb; + } + + static RemoveResponse fromPb(RemoveResponsePb pb) { + RemoveResponse model = new RemoveResponse(); + + return model; + } + + public static class RemoveResponseSerializer extends JsonSerializer { + @Override + public void serialize(RemoveResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RemoveResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RemoveResponseDeserializer extends JsonDeserializer { + @Override + public RemoveResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RemoveResponsePb pb = mapper.readValue(p, RemoveResponsePb.class); + return RemoveResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponsePb.java new file mode 100755 index 000000000..2e62d5a3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RemoveResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RemoveResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java index 5af6421b9..aac99ccfb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ResizeCluster.ResizeClusterSerializer.class) +@JsonDeserialize(using = ResizeCluster.ResizeClusterDeserializer.class) public class ResizeCluster { /** * Parameters needed in order to automatically scale clusters up and down based on load. 
Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** The cluster to be resized. */ - @JsonProperty("cluster_id") private String clusterId; /** @@ -30,7 +39,6 @@ public class ResizeCluster { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; public ResizeCluster setAutoscale(AutoScale autoscale) { @@ -83,4 +91,41 @@ public String toString() { .add("numWorkers", numWorkers) .toString(); } + + ResizeClusterPb toPb() { + ResizeClusterPb pb = new ResizeClusterPb(); + pb.setAutoscale(autoscale); + pb.setClusterId(clusterId); + pb.setNumWorkers(numWorkers); + + return pb; + } + + static ResizeCluster fromPb(ResizeClusterPb pb) { + ResizeCluster model = new ResizeCluster(); + model.setAutoscale(pb.getAutoscale()); + model.setClusterId(pb.getClusterId()); + model.setNumWorkers(pb.getNumWorkers()); + + return model; + } + + public static class ResizeClusterSerializer extends JsonSerializer { + @Override + public void serialize(ResizeCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResizeClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResizeClusterDeserializer extends JsonDeserializer { + @Override + public ResizeCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResizeClusterPb pb = mapper.readValue(p, ResizeClusterPb.class); + return ResizeCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterPb.java new file mode 100755 index 000000000..5f7700829 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ResizeClusterPb { + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("num_workers") + private Long numWorkers; + + public ResizeClusterPb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public ResizeClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ResizeClusterPb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResizeClusterPb that = (ResizeClusterPb) o; + return Objects.equals(autoscale, that.autoscale) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(numWorkers, that.numWorkers); + } + + @Override + public int hashCode() { + return Objects.hash(autoscale, clusterId, 
numWorkers); + } + + @Override + public String toString() { + return new ToStringer(ResizeClusterPb.class) + .add("autoscale", autoscale) + .add("clusterId", clusterId) + .add("numWorkers", numWorkers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java index a14e93d60..5864d8202 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ResizeClusterResponse.ResizeClusterResponseSerializer.class) +@JsonDeserialize(using = ResizeClusterResponse.ResizeClusterResponseDeserializer.class) public class ResizeClusterResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(ResizeClusterResponse.class).toString(); } + + ResizeClusterResponsePb toPb() { + ResizeClusterResponsePb pb = new ResizeClusterResponsePb(); + + return pb; + } + + static ResizeClusterResponse fromPb(ResizeClusterResponsePb pb) { + ResizeClusterResponse model = new ResizeClusterResponse(); + + return model; + } + + public 
static class ResizeClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ResizeClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResizeClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResizeClusterResponseDeserializer + extends JsonDeserializer { + @Override + public ResizeClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResizeClusterResponsePb pb = mapper.readValue(p, ResizeClusterResponsePb.class); + return ResizeClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponsePb.java new file mode 100755 index 000000000..4274f4f12 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ResizeClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ResizeClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java index ee78f3391..b00ca11f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestartCluster.RestartClusterSerializer.class) +@JsonDeserialize(using = RestartCluster.RestartClusterDeserializer.class) public class RestartCluster { /** The cluster to be started. 
*/ - @JsonProperty("cluster_id") private String clusterId; /** */ - @JsonProperty("restart_user") private String restartUser; public RestartCluster setClusterId(String clusterId) { @@ -56,4 +65,40 @@ public String toString() { .add("restartUser", restartUser) .toString(); } + + RestartClusterPb toPb() { + RestartClusterPb pb = new RestartClusterPb(); + pb.setClusterId(clusterId); + pb.setRestartUser(restartUser); + + return pb; + } + + static RestartCluster fromPb(RestartClusterPb pb) { + RestartCluster model = new RestartCluster(); + model.setClusterId(pb.getClusterId()); + model.setRestartUser(pb.getRestartUser()); + + return model; + } + + public static class RestartClusterSerializer extends JsonSerializer { + @Override + public void serialize(RestartCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestartClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestartClusterDeserializer extends JsonDeserializer { + @Override + public RestartCluster deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestartClusterPb pb = mapper.readValue(p, RestartClusterPb.class); + return RestartCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterPb.java new file mode 100755 index 000000000..621d4a7cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestartClusterPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("restart_user") + private String restartUser; + + public RestartClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public RestartClusterPb setRestartUser(String restartUser) { + this.restartUser = restartUser; + return this; + } + + public String getRestartUser() { + return restartUser; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestartClusterPb that = (RestartClusterPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(restartUser, that.restartUser); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, restartUser); + } + + @Override + public String toString() { + return new ToStringer(RestartClusterPb.class) + .add("clusterId", clusterId) + .add("restartUser", restartUser) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java index a1dac51ac..a6b4268c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestartClusterResponse.RestartClusterResponseSerializer.class) +@JsonDeserialize(using = RestartClusterResponse.RestartClusterResponseDeserializer.class) public class RestartClusterResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(RestartClusterResponse.class).toString(); } + + RestartClusterResponsePb toPb() { + RestartClusterResponsePb pb = new RestartClusterResponsePb(); + + return pb; + } + + static RestartClusterResponse fromPb(RestartClusterResponsePb pb) { + RestartClusterResponse model = new RestartClusterResponse(); + + return model; + } + + public static class RestartClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RestartClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestartClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestartClusterResponseDeserializer + extends JsonDeserializer { + @Override + public RestartClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestartClusterResponsePb pb = mapper.readValue(p, RestartClusterResponsePb.class); + return RestartClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponsePb.java new file mode 100755 index 000000000..7d541eb48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RestartClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RestartClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java index ef5dfa156..7eed66491 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java @@ -4,51 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = Results.ResultsSerializer.class) +@JsonDeserialize(using = Results.ResultsDeserializer.class) public class Results { /** The cause of the error */ - @JsonProperty("cause") private String cause; /** */ - @JsonProperty("data") private Object data; /** The image filename */ - @JsonProperty("fileName") private String fileName; /** */ - @JsonProperty("fileNames") private Collection fileNames; /** true if a JSON schema is returned instead of a string representation of the Hive type. */ - @JsonProperty("isJsonSchema") private Boolean isJsonSchema; /** internal field used by SDK */ - @JsonProperty("pos") private Long pos; /** */ - @JsonProperty("resultType") private ResultType resultType; /** The table schema */ - @JsonProperty("schema") private Collection> schema; /** The summary of the error */ - @JsonProperty("summary") private String summary; /** true if partial results are returned. 
*/ - @JsonProperty("truncated") private Boolean truncated; public Results setCause(String cause) { @@ -188,4 +189,55 @@ public String toString() { .add("truncated", truncated) .toString(); } + + ResultsPb toPb() { + ResultsPb pb = new ResultsPb(); + pb.setCause(cause); + pb.setData(data); + pb.setFileName(fileName); + pb.setFileNames(fileNames); + pb.setIsJsonSchema(isJsonSchema); + pb.setPos(pos); + pb.setResultType(resultType); + pb.setSchema(schema); + pb.setSummary(summary); + pb.setTruncated(truncated); + + return pb; + } + + static Results fromPb(ResultsPb pb) { + Results model = new Results(); + model.setCause(pb.getCause()); + model.setData(pb.getData()); + model.setFileName(pb.getFileName()); + model.setFileNames(pb.getFileNames()); + model.setIsJsonSchema(pb.getIsJsonSchema()); + model.setPos(pb.getPos()); + model.setResultType(pb.getResultType()); + model.setSchema(pb.getSchema()); + model.setSummary(pb.getSummary()); + model.setTruncated(pb.getTruncated()); + + return model; + } + + public static class ResultsSerializer extends JsonSerializer { + @Override + public void serialize(Results value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultsDeserializer extends JsonDeserializer { + @Override + public Results deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultsPb pb = mapper.readValue(p, ResultsPb.class); + return Results.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResultsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResultsPb.java new file mode 100755 index 000000000..54a3a73fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResultsPb.java @@ -0,0 +1,181 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class ResultsPb { + @JsonProperty("cause") + private String cause; + + @JsonProperty("data") + private Object data; + + @JsonProperty("fileName") + private String fileName; + + @JsonProperty("fileNames") + private Collection fileNames; + + @JsonProperty("isJsonSchema") + private Boolean isJsonSchema; + + @JsonProperty("pos") + private Long pos; + + @JsonProperty("resultType") + private ResultType resultType; + + @JsonProperty("schema") + private Collection> schema; + + @JsonProperty("summary") + private String summary; + + @JsonProperty("truncated") + private Boolean truncated; + + public ResultsPb setCause(String cause) { + this.cause = cause; + return this; + } + + public String getCause() { + return cause; + } + + public ResultsPb setData(Object data) { + this.data = data; + return this; + } + + public Object getData() { + return data; + } + + public ResultsPb setFileName(String fileName) { + this.fileName = fileName; + return this; + } + + public String getFileName() { + return fileName; + } + + public ResultsPb setFileNames(Collection fileNames) { + this.fileNames = fileNames; + return this; + } + + 
public Collection getFileNames() { + return fileNames; + } + + public ResultsPb setIsJsonSchema(Boolean isJsonSchema) { + this.isJsonSchema = isJsonSchema; + return this; + } + + public Boolean getIsJsonSchema() { + return isJsonSchema; + } + + public ResultsPb setPos(Long pos) { + this.pos = pos; + return this; + } + + public Long getPos() { + return pos; + } + + public ResultsPb setResultType(ResultType resultType) { + this.resultType = resultType; + return this; + } + + public ResultType getResultType() { + return resultType; + } + + public ResultsPb setSchema(Collection> schema) { + this.schema = schema; + return this; + } + + public Collection> getSchema() { + return schema; + } + + public ResultsPb setSummary(String summary) { + this.summary = summary; + return this; + } + + public String getSummary() { + return summary; + } + + public ResultsPb setTruncated(Boolean truncated) { + this.truncated = truncated; + return this; + } + + public Boolean getTruncated() { + return truncated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultsPb that = (ResultsPb) o; + return Objects.equals(cause, that.cause) + && Objects.equals(data, that.data) + && Objects.equals(fileName, that.fileName) + && Objects.equals(fileNames, that.fileNames) + && Objects.equals(isJsonSchema, that.isJsonSchema) + && Objects.equals(pos, that.pos) + && Objects.equals(resultType, that.resultType) + && Objects.equals(schema, that.schema) + && Objects.equals(summary, that.summary) + && Objects.equals(truncated, that.truncated); + } + + @Override + public int hashCode() { + return Objects.hash( + cause, + data, + fileName, + fileNames, + isJsonSchema, + pos, + resultType, + schema, + summary, + truncated); + } + + @Override + public String toString() { + return new ToStringer(ResultsPb.class) + .add("cause", cause) + .add("data", data) + .add("fileName", fileName) + .add("fileNames", fileNames) + 
.add("isJsonSchema", isJsonSchema) + .add("pos", pos) + .add("resultType", resultType) + .add("schema", schema) + .add("summary", summary) + .add("truncated", truncated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java index de453f1ca..45d7116a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location in Amazon S3 */ @Generated +@JsonSerialize(using = S3StorageInfo.S3StorageInfoSerializer.class) +@JsonDeserialize(using = S3StorageInfo.S3StorageInfoDeserializer.class) public class S3StorageInfo { /** * (Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`. If @@ -19,7 +30,6 @@ public class S3StorageInfo { * for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to * read the logs. 
*/ - @JsonProperty("canned_acl") private String cannedAcl; /** @@ -27,39 +37,33 @@ public class S3StorageInfo { * cluster iam role, please make sure you set cluster iam role and the role has write access to * the destination. Please also note that you cannot use AWS keys to deliver logs. */ - @JsonProperty("destination") private String destination; /** (Optional) Flag to enable server side encryption, `false` by default. */ - @JsonProperty("enable_encryption") private Boolean enableEncryption; /** * (Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when * encryption is enabled and the default type is `sse-s3`. */ - @JsonProperty("encryption_type") private String encryptionType; /** * S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be * set. If both are set, endpoint will be used. */ - @JsonProperty("endpoint") private String endpoint; /** * (Optional) Kms key which will be used if encryption is enabled and encryption type is set to * `sse-kms`. */ - @JsonProperty("kms_key") private String kmsKey; /** * S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set, * endpoint will be used. 
*/ - @JsonProperty("region") private String region; public S3StorageInfo setCannedAcl(String cannedAcl) { @@ -157,4 +161,49 @@ public String toString() { .add("region", region) .toString(); } + + S3StorageInfoPb toPb() { + S3StorageInfoPb pb = new S3StorageInfoPb(); + pb.setCannedAcl(cannedAcl); + pb.setDestination(destination); + pb.setEnableEncryption(enableEncryption); + pb.setEncryptionType(encryptionType); + pb.setEndpoint(endpoint); + pb.setKmsKey(kmsKey); + pb.setRegion(region); + + return pb; + } + + static S3StorageInfo fromPb(S3StorageInfoPb pb) { + S3StorageInfo model = new S3StorageInfo(); + model.setCannedAcl(pb.getCannedAcl()); + model.setDestination(pb.getDestination()); + model.setEnableEncryption(pb.getEnableEncryption()); + model.setEncryptionType(pb.getEncryptionType()); + model.setEndpoint(pb.getEndpoint()); + model.setKmsKey(pb.getKmsKey()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class S3StorageInfoSerializer extends JsonSerializer { + @Override + public void serialize(S3StorageInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + S3StorageInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class S3StorageInfoDeserializer extends JsonDeserializer { + @Override + public S3StorageInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + S3StorageInfoPb pb = mapper.readValue(p, S3StorageInfoPb.class); + return S3StorageInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfoPb.java new file mode 100755 index 000000000..1b9a4f60d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfoPb.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location in Amazon S3 */ +@Generated +class S3StorageInfoPb { + @JsonProperty("canned_acl") + private String cannedAcl; + + @JsonProperty("destination") + private String destination; + + @JsonProperty("enable_encryption") + private Boolean enableEncryption; + + @JsonProperty("encryption_type") + private String encryptionType; + + @JsonProperty("endpoint") + private String endpoint; + + @JsonProperty("kms_key") + private String kmsKey; + + @JsonProperty("region") + private String region; + + public S3StorageInfoPb setCannedAcl(String cannedAcl) { + this.cannedAcl = cannedAcl; + return this; + } + + public String getCannedAcl() { + return cannedAcl; + } + + public S3StorageInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + public S3StorageInfoPb setEnableEncryption(Boolean enableEncryption) { + this.enableEncryption = enableEncryption; + return this; + } + + public Boolean getEnableEncryption() { + return enableEncryption; + } + + public S3StorageInfoPb setEncryptionType(String encryptionType) { + 
this.encryptionType = encryptionType; + return this; + } + + public String getEncryptionType() { + return encryptionType; + } + + public S3StorageInfoPb setEndpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } + + public String getEndpoint() { + return endpoint; + } + + public S3StorageInfoPb setKmsKey(String kmsKey) { + this.kmsKey = kmsKey; + return this; + } + + public String getKmsKey() { + return kmsKey; + } + + public S3StorageInfoPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + S3StorageInfoPb that = (S3StorageInfoPb) o; + return Objects.equals(cannedAcl, that.cannedAcl) + && Objects.equals(destination, that.destination) + && Objects.equals(enableEncryption, that.enableEncryption) + && Objects.equals(encryptionType, that.encryptionType) + && Objects.equals(endpoint, that.endpoint) + && Objects.equals(kmsKey, that.kmsKey) + && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash( + cannedAcl, destination, enableEncryption, encryptionType, endpoint, kmsKey, region); + } + + @Override + public String toString() { + return new ToStringer(S3StorageInfoPb.class) + .add("cannedAcl", cannedAcl) + .add("destination", destination) + .add("enableEncryption", enableEncryption) + .add("encryptionType", encryptionType) + .add("endpoint", endpoint) + .add("kmsKey", kmsKey) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java index 4f24cf973..daf07a062 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Describes a specific Spark driver or executor. */ @Generated +@JsonSerialize(using = SparkNode.SparkNodeSerializer.class) +@JsonDeserialize(using = SparkNode.SparkNodeDeserializer.class) public class SparkNode { /** The private IP address of the host instance. */ - @JsonProperty("host_private_ip") private String hostPrivateIp; /** Globally unique identifier for the host instance from the cloud provider. */ - @JsonProperty("instance_id") private String instanceId; /** Attributes specific to AWS for a Spark node. */ - @JsonProperty("node_aws_attributes") private SparkNodeAwsAttributes nodeAwsAttributes; /** Globally unique identifier for this node. */ - @JsonProperty("node_id") private String nodeId; /** * Private IP address (typically a 10.x.x.x address) of the Spark node. Note that this is * different from the private IP address of the host instance. */ - @JsonProperty("private_ip") private String privateIp; /** @@ -38,11 +44,9 @@ public class SparkNode { * the driver node. To communicate with the JDBC server, traffic must be manually authorized by * adding security group rules to the "worker-unmanaged" security group via the AWS console. 
*/ - @JsonProperty("public_dns") private String publicDns; /** The timestamp (in millisecond) when the Spark node is launched. */ - @JsonProperty("start_timestamp") private Long startTimestamp; public SparkNode setHostPrivateIp(String hostPrivateIp) { @@ -140,4 +144,49 @@ public String toString() { .add("startTimestamp", startTimestamp) .toString(); } + + SparkNodePb toPb() { + SparkNodePb pb = new SparkNodePb(); + pb.setHostPrivateIp(hostPrivateIp); + pb.setInstanceId(instanceId); + pb.setNodeAwsAttributes(nodeAwsAttributes); + pb.setNodeId(nodeId); + pb.setPrivateIp(privateIp); + pb.setPublicDns(publicDns); + pb.setStartTimestamp(startTimestamp); + + return pb; + } + + static SparkNode fromPb(SparkNodePb pb) { + SparkNode model = new SparkNode(); + model.setHostPrivateIp(pb.getHostPrivateIp()); + model.setInstanceId(pb.getInstanceId()); + model.setNodeAwsAttributes(pb.getNodeAwsAttributes()); + model.setNodeId(pb.getNodeId()); + model.setPrivateIp(pb.getPrivateIp()); + model.setPublicDns(pb.getPublicDns()); + model.setStartTimestamp(pb.getStartTimestamp()); + + return model; + } + + public static class SparkNodeSerializer extends JsonSerializer { + @Override + public void serialize(SparkNode value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparkNodePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparkNodeDeserializer extends JsonDeserializer { + @Override + public SparkNode deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparkNodePb pb = mapper.readValue(p, SparkNodePb.class); + return SparkNode.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java index b3f24f2de..3636b12b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Attributes specific to AWS for a Spark node. */ @Generated +@JsonSerialize(using = SparkNodeAwsAttributes.SparkNodeAwsAttributesSerializer.class) +@JsonDeserialize(using = SparkNodeAwsAttributes.SparkNodeAwsAttributesDeserializer.class) public class SparkNodeAwsAttributes { /** Whether this node is on an Amazon spot instance. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Attributes specific to AWS for a Spark node.
 *
 * <p>Wire-format twin of the public {@code SparkNodeAwsAttributes} model; the single field maps to
 * the {@code is_spot} JSON property.
 */
@Generated
class SparkNodeAwsAttributesPb {
  @JsonProperty("is_spot")
  private Boolean isSpot;

  /** Fluent setter: returns this instance so assignments can be chained. */
  public SparkNodeAwsAttributesPb setIsSpot(Boolean isSpot) {
    this.isSpot = isSpot;
    return this;
  }

  public Boolean getIsSpot() {
    return isSpot;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    SparkNodeAwsAttributesPb other = (SparkNodeAwsAttributesPb) obj;
    return Objects.equals(isSpot, other.isSpot);
  }

  @Override
  public int hashCode() {
    return Objects.hash(isSpot);
  }

  @Override
  public String toString() {
    return new ToStringer(SparkNodeAwsAttributesPb.class).add("isSpot", isSpot).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Describes a specific Spark driver or executor.
 *
 * <p>Wire-format twin of the public {@code SparkNode} model: each field maps onto the snake_case
 * JSON property named in its annotation. Note that {@code nodeAwsAttributes} deliberately uses the
 * model type {@code SparkNodeAwsAttributes}, whose own (de)serializers handle the nested object.
 */
@Generated
class SparkNodePb {
  @JsonProperty("host_private_ip")
  private String hostPrivateIp;

  @JsonProperty("instance_id")
  private String instanceId;

  @JsonProperty("node_aws_attributes")
  private SparkNodeAwsAttributes nodeAwsAttributes;

  @JsonProperty("node_id")
  private String nodeId;

  @JsonProperty("private_ip")
  private String privateIp;

  @JsonProperty("public_dns")
  private String publicDns;

  @JsonProperty("start_timestamp")
  private Long startTimestamp;

  // Fluent setters: each returns this instance so assignments can be chained.

  public SparkNodePb setHostPrivateIp(String hostPrivateIp) {
    this.hostPrivateIp = hostPrivateIp;
    return this;
  }

  public String getHostPrivateIp() {
    return hostPrivateIp;
  }

  public SparkNodePb setInstanceId(String instanceId) {
    this.instanceId = instanceId;
    return this;
  }

  public String getInstanceId() {
    return instanceId;
  }

  public SparkNodePb setNodeAwsAttributes(SparkNodeAwsAttributes nodeAwsAttributes) {
    this.nodeAwsAttributes = nodeAwsAttributes;
    return this;
  }

  public SparkNodeAwsAttributes getNodeAwsAttributes() {
    return nodeAwsAttributes;
  }

  public SparkNodePb setNodeId(String nodeId) {
    this.nodeId = nodeId;
    return this;
  }

  public String getNodeId() {
    return nodeId;
  }

  public SparkNodePb setPrivateIp(String privateIp) {
    this.privateIp = privateIp;
    return this;
  }

  public String getPrivateIp() {
    return privateIp;
  }

  public SparkNodePb setPublicDns(String publicDns) {
    this.publicDns = publicDns;
    return this;
  }

  public String getPublicDns() {
    return publicDns;
  }

  public SparkNodePb setStartTimestamp(Long startTimestamp) {
    this.startTimestamp = startTimestamp;
    return this;
  }

  public Long getStartTimestamp() {
    return startTimestamp;
  }

  /** Field-by-field equality; two instances are equal iff every property matches. */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    SparkNodePb other = (SparkNodePb) obj;
    return Objects.equals(hostPrivateIp, other.hostPrivateIp)
        && Objects.equals(instanceId, other.instanceId)
        && Objects.equals(nodeAwsAttributes, other.nodeAwsAttributes)
        && Objects.equals(nodeId, other.nodeId)
        && Objects.equals(privateIp, other.privateIp)
        && Objects.equals(publicDns, other.publicDns)
        && Objects.equals(startTimestamp, other.startTimestamp);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        hostPrivateIp, instanceId, nodeAwsAttributes, nodeId, privateIp, publicDns, startTimestamp);
  }

  @Override
  public String toString() {
    return new ToStringer(SparkNodePb.class)
        .add("hostPrivateIp", hostPrivateIp)
        .add("instanceId", instanceId)
        .add("nodeAwsAttributes", nodeAwsAttributes)
        .add("nodeId", nodeId)
        .add("privateIp", privateIp)
        .add("publicDns", publicDns)
        .add("startTimestamp", startTimestamp)
        .toString();
  }
}
@Generated +@JsonSerialize(using = SparkVersion.SparkVersionSerializer.class) +@JsonDeserialize(using = SparkVersion.SparkVersionDeserializer.class) public class SparkVersion { /** * Spark version key, for example "2.1.x-scala2.11". This is the value which should be provided as @@ -15,11 +26,9 @@ public class SparkVersion { * over time for a "wildcard" version (i.e., "2.1.x-scala2.11" is a "wildcard" version) with minor * bug fixes. */ - @JsonProperty("key") private String key; /** A descriptive name for this Spark version, for example "Spark 2.1". */ - @JsonProperty("name") private String name; public SparkVersion setKey(String key) { @@ -57,4 +66,39 @@ public int hashCode() { public String toString() { return new ToStringer(SparkVersion.class).add("key", key).add("name", name).toString(); } + + SparkVersionPb toPb() { + SparkVersionPb pb = new SparkVersionPb(); + pb.setKey(key); + pb.setName(name); + + return pb; + } + + static SparkVersion fromPb(SparkVersionPb pb) { + SparkVersion model = new SparkVersion(); + model.setKey(pb.getKey()); + model.setName(pb.getName()); + + return model; + } + + public static class SparkVersionSerializer extends JsonSerializer { + @Override + public void serialize(SparkVersion value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparkVersionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparkVersionDeserializer extends JsonDeserializer { + @Override + public SparkVersion deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of the public {@code SparkVersion} model: a Spark version {@code key} (the
 * value submitted as {@code spark_version} when creating a cluster) plus its descriptive
 * {@code name}.
 */
@Generated
class SparkVersionPb {
  @JsonProperty("key")
  private String key;

  @JsonProperty("name")
  private String name;

  // Fluent setters: each returns this instance so assignments can be chained.

  public SparkVersionPb setKey(String key) {
    this.key = key;
    return this;
  }

  public String getKey() {
    return key;
  }

  public SparkVersionPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    SparkVersionPb other = (SparkVersionPb) obj;
    return Objects.equals(key, other.key) && Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(key, name);
  }

  @Override
  public String toString() {
    return new ToStringer(SparkVersionPb.class).add("key", key).add("name", name).toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StartCluster.StartClusterSerializer.class) +@JsonDeserialize(using = StartCluster.StartClusterDeserializer.class) public class StartCluster { /** The cluster to be started. 
*/ - @JsonProperty("cluster_id") private String clusterId; public StartCluster setClusterId(String clusterId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(StartCluster.class).add("clusterId", clusterId).toString(); } + + StartClusterPb toPb() { + StartClusterPb pb = new StartClusterPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static StartCluster fromPb(StartClusterPb pb) { + StartCluster model = new StartCluster(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class StartClusterSerializer extends JsonSerializer { + @Override + public void serialize(StartCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartClusterDeserializer extends JsonDeserializer { + @Override + public StartCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartClusterPb pb = mapper.readValue(p, StartClusterPb.class); + return StartCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterPb.java new file mode 100755 index 000000000..03d5bcf79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of the public {@code StartCluster} model: identifies the cluster to be started
 * via the {@code cluster_id} JSON property.
 */
@Generated
class StartClusterPb {
  @JsonProperty("cluster_id")
  private String clusterId;

  /** Fluent setter: returns this instance so assignments can be chained. */
  public StartClusterPb setClusterId(String clusterId) {
    this.clusterId = clusterId;
    return this;
  }

  public String getClusterId() {
    return clusterId;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    StartClusterPb other = (StartClusterPb) obj;
    return Objects.equals(clusterId, other.clusterId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(clusterId);
  }

  @Override
  public String toString() {
    return new ToStringer(StartClusterPb.class).add("clusterId", clusterId).toString();
  }
}
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StartClusterResponse.StartClusterResponseSerializer.class) +@JsonDeserialize(using = StartClusterResponse.StartClusterResponseDeserializer.class) public class StartClusterResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(StartClusterResponse.class).toString(); } + + StartClusterResponsePb toPb() { + StartClusterResponsePb pb = new StartClusterResponsePb(); + + return pb; + } + + static StartClusterResponse fromPb(StartClusterResponsePb pb) { + StartClusterResponse model = new StartClusterResponse(); + + return model; + } + + public static class StartClusterResponseSerializer extends JsonSerializer { + @Override + public void serialize( + StartClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartClusterResponseDeserializer + extends JsonDeserializer { + @Override + public StartClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartClusterResponsePb pb = mapper.readValue(p, StartClusterResponsePb.class); + return StartClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponsePb.java new file mode 100755 index 000000000..fc5b61289 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;

/**
 * Wire-format twin of the public {@code StartClusterResponse} model. The response body carries no
 * fields, so all instances of this class are equal to one another.
 */
@Generated
class StartClusterResponsePb {

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    // No fields: any two non-null instances of this exact class are equal.
    return obj != null && getClass() == obj.getClass();
  }

  @Override
  public int hashCode() {
    return Objects.hash();
  }

  @Override
  public String toString() {
    return new ToStringer(StartClusterResponsePb.class).toString();
  }
}
terminated */ - @JsonProperty("code") private TerminationReasonCode code; /** list of parameters that provide additional information about why the cluster was terminated */ - @JsonProperty("parameters") private Map parameters; /** type of the termination */ - @JsonProperty("type") private TerminationReasonType typeValue; public TerminationReason setCode(TerminationReasonCode code) { @@ -72,4 +80,42 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + TerminationReasonPb toPb() { + TerminationReasonPb pb = new TerminationReasonPb(); + pb.setCode(code); + pb.setParameters(parameters); + pb.setType(typeValue); + + return pb; + } + + static TerminationReason fromPb(TerminationReasonPb pb) { + TerminationReason model = new TerminationReason(); + model.setCode(pb.getCode()); + model.setParameters(pb.getParameters()); + model.setType(pb.getType()); + + return model; + } + + public static class TerminationReasonSerializer extends JsonSerializer { + @Override + public void serialize(TerminationReason value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TerminationReasonPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TerminationReasonDeserializer extends JsonDeserializer { + @Override + public TerminationReason deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TerminationReasonPb pb = mapper.readValue(p, TerminationReasonPb.class); + return TerminationReason.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 8eba3a564..74a38a68c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -76,6 +76,7 @@ public enum TerminationReasonCode { DRIVER_OUT_OF_MEMORY, DRIVER_POD_CREATION_FAILURE, DRIVER_UNEXPECTED_FAILURE, + DRIVER_UNHEALTHY, DRIVER_UNREACHABLE, DRIVER_UNRESPONSIVE, DYNAMIC_SPARK_CONF_SIZE_EXCEEDED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonPb.java new file mode 100755 index 000000000..cac8e6544 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.compute;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format twin of the public {@code TerminationReason} model: why a cluster was terminated.
 *
 * <p>The field backing the {@code type} JSON property is named {@code typeValue} because
 * {@code type} would shadow poorly in generated code; its accessors are still {@code setType}/
 * {@code getType} to match the model's interface.
 */
@Generated
class TerminationReasonPb {
  @JsonProperty("code")
  private TerminationReasonCode code;

  @JsonProperty("parameters")
  private Map<String, String> parameters;

  @JsonProperty("type")
  private TerminationReasonType typeValue;

  // Fluent setters: each returns this instance so assignments can be chained.

  public TerminationReasonPb setCode(TerminationReasonCode code) {
    this.code = code;
    return this;
  }

  public TerminationReasonCode getCode() {
    return code;
  }

  public TerminationReasonPb setParameters(Map<String, String> parameters) {
    this.parameters = parameters;
    return this;
  }

  public Map<String, String> getParameters() {
    return parameters;
  }

  public TerminationReasonPb setType(TerminationReasonType typeValue) {
    this.typeValue = typeValue;
    return this;
  }

  public TerminationReasonType getType() {
    return typeValue;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    TerminationReasonPb other = (TerminationReasonPb) obj;
    return Objects.equals(code, other.code)
        && Objects.equals(parameters, other.parameters)
        && Objects.equals(typeValue, other.typeValue);
  }

  @Override
  public int hashCode() {
    return Objects.hash(code, parameters, typeValue);
  }

  @Override
  public String toString() {
    return new ToStringer(TerminationReasonPb.class)
        .add("code", code)
        .add("parameters", parameters)
        .add("typeValue", typeValue)
        .toString();
  }
}
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibraries.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UninstallLibraries.UninstallLibrariesSerializer.class) +@JsonDeserialize(using = UninstallLibraries.UninstallLibrariesDeserializer.class) public class UninstallLibraries { /** Unique identifier for the cluster on which to uninstall these libraries. */ - @JsonProperty("cluster_id") private String clusterId; /** The libraries to uninstall. 
*/ - @JsonProperty("libraries") private Collection libraries; public UninstallLibraries setClusterId(String clusterId) { @@ -56,4 +65,40 @@ public String toString() { .add("libraries", libraries) .toString(); } + + UninstallLibrariesPb toPb() { + UninstallLibrariesPb pb = new UninstallLibrariesPb(); + pb.setClusterId(clusterId); + pb.setLibraries(libraries); + + return pb; + } + + static UninstallLibraries fromPb(UninstallLibrariesPb pb) { + UninstallLibraries model = new UninstallLibraries(); + model.setClusterId(pb.getClusterId()); + model.setLibraries(pb.getLibraries()); + + return model; + } + + public static class UninstallLibrariesSerializer extends JsonSerializer { + @Override + public void serialize(UninstallLibraries value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UninstallLibrariesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UninstallLibrariesDeserializer extends JsonDeserializer { + @Override + public UninstallLibraries deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UninstallLibrariesPb pb = mapper.readValue(p, UninstallLibrariesPb.class); + return UninstallLibraries.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesPb.java new file mode 100755 index 000000000..ae1abd87e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UninstallLibrariesPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("libraries") + private Collection libraries; + + public UninstallLibrariesPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public UninstallLibrariesPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UninstallLibrariesPb that = (UninstallLibrariesPb) o; + return Objects.equals(clusterId, that.clusterId) && Objects.equals(libraries, that.libraries); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, libraries); + } + + @Override + public String toString() { + return new ToStringer(UninstallLibrariesPb.class) + .add("clusterId", clusterId) + .add("libraries", libraries) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java index 33d5b7d3c..567589ab4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UninstallLibrariesResponse.UninstallLibrariesResponseSerializer.class) +@JsonDeserialize(using = UninstallLibrariesResponse.UninstallLibrariesResponseDeserializer.class) public class UninstallLibrariesResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UninstallLibrariesResponse.class).toString(); } + + UninstallLibrariesResponsePb toPb() { + UninstallLibrariesResponsePb pb = new UninstallLibrariesResponsePb(); + + return pb; + } + + static UninstallLibrariesResponse fromPb(UninstallLibrariesResponsePb pb) { + UninstallLibrariesResponse model = new UninstallLibrariesResponse(); + + return model; + } + + public static class UninstallLibrariesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UninstallLibrariesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UninstallLibrariesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UninstallLibrariesResponseDeserializer + extends JsonDeserializer { + @Override + public UninstallLibrariesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UninstallLibrariesResponsePb pb = mapper.readValue(p, UninstallLibrariesResponsePb.class); + return UninstallLibrariesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponsePb.java new file mode 100755 index 000000000..ead3b85e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UninstallLibrariesResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UninstallLibrariesResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java index 9907dd125..045e56839 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UnpinCluster.UnpinClusterSerializer.class) +@JsonDeserialize(using = UnpinCluster.UnpinClusterDeserializer.class) public class UnpinCluster { /** */ - @JsonProperty("cluster_id") private String clusterId; public UnpinCluster setClusterId(String clusterId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(UnpinCluster.class).add("clusterId", clusterId).toString(); } + + UnpinClusterPb toPb() { + UnpinClusterPb pb = new UnpinClusterPb(); + pb.setClusterId(clusterId); + + return pb; + } + + static UnpinCluster fromPb(UnpinClusterPb pb) { + UnpinCluster model = new UnpinCluster(); + model.setClusterId(pb.getClusterId()); + + return model; + } + + public static class UnpinClusterSerializer extends JsonSerializer { + @Override + public void serialize(UnpinCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UnpinClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnpinClusterDeserializer extends JsonDeserializer { + @Override + public UnpinCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnpinClusterPb pb = mapper.readValue(p, UnpinClusterPb.class); + return UnpinCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterPb.java new file mode 100755 index 000000000..82a706de2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UnpinClusterPb { + @JsonProperty("cluster_id") + private String clusterId; + + public UnpinClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UnpinClusterPb that = (UnpinClusterPb) o; + return Objects.equals(clusterId, that.clusterId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId); + } + + @Override + public String toString() { + return new ToStringer(UnpinClusterPb.class).add("clusterId", clusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java index 74a883deb..a60eab724 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java @@ -4,9 
+4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UnpinClusterResponse.UnpinClusterResponseSerializer.class) +@JsonDeserialize(using = UnpinClusterResponse.UnpinClusterResponseDeserializer.class) public class UnpinClusterResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(UnpinClusterResponse.class).toString(); } + + UnpinClusterResponsePb toPb() { + UnpinClusterResponsePb pb = new UnpinClusterResponsePb(); + + return pb; + } + + static UnpinClusterResponse fromPb(UnpinClusterResponsePb pb) { + UnpinClusterResponse model = new UnpinClusterResponse(); + + return model; + } + + public static class UnpinClusterResponseSerializer extends JsonSerializer { + @Override + public void serialize( + UnpinClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UnpinClusterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnpinClusterResponseDeserializer + extends JsonDeserializer { + @Override + public UnpinClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnpinClusterResponsePb pb = mapper.readValue(p, UnpinClusterResponsePb.class); + return UnpinClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponsePb.java new file mode 100755 index 000000000..8377003dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UnpinClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UnpinClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java index 9d4ea8132..121539b74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCluster.UpdateClusterSerializer.class) +@JsonDeserialize(using = UpdateCluster.UpdateClusterDeserializer.class) public class UpdateCluster { /** The cluster to be updated. */ - @JsonProperty("cluster") private UpdateClusterResource cluster; /** ID of the cluster. */ - @JsonProperty("cluster_id") private String clusterId; /** @@ -31,7 +40,6 @@ public class UpdateCluster { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("update_mask") private String updateMask; public UpdateCluster setCluster(UpdateClusterResource cluster) { @@ -84,4 +92,41 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateClusterPb toPb() { + UpdateClusterPb pb = new UpdateClusterPb(); + pb.setCluster(cluster); + pb.setClusterId(clusterId); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateCluster fromPb(UpdateClusterPb pb) { + UpdateCluster model = new UpdateCluster(); + model.setCluster(pb.getCluster()); + model.setClusterId(pb.getClusterId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateClusterSerializer extends JsonSerializer { + @Override + public void serialize(UpdateCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateClusterDeserializer extends JsonDeserializer { + @Override + public UpdateCluster 
deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateClusterPb pb = mapper.readValue(p, UpdateClusterPb.class); + return UpdateCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterPb.java new file mode 100755 index 000000000..7a5caae01 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateClusterPb { + @JsonProperty("cluster") + private UpdateClusterResource cluster; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("update_mask") + private String updateMask; + + public UpdateClusterPb setCluster(UpdateClusterResource cluster) { + this.cluster = cluster; + return this; + } + + public UpdateClusterResource getCluster() { + return cluster; + } + + public UpdateClusterPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public UpdateClusterPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateClusterPb that = (UpdateClusterPb) o; + return Objects.equals(cluster, that.cluster) + && 
Objects.equals(clusterId, that.clusterId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(cluster, clusterId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateClusterPb.class) + .add("cluster", cluster) + .add("clusterId", clusterId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 7d3e13c7d..6deae424b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateClusterResource.UpdateClusterResourceSerializer.class) +@JsonDeserialize(using = UpdateClusterResource.UpdateClusterResourceDeserializer.class) public class UpdateClusterResource { /** * Parameters needed in order to automatically scale clusters up and down based on load. 
Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private AutoScale autoscale; /** @@ -24,21 +34,18 @@ public class UpdateClusterResource { * 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic * termination. */ - @JsonProperty("autotermination_minutes") private Long autoterminationMinutes; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; /** @@ -48,7 +55,6 @@ public class UpdateClusterResource { * every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the * destination of executor logs is `$destination/$clusterId/executor`. */ - @JsonProperty("cluster_log_conf") private ClusterLogConf clusterLogConf; /** @@ -56,7 +62,6 @@ public class UpdateClusterResource { * creation, the cluster name will be an empty string. For job clusters, the cluster name is * automatically set based on the job and job run IDs. */ - @JsonProperty("cluster_name") private String clusterName; /** @@ -68,7 +73,6 @@ public class UpdateClusterResource { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -98,18 +102,15 @@ public class UpdateClusterResource { * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way * that doesn’t have UC nor passthrough enabled. */ - @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; /** Custom docker image BYOC */ - @JsonProperty("docker_image") private DockerImage dockerImage; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** @@ -120,7 +121,6 @@ public class UpdateClusterResource { * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, * driver_node_type_id and node_type_id take precedence. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** @@ -128,18 +128,15 @@ public class UpdateClusterResource { * space when its Spark workers are running low on disk space. This feature requires specific AWS * permissions to function correctly - refer to the User Guide for more details. */ - @JsonProperty("enable_elastic_disk") private Boolean enableElasticDisk; /** Whether to enable LUKS on cluster VMs' local disks */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private GcpAttributes gcpAttributes; /** @@ -147,11 +144,9 @@ public class UpdateClusterResource { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. 
*/ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -160,7 +155,6 @@ public class UpdateClusterResource { *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` */ - @JsonProperty("is_single_node") private Boolean isSingleNode; /** @@ -178,7 +172,6 @@ public class UpdateClusterResource { * *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ - @JsonProperty("kind") private Kind kind; /** @@ -187,7 +180,6 @@ public class UpdateClusterResource { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. */ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -200,21 +192,11 @@ public class UpdateClusterResource { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -224,11 +206,9 @@ public class UpdateClusterResource { *

If left unspecified, the runtime engine defaults to standard unless the spark_version * contains -photon-, in which case Photon will be used. */ - @JsonProperty("runtime_engine") private RuntimeEngine runtimeEngine; /** Single user name if data_security_mode is `SINGLE_USER` */ - @JsonProperty("single_user_name") private String singleUserName; /** @@ -236,7 +216,6 @@ public class UpdateClusterResource { * Users can also pass in a string of extra JVM options to the driver and the executors via * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -252,14 +231,12 @@ public class UpdateClusterResource { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** * The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions * can be retrieved by using the :method:clusters/sparkVersions API call. */ - @JsonProperty("spark_version") private String sparkVersion; /** @@ -267,24 +244,17 @@ public class UpdateClusterResource { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. */ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. */ - @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; /** Cluster Attributes showing for clusters workload types. */ - @JsonProperty("workload_type") private WorkloadType workloadType; public UpdateClusterResource setAutoscale(AutoScale autoscale) { @@ -476,24 +446,6 @@ public String getPolicyId() { return policyId; } - public UpdateClusterResource setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public UpdateClusterResource setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public UpdateClusterResource setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -548,16 +500,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public UpdateClusterResource setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public UpdateClusterResource setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -602,15 +544,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && 
Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -639,15 +578,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -676,17 +612,107 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); } + + UpdateClusterResourcePb toPb() { + UpdateClusterResourcePb pb = new UpdateClusterResourcePb(); + pb.setAutoscale(autoscale); + pb.setAutoterminationMinutes(autoterminationMinutes); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterLogConf(clusterLogConf); + pb.setClusterName(clusterName); + pb.setCustomTags(customTags); + pb.setDataSecurityMode(dataSecurityMode); + pb.setDockerImage(dockerImage); + pb.setDriverInstancePoolId(driverInstancePoolId); + pb.setDriverNodeTypeId(driverNodeTypeId); + 
pb.setEnableElasticDisk(enableElasticDisk); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setIsSingleNode(isSingleNode); + pb.setKind(kind); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setRuntimeEngine(runtimeEngine); + pb.setSingleUserName(singleUserName); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSparkVersion(sparkVersion); + pb.setSshPublicKeys(sshPublicKeys); + pb.setUseMlRuntime(useMlRuntime); + pb.setWorkloadType(workloadType); + + return pb; + } + + static UpdateClusterResource fromPb(UpdateClusterResourcePb pb) { + UpdateClusterResource model = new UpdateClusterResource(); + model.setAutoscale(pb.getAutoscale()); + model.setAutoterminationMinutes(pb.getAutoterminationMinutes()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setClusterName(pb.getClusterName()); + model.setCustomTags(pb.getCustomTags()); + model.setDataSecurityMode(pb.getDataSecurityMode()); + model.setDockerImage(pb.getDockerImage()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableElasticDisk(pb.getEnableElasticDisk()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setIsSingleNode(pb.getIsSingleNode()); + model.setKind(pb.getKind()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + model.setRuntimeEngine(pb.getRuntimeEngine()); + model.setSingleUserName(pb.getSingleUserName()); + model.setSparkConf(pb.getSparkConf()); + 
model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSparkVersion(pb.getSparkVersion()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + model.setUseMlRuntime(pb.getUseMlRuntime()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class UpdateClusterResourceSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateClusterResource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateClusterResourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateClusterResourceDeserializer + extends JsonDeserializer { + @Override + public UpdateClusterResource deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateClusterResourcePb pb = mapper.readValue(p, UpdateClusterResourcePb.class); + return UpdateClusterResource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResourcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResourcePb.java new file mode 100755 index 000000000..01b5dd4f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResourcePb.java @@ -0,0 +1,466 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class UpdateClusterResourcePb { + @JsonProperty("autoscale") + private AutoScale autoscale; + + @JsonProperty("autotermination_minutes") + private Long autoterminationMinutes; + + @JsonProperty("aws_attributes") + private AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private AzureAttributes azureAttributes; + + @JsonProperty("cluster_log_conf") + private ClusterLogConf clusterLogConf; + + @JsonProperty("cluster_name") + private String clusterName; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("data_security_mode") + private DataSecurityMode dataSecurityMode; + + @JsonProperty("docker_image") + private DockerImage dockerImage; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_elastic_disk") + private Boolean enableElasticDisk; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("gcp_attributes") + private GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("is_single_node") + private Boolean isSingleNode; + + @JsonProperty("kind") + private Kind kind; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_workers") + private Long numWorkers; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("runtime_engine") + private RuntimeEngine runtimeEngine; + + @JsonProperty("single_user_name") + private String singleUserName; + + 
@JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("spark_version") + private String sparkVersion; + + @JsonProperty("ssh_public_keys") + private Collection sshPublicKeys; + + @JsonProperty("use_ml_runtime") + private Boolean useMlRuntime; + + @JsonProperty("workload_type") + private WorkloadType workloadType; + + public UpdateClusterResourcePb setAutoscale(AutoScale autoscale) { + this.autoscale = autoscale; + return this; + } + + public AutoScale getAutoscale() { + return autoscale; + } + + public UpdateClusterResourcePb setAutoterminationMinutes(Long autoterminationMinutes) { + this.autoterminationMinutes = autoterminationMinutes; + return this; + } + + public Long getAutoterminationMinutes() { + return autoterminationMinutes; + } + + public UpdateClusterResourcePb setAwsAttributes(AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public AwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public UpdateClusterResourcePb setAzureAttributes(AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public UpdateClusterResourcePb setClusterLogConf(ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public ClusterLogConf getClusterLogConf() { + return clusterLogConf; + } + + public UpdateClusterResourcePb setClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public String getClusterName() { + return clusterName; + } + + public UpdateClusterResourcePb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public UpdateClusterResourcePb setDataSecurityMode(DataSecurityMode dataSecurityMode) { + this.dataSecurityMode = dataSecurityMode; + 
return this; + } + + public DataSecurityMode getDataSecurityMode() { + return dataSecurityMode; + } + + public UpdateClusterResourcePb setDockerImage(DockerImage dockerImage) { + this.dockerImage = dockerImage; + return this; + } + + public DockerImage getDockerImage() { + return dockerImage; + } + + public UpdateClusterResourcePb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public UpdateClusterResourcePb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public UpdateClusterResourcePb setEnableElasticDisk(Boolean enableElasticDisk) { + this.enableElasticDisk = enableElasticDisk; + return this; + } + + public Boolean getEnableElasticDisk() { + return enableElasticDisk; + } + + public UpdateClusterResourcePb setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public UpdateClusterResourcePb setGcpAttributes(GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public UpdateClusterResourcePb setInitScripts(Collection initScripts) { + this.initScripts = initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public UpdateClusterResourcePb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public UpdateClusterResourcePb setIsSingleNode(Boolean isSingleNode) { + this.isSingleNode = isSingleNode; + return this; 
+ } + + public Boolean getIsSingleNode() { + return isSingleNode; + } + + public UpdateClusterResourcePb setKind(Kind kind) { + this.kind = kind; + return this; + } + + public Kind getKind() { + return kind; + } + + public UpdateClusterResourcePb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public UpdateClusterResourcePb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + public UpdateClusterResourcePb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public UpdateClusterResourcePb setRuntimeEngine(RuntimeEngine runtimeEngine) { + this.runtimeEngine = runtimeEngine; + return this; + } + + public RuntimeEngine getRuntimeEngine() { + return runtimeEngine; + } + + public UpdateClusterResourcePb setSingleUserName(String singleUserName) { + this.singleUserName = singleUserName; + return this; + } + + public String getSingleUserName() { + return singleUserName; + } + + public UpdateClusterResourcePb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public UpdateClusterResourcePb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public UpdateClusterResourcePb setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + return this; + } + + public String getSparkVersion() { + return sparkVersion; + } + + public UpdateClusterResourcePb setSshPublicKeys(Collection sshPublicKeys) { + this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + public UpdateClusterResourcePb setUseMlRuntime(Boolean useMlRuntime) { + 
this.useMlRuntime = useMlRuntime; + return this; + } + + public Boolean getUseMlRuntime() { + return useMlRuntime; + } + + public UpdateClusterResourcePb setWorkloadType(WorkloadType workloadType) { + this.workloadType = workloadType; + return this; + } + + public WorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateClusterResourcePb that = (UpdateClusterResourcePb) o; + return Objects.equals(autoscale, that.autoscale) + && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(clusterName, that.clusterName) + && Objects.equals(customTags, that.customTags) + && Objects.equals(dataSecurityMode, that.dataSecurityMode) + && Objects.equals(dockerImage, that.dockerImage) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableElasticDisk, that.enableElasticDisk) + && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(isSingleNode, that.isSingleNode) + && Objects.equals(kind, that.kind) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(numWorkers, that.numWorkers) + && Objects.equals(policyId, that.policyId) + && Objects.equals(runtimeEngine, that.runtimeEngine) + && Objects.equals(singleUserName, that.singleUserName) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sparkVersion, that.sparkVersion) + && 
Objects.equals(sshPublicKeys, that.sshPublicKeys) + && Objects.equals(useMlRuntime, that.useMlRuntime) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscale, + autoterminationMinutes, + awsAttributes, + azureAttributes, + clusterLogConf, + clusterName, + customTags, + dataSecurityMode, + dockerImage, + driverInstancePoolId, + driverNodeTypeId, + enableElasticDisk, + enableLocalDiskEncryption, + gcpAttributes, + initScripts, + instancePoolId, + isSingleNode, + kind, + nodeTypeId, + numWorkers, + policyId, + runtimeEngine, + singleUserName, + sparkConf, + sparkEnvVars, + sparkVersion, + sshPublicKeys, + useMlRuntime, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(UpdateClusterResourcePb.class) + .add("autoscale", autoscale) + .add("autoterminationMinutes", autoterminationMinutes) + .add("awsAttributes", awsAttributes) + .add("azureAttributes", azureAttributes) + .add("clusterLogConf", clusterLogConf) + .add("clusterName", clusterName) + .add("customTags", customTags) + .add("dataSecurityMode", dataSecurityMode) + .add("dockerImage", dockerImage) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableElasticDisk", enableElasticDisk) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", instancePoolId) + .add("isSingleNode", isSingleNode) + .add("kind", kind) + .add("nodeTypeId", nodeTypeId) + .add("numWorkers", numWorkers) + .add("policyId", policyId) + .add("runtimeEngine", runtimeEngine) + .add("singleUserName", singleUserName) + .add("sparkConf", sparkConf) + .add("sparkEnvVars", sparkEnvVars) + .add("sparkVersion", sparkVersion) + .add("sshPublicKeys", sshPublicKeys) + .add("useMlRuntime", useMlRuntime) + .add("workloadType", workloadType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java index 9647b99d1..11e018679 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateClusterResponse.UpdateClusterResponseSerializer.class) +@JsonDeserialize(using = UpdateClusterResponse.UpdateClusterResponseDeserializer.class) public class UpdateClusterResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateClusterResponse.class).toString(); } + + UpdateClusterResponsePb toPb() { + UpdateClusterResponsePb pb = new UpdateClusterResponsePb(); + + return pb; + } + + static UpdateClusterResponse fromPb(UpdateClusterResponsePb pb) { + UpdateClusterResponse model = new UpdateClusterResponse(); + + return model; + } + + public static class UpdateClusterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateClusterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateClusterResponsePb 
pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateClusterResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateClusterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateClusterResponsePb pb = mapper.readValue(p, UpdateClusterResponsePb.class); + return UpdateClusterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponsePb.java new file mode 100755 index 000000000..ae7980186 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateClusterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateClusterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java index 62feccc35..f282077b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + 
UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponsePb.java new file mode 100755 index 000000000..f2b4bedbf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java index 33bc00832..dfb01acc3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location back by UC Volumes. 
*/ @Generated +@JsonSerialize(using = VolumesStorageInfo.VolumesStorageInfoSerializer.class) +@JsonDeserialize(using = VolumesStorageInfo.VolumesStorageInfoDeserializer.class) public class VolumesStorageInfo { /** * UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` or * `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` */ - @JsonProperty("destination") private String destination; public VolumesStorageInfo setDestination(String destination) { @@ -43,4 +53,38 @@ public int hashCode() { public String toString() { return new ToStringer(VolumesStorageInfo.class).add("destination", destination).toString(); } + + VolumesStorageInfoPb toPb() { + VolumesStorageInfoPb pb = new VolumesStorageInfoPb(); + pb.setDestination(destination); + + return pb; + } + + static VolumesStorageInfo fromPb(VolumesStorageInfoPb pb) { + VolumesStorageInfo model = new VolumesStorageInfo(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class VolumesStorageInfoSerializer extends JsonSerializer { + @Override + public void serialize(VolumesStorageInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VolumesStorageInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VolumesStorageInfoDeserializer extends JsonDeserializer { + @Override + public VolumesStorageInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VolumesStorageInfoPb pb = mapper.readValue(p, VolumesStorageInfoPb.class); + return VolumesStorageInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfoPb.java new file mode 100755 index 000000000..247564974 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location back by UC Volumes. */ +@Generated +class VolumesStorageInfoPb { + @JsonProperty("destination") + private String destination; + + public VolumesStorageInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VolumesStorageInfoPb that = (VolumesStorageInfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(VolumesStorageInfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java index 6faf119c7..19f3d8055 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Cluster Attributes showing for clusters workload types. */ @Generated +@JsonSerialize(using = WorkloadType.WorkloadTypeSerializer.class) +@JsonDeserialize(using = WorkloadType.WorkloadTypeDeserializer.class) public class WorkloadType { /** defined what type of clients can use the cluster. E.g. 
Notebooks, Jobs */ - @JsonProperty("clients") private ClientsTypes clients; public WorkloadType setClients(ClientsTypes clients) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(WorkloadType.class).add("clients", clients).toString(); } + + WorkloadTypePb toPb() { + WorkloadTypePb pb = new WorkloadTypePb(); + pb.setClients(clients); + + return pb; + } + + static WorkloadType fromPb(WorkloadTypePb pb) { + WorkloadType model = new WorkloadType(); + model.setClients(pb.getClients()); + + return model; + } + + public static class WorkloadTypeSerializer extends JsonSerializer { + @Override + public void serialize(WorkloadType value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkloadTypePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkloadTypeDeserializer extends JsonDeserializer { + @Override + public WorkloadType deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkloadTypePb pb = mapper.readValue(p, WorkloadTypePb.class); + return WorkloadType.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTypePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTypePb.java new file mode 100755 index 000000000..6cc9876e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTypePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Cluster Attributes showing for clusters workload types. 
*/ +@Generated +class WorkloadTypePb { + @JsonProperty("clients") + private ClientsTypes clients; + + public WorkloadTypePb setClients(ClientsTypes clients) { + this.clients = clients; + return this; + } + + public ClientsTypes getClients() { + return clients; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkloadTypePb that = (WorkloadTypePb) o; + return Objects.equals(clients, that.clients); + } + + @Override + public int hashCode() { + return Objects.hash(clients); + } + + @Override + public String toString() { + return new ToStringer(WorkloadTypePb.class).add("clients", clients).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java index 0537dbb67..32572333d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A storage location in Workspace Filesystem (WSFS) */ @Generated +@JsonSerialize(using = 
WorkspaceStorageInfo.WorkspaceStorageInfoSerializer.class) +@JsonDeserialize(using = WorkspaceStorageInfo.WorkspaceStorageInfoDeserializer.class) public class WorkspaceStorageInfo { /** wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh` */ - @JsonProperty("destination") private String destination; public WorkspaceStorageInfo setDestination(String destination) { @@ -40,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(WorkspaceStorageInfo.class).add("destination", destination).toString(); } + + WorkspaceStorageInfoPb toPb() { + WorkspaceStorageInfoPb pb = new WorkspaceStorageInfoPb(); + pb.setDestination(destination); + + return pb; + } + + static WorkspaceStorageInfo fromPb(WorkspaceStorageInfoPb pb) { + WorkspaceStorageInfo model = new WorkspaceStorageInfo(); + model.setDestination(pb.getDestination()); + + return model; + } + + public static class WorkspaceStorageInfoSerializer extends JsonSerializer { + @Override + public void serialize( + WorkspaceStorageInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceStorageInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceStorageInfoDeserializer + extends JsonDeserializer { + @Override + public WorkspaceStorageInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceStorageInfoPb pb = mapper.readValue(p, WorkspaceStorageInfoPb.class); + return WorkspaceStorageInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfoPb.java new file mode 100755 index 000000000..3673f7112 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A storage location in Workspace Filesystem (WSFS) */ +@Generated +class WorkspaceStorageInfoPb { + @JsonProperty("destination") + private String destination; + + public WorkspaceStorageInfoPb setDestination(String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceStorageInfoPb that = (WorkspaceStorageInfoPb) o; + return Objects.equals(destination, that.destination); + } + + @Override + public int hashCode() { + return Objects.hash(destination); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceStorageInfoPb.class).add("destination", destination).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java index a3d94b322..e52a5764a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java @@ -4,35 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AuthorizationDetails.AuthorizationDetailsSerializer.class) +@JsonDeserialize(using = AuthorizationDetails.AuthorizationDetailsDeserializer.class) public class AuthorizationDetails { /** * Represents downscoped permission rules with specific access rights. This field is specific to * `workspace_rule_set` constraint. */ - @JsonProperty("grant_rules") private Collection grantRules; /** The acl path of the tree store resource resource. */ - @JsonProperty("resource_legacy_acl_path") private String resourceLegacyAclPath; /** * The resource name to which the authorization rule applies. This field is specific to * `workspace_rule_set` constraint. Format: `workspaces/{workspace_id}/dashboards/{dashboard_id}` */ - @JsonProperty("resource_name") private String resourceName; /** * The type of authorization downscoping policy. 
Ex: `workspace_rule_set` defines access rules for * a specific workspace resource */ - @JsonProperty("type") private String typeValue; public AuthorizationDetails setGrantRules(Collection grantRules) { @@ -96,4 +103,46 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + AuthorizationDetailsPb toPb() { + AuthorizationDetailsPb pb = new AuthorizationDetailsPb(); + pb.setGrantRules(grantRules); + pb.setResourceLegacyAclPath(resourceLegacyAclPath); + pb.setResourceName(resourceName); + pb.setType(typeValue); + + return pb; + } + + static AuthorizationDetails fromPb(AuthorizationDetailsPb pb) { + AuthorizationDetails model = new AuthorizationDetails(); + model.setGrantRules(pb.getGrantRules()); + model.setResourceLegacyAclPath(pb.getResourceLegacyAclPath()); + model.setResourceName(pb.getResourceName()); + model.setType(pb.getType()); + + return model; + } + + public static class AuthorizationDetailsSerializer extends JsonSerializer { + @Override + public void serialize( + AuthorizationDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AuthorizationDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AuthorizationDetailsDeserializer + extends JsonDeserializer { + @Override + public AuthorizationDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AuthorizationDetailsPb pb = mapper.readValue(p, AuthorizationDetailsPb.class); + return AuthorizationDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java index ab39e1f8b..a2c27466f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AuthorizationDetailsGrantRule.AuthorizationDetailsGrantRuleSerializer.class) +@JsonDeserialize( + using = AuthorizationDetailsGrantRule.AuthorizationDetailsGrantRuleDeserializer.class) public class AuthorizationDetailsGrantRule { /** * Permission sets for dashboard are defined in * iam-common/rbac-common/permission-sets/definitions/TreeStoreBasePermissionSets Ex: * `permissionSets/dashboard.runner` */ - @JsonProperty("permission_set") private String permissionSet; public AuthorizationDetailsGrantRule setPermissionSet(String permissionSet) { @@ 
-45,4 +56,42 @@ public String toString() { .add("permissionSet", permissionSet) .toString(); } + + AuthorizationDetailsGrantRulePb toPb() { + AuthorizationDetailsGrantRulePb pb = new AuthorizationDetailsGrantRulePb(); + pb.setPermissionSet(permissionSet); + + return pb; + } + + static AuthorizationDetailsGrantRule fromPb(AuthorizationDetailsGrantRulePb pb) { + AuthorizationDetailsGrantRule model = new AuthorizationDetailsGrantRule(); + model.setPermissionSet(pb.getPermissionSet()); + + return model; + } + + public static class AuthorizationDetailsGrantRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + AuthorizationDetailsGrantRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AuthorizationDetailsGrantRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AuthorizationDetailsGrantRuleDeserializer + extends JsonDeserializer { + @Override + public AuthorizationDetailsGrantRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AuthorizationDetailsGrantRulePb pb = + mapper.readValue(p, AuthorizationDetailsGrantRulePb.class); + return AuthorizationDetailsGrantRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRulePb.java new file mode 100755 index 000000000..4b2a87a54 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRulePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AuthorizationDetailsGrantRulePb { + @JsonProperty("permission_set") + private String permissionSet; + + public AuthorizationDetailsGrantRulePb setPermissionSet(String permissionSet) { + this.permissionSet = permissionSet; + return this; + } + + public String getPermissionSet() { + return permissionSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AuthorizationDetailsGrantRulePb that = (AuthorizationDetailsGrantRulePb) o; + return Objects.equals(permissionSet, that.permissionSet); + } + + @Override + public int hashCode() { + return Objects.hash(permissionSet); + } + + @Override + public String toString() { + return new ToStringer(AuthorizationDetailsGrantRulePb.class) + .add("permissionSet", permissionSet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsPb.java new file mode 100755 index 000000000..5e830d3cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AuthorizationDetailsPb { + @JsonProperty("grant_rules") + private Collection grantRules; + + @JsonProperty("resource_legacy_acl_path") + private String resourceLegacyAclPath; + + @JsonProperty("resource_name") + private String resourceName; + + @JsonProperty("type") + private String typeValue; + + public AuthorizationDetailsPb setGrantRules( + Collection grantRules) { + this.grantRules = grantRules; + return this; + } + + public Collection getGrantRules() { + return grantRules; + } + + public AuthorizationDetailsPb setResourceLegacyAclPath(String resourceLegacyAclPath) { + this.resourceLegacyAclPath = resourceLegacyAclPath; + return this; + } + + public String getResourceLegacyAclPath() { + return resourceLegacyAclPath; + } + + public AuthorizationDetailsPb setResourceName(String resourceName) { + this.resourceName = resourceName; + return this; + } + + public String getResourceName() { + return resourceName; + } + + public AuthorizationDetailsPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AuthorizationDetailsPb that = (AuthorizationDetailsPb) o; + return Objects.equals(grantRules, that.grantRules) + && Objects.equals(resourceLegacyAclPath, that.resourceLegacyAclPath) + && Objects.equals(resourceName, that.resourceName) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(grantRules, resourceLegacyAclPath, resourceName, typeValue); + } + + @Override + public String toString() { + return new 
ToStringer(AuthorizationDetailsPb.class) + .add("grantRules", grantRules) + .add("resourceLegacyAclPath", resourceLegacyAclPath) + .add("resourceName", resourceName) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java deleted file mode 100755 index a2b487b40..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java +++ /dev/null @@ -1,80 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Collection; -import java.util.Objects; - -/** Cancel the results for the a query for a published, embedded dashboard */ -@Generated -public class CancelPublishedQueryExecutionRequest { - /** */ - @JsonIgnore - @QueryParam("dashboard_name") - private String dashboardName; - - /** */ - @JsonIgnore - @QueryParam("dashboard_revision_id") - private String dashboardRevisionId; - - /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ - @JsonIgnore - @QueryParam("tokens") - private Collection tokens; - - public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return 
dashboardRevisionId; - } - - public CancelPublishedQueryExecutionRequest setTokens(Collection tokens) { - this.tokens = tokens; - return this; - } - - public Collection getTokens() { - return tokens; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, tokens); - } - - @Override - public String toString() { - return new ToStringer(CancelPublishedQueryExecutionRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("tokens", tokens) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java deleted file mode 100755 index d84d7214e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java +++ /dev/null @@ -1,83 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CancelQueryExecutionResponseStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("pending") - private Empty pending; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("success") - private Empty success; - - public CancelQueryExecutionResponseStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - public CancelQueryExecutionResponseStatus setPending(Empty pending) { - this.pending = pending; - return this; - } - - public Empty getPending() { - return pending; - } - - public CancelQueryExecutionResponseStatus setSuccess(Empty success) { - this.success = success; - return this; - } - - public Empty getSuccess() { - return success; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o; - return Objects.equals(dataToken, that.dataToken) - && Objects.equals(pending, that.pending) - && Objects.equals(success, that.success); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken, pending, success); - } - - @Override - public String toString() { - return new ToStringer(CancelQueryExecutionResponseStatus.class) - .add("dataToken", dataToken) 
- .add("pending", pending) - .add("success", success) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Converters.java new file mode 100755 index 000000000..9590477af --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.dashboards; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || 
timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java index 9c6ddecd1..f404fb338 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create dashboard */ @Generated +@JsonSerialize(using = CreateDashboardRequest.CreateDashboardRequestSerializer.class) +@JsonDeserialize(using = 
CreateDashboardRequest.CreateDashboardRequestDeserializer.class) public class CreateDashboardRequest { /** */ - @JsonProperty("dashboard") private Dashboard dashboard; public CreateDashboardRequest setDashboard(Dashboard dashboard) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateDashboardRequest.class).add("dashboard", dashboard).toString(); } + + CreateDashboardRequestPb toPb() { + CreateDashboardRequestPb pb = new CreateDashboardRequestPb(); + pb.setDashboard(dashboard); + + return pb; + } + + static CreateDashboardRequest fromPb(CreateDashboardRequestPb pb) { + CreateDashboardRequest model = new CreateDashboardRequest(); + model.setDashboard(pb.getDashboard()); + + return model; + } + + public static class CreateDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public CreateDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDashboardRequestPb pb = mapper.readValue(p, CreateDashboardRequestPb.class); + return CreateDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequestPb.java new file mode 100755 index 000000000..447dd3f79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequestPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create dashboard */ +@Generated +class CreateDashboardRequestPb { + @JsonProperty("dashboard") + private Dashboard dashboard; + + public CreateDashboardRequestPb setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; + return this; + } + + public Dashboard getDashboard() { + return dashboard; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDashboardRequestPb that = (CreateDashboardRequestPb) o; + return Objects.equals(dashboard, that.dashboard); + } + + @Override + public int hashCode() { + return Objects.hash(dashboard); + } + + @Override + public String toString() { + return new ToStringer(CreateDashboardRequestPb.class).add("dashboard", dashboard).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java index 1c364865f..aba74e936 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create dashboard schedule */ @Generated +@JsonSerialize(using = CreateScheduleRequest.CreateScheduleRequestSerializer.class) +@JsonDeserialize(using = CreateScheduleRequest.CreateScheduleRequestDeserializer.class) public class CreateScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. 
*/ - @JsonIgnore private String dashboardId; + private String dashboardId; /** */ - @JsonProperty("schedule") private Schedule schedule; public CreateScheduleRequest setDashboardId(String dashboardId) { @@ -56,4 +65,43 @@ public String toString() { .add("schedule", schedule) .toString(); } + + CreateScheduleRequestPb toPb() { + CreateScheduleRequestPb pb = new CreateScheduleRequestPb(); + pb.setDashboardId(dashboardId); + pb.setSchedule(schedule); + + return pb; + } + + static CreateScheduleRequest fromPb(CreateScheduleRequestPb pb) { + CreateScheduleRequest model = new CreateScheduleRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setSchedule(pb.getSchedule()); + + return model; + } + + public static class CreateScheduleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateScheduleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateScheduleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateScheduleRequestDeserializer + extends JsonDeserializer { + @Override + public CreateScheduleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateScheduleRequestPb pb = mapper.readValue(p, CreateScheduleRequestPb.class); + return CreateScheduleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequestPb.java new file mode 100755 index 000000000..893cc899e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create dashboard schedule */ +@Generated +class CreateScheduleRequestPb { + @JsonIgnore private String dashboardId; + + @JsonProperty("schedule") + private Schedule schedule; + + public CreateScheduleRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public CreateScheduleRequestPb setSchedule(Schedule schedule) { + this.schedule = schedule; + return this; + } + + public Schedule getSchedule() { + return schedule; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateScheduleRequestPb that = (CreateScheduleRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(schedule, that.schedule); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, schedule); + } + + @Override + public String toString() { + return new ToStringer(CreateScheduleRequestPb.class) + .add("dashboardId", dashboardId) + .add("schedule", schedule) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java index 66ce04221..0ba7f3218 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create schedule subscription */ @Generated +@JsonSerialize(using = CreateSubscriptionRequest.CreateSubscriptionRequestSerializer.class) +@JsonDeserialize(using = CreateSubscriptionRequest.CreateSubscriptionRequestDeserializer.class) public class CreateSubscriptionRequest { /** UUID identifying the dashboard to which the subscription belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** UUID identifying the schedule to which the subscription belongs. 
*/ - @JsonIgnore private String scheduleId; + private String scheduleId; /** */ - @JsonProperty("subscription") private Subscription subscription; public CreateSubscriptionRequest setDashboardId(String dashboardId) { @@ -71,4 +80,45 @@ public String toString() { .add("subscription", subscription) .toString(); } + + CreateSubscriptionRequestPb toPb() { + CreateSubscriptionRequestPb pb = new CreateSubscriptionRequestPb(); + pb.setDashboardId(dashboardId); + pb.setScheduleId(scheduleId); + pb.setSubscription(subscription); + + return pb; + } + + static CreateSubscriptionRequest fromPb(CreateSubscriptionRequestPb pb) { + CreateSubscriptionRequest model = new CreateSubscriptionRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setScheduleId(pb.getScheduleId()); + model.setSubscription(pb.getSubscription()); + + return model; + } + + public static class CreateSubscriptionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateSubscriptionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateSubscriptionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateSubscriptionRequestDeserializer + extends JsonDeserializer { + @Override + public CreateSubscriptionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateSubscriptionRequestPb pb = mapper.readValue(p, CreateSubscriptionRequestPb.class); + return CreateSubscriptionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequestPb.java new file mode 100755 index 000000000..d3f775ddd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create schedule subscription */ +@Generated +class CreateSubscriptionRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore private String scheduleId; + + @JsonProperty("subscription") + private Subscription subscription; + + public CreateSubscriptionRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public CreateSubscriptionRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + public CreateSubscriptionRequestPb setSubscription(Subscription subscription) { + this.subscription = subscription; + return this; + } + + public Subscription getSubscription() { + return subscription; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSubscriptionRequestPb that = 
(CreateSubscriptionRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(scheduleId, that.scheduleId) + && Objects.equals(subscription, that.subscription); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, scheduleId, subscription); + } + + @Override + public String toString() { + return new ToStringer(CreateSubscriptionRequestPb.class) + .add("dashboardId", dashboardId) + .add("scheduleId", scheduleId) + .add("subscription", subscription) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java index db2332001..5cca18dd9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CronSchedule.CronScheduleSerializer.class) +@JsonDeserialize(using = CronSchedule.CronScheduleDeserializer.class) public class CronSchedule { /** * A cron expression using quartz syntax. EX: `0 0 8 * * ?` represents everyday at 8am. 
See [Cron @@ -16,7 +27,6 @@ public class CronSchedule { *

[Cron Trigger]: * http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html */ - @JsonProperty("quartz_cron_expression") private String quartzCronExpression; /** @@ -25,7 +35,6 @@ public class CronSchedule { * *

[Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html */ - @JsonProperty("timezone_id") private String timezoneId; public CronSchedule setQuartzCronExpression(String quartzCronExpression) { @@ -67,4 +76,39 @@ public String toString() { .add("timezoneId", timezoneId) .toString(); } + + CronSchedulePb toPb() { + CronSchedulePb pb = new CronSchedulePb(); + pb.setQuartzCronExpression(quartzCronExpression); + pb.setTimezoneId(timezoneId); + + return pb; + } + + static CronSchedule fromPb(CronSchedulePb pb) { + CronSchedule model = new CronSchedule(); + model.setQuartzCronExpression(pb.getQuartzCronExpression()); + model.setTimezoneId(pb.getTimezoneId()); + + return model; + } + + public static class CronScheduleSerializer extends JsonSerializer { + @Override + public void serialize(CronSchedule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CronSchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CronScheduleDeserializer extends JsonDeserializer { + @Override + public CronSchedule deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CronSchedulePb pb = mapper.readValue(p, CronSchedulePb.class); + return CronSchedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedulePb.java new file mode 100755 index 000000000..1d7017c3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedulePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CronSchedulePb { + @JsonProperty("quartz_cron_expression") + private String quartzCronExpression; + + @JsonProperty("timezone_id") + private String timezoneId; + + public CronSchedulePb setQuartzCronExpression(String quartzCronExpression) { + this.quartzCronExpression = quartzCronExpression; + return this; + } + + public String getQuartzCronExpression() { + return quartzCronExpression; + } + + public CronSchedulePb setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronSchedulePb that = (CronSchedulePb) o; + return Objects.equals(quartzCronExpression, that.quartzCronExpression) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(quartzCronExpression, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(CronSchedulePb.class) + .add("quartzCronExpression", quartzCronExpression) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java index d94cee027..be0dc661f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java @@ -4,46 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Dashboard.DashboardSerializer.class) +@JsonDeserialize(using = Dashboard.DashboardDeserializer.class) public class Dashboard { /** The timestamp of when the dashboard was created. */ - @JsonProperty("create_time") private String createTime; /** UUID identifying the dashboard. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The display name of the dashboard. */ - @JsonProperty("display_name") private String displayName; /** * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard * has not been modified since the last read. This field is excluded in List Dashboards responses. */ - @JsonProperty("etag") private String etag; /** The state of the dashboard resource. Used for tracking trashed status. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** * The workspace path of the folder containing the dashboard. Includes leading slash and no * trailing slash. This field is excluded in List Dashboards responses. */ - @JsonProperty("parent_path") private String parentPath; /** * The workspace path of the dashboard asset, including the file name. Exported dashboards always * have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses. */ - @JsonProperty("path") private String path; /** @@ -54,18 +58,15 @@ public class Dashboard { * *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get */ - @JsonProperty("serialized_dashboard") private String serializedDashboard; /** * The timestamp of when the dashboard was last updated by the user. This field is excluded in * List Dashboards responses. */ - @JsonProperty("update_time") private String updateTime; /** The warehouse ID used to run the dashboard. */ - @JsonProperty("warehouse_id") private String warehouseId; public Dashboard setCreateTime(String createTime) { @@ -205,4 +206,55 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + DashboardPb toPb() { + DashboardPb pb = new DashboardPb(); + pb.setCreateTime(createTime); + pb.setDashboardId(dashboardId); + pb.setDisplayName(displayName); + pb.setEtag(etag); + pb.setLifecycleState(lifecycleState); + pb.setParentPath(parentPath); + pb.setPath(path); + pb.setSerializedDashboard(serializedDashboard); + pb.setUpdateTime(updateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static Dashboard fromPb(DashboardPb pb) { + Dashboard model = new Dashboard(); + model.setCreateTime(pb.getCreateTime()); + model.setDashboardId(pb.getDashboardId()); + model.setDisplayName(pb.getDisplayName()); + model.setEtag(pb.getEtag()); + model.setLifecycleState(pb.getLifecycleState()); + model.setParentPath(pb.getParentPath()); + model.setPath(pb.getPath()); + model.setSerializedDashboard(pb.getSerializedDashboard()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class DashboardSerializer extends JsonSerializer { + @Override + public void serialize(Dashboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardDeserializer extends JsonDeserializer { + @Override + public Dashboard deserialize(JsonParser p, DeserializationContext ctxt) throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardPb pb = mapper.readValue(p, DashboardPb.class); + return Dashboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardPb.java new file mode 100755 index 000000000..90e9de840 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DashboardPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("path") + private String path; + + @JsonProperty("serialized_dashboard") + private String serializedDashboard; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public DashboardPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public DashboardPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DashboardPb setDisplayName(String displayName) { + 
this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public DashboardPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DashboardPb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public DashboardPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public DashboardPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public DashboardPb setSerializedDashboard(String serializedDashboard) { + this.serializedDashboard = serializedDashboard; + return this; + } + + public String getSerializedDashboard() { + return serializedDashboard; + } + + public DashboardPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public DashboardPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardPb that = (DashboardPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(etag, that.etag) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(path, that.path) + && Objects.equals(serializedDashboard, that.serializedDashboard) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, 
that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + dashboardId, + displayName, + etag, + lifecycleState, + parentPath, + path, + serializedDashboard, + updateTime, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DashboardPb.class) + .add("createTime", createTime) + .add("dashboardId", dashboardId) + .add("displayName", displayName) + .add("etag", etag) + .add("lifecycleState", lifecycleState) + .add("parentPath", parentPath) + .add("path", path) + .add("serializedDashboard", serializedDashboard) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java index 9700498fe..f86289534 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java @@ -3,27 +3,35 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete dashboard schedule */ 
@Generated +@JsonSerialize(using = DeleteScheduleRequest.DeleteScheduleRequestSerializer.class) +@JsonDeserialize(using = DeleteScheduleRequest.DeleteScheduleRequestDeserializer.class) public class DeleteScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** * The etag for the schedule. Optionally, it can be provided to verify that the schedule has not * been modified from its last retrieval. */ - @JsonIgnore - @QueryParam("etag") private String etag; /** UUID identifying the schedule. */ - @JsonIgnore private String scheduleId; + private String scheduleId; public DeleteScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -75,4 +83,45 @@ public String toString() { .add("scheduleId", scheduleId) .toString(); } + + DeleteScheduleRequestPb toPb() { + DeleteScheduleRequestPb pb = new DeleteScheduleRequestPb(); + pb.setDashboardId(dashboardId); + pb.setEtag(etag); + pb.setScheduleId(scheduleId); + + return pb; + } + + static DeleteScheduleRequest fromPb(DeleteScheduleRequestPb pb) { + DeleteScheduleRequest model = new DeleteScheduleRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setEtag(pb.getEtag()); + model.setScheduleId(pb.getScheduleId()); + + return model; + } + + public static class DeleteScheduleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteScheduleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteScheduleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteScheduleRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteScheduleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteScheduleRequestPb pb = mapper.readValue(p, DeleteScheduleRequestPb.class); + return DeleteScheduleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequestPb.java new file mode 100755 index 000000000..b1042b709 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete dashboard schedule */ +@Generated +class DeleteScheduleRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore + @QueryParam("etag") + private String etag; + + @JsonIgnore private String scheduleId; + + public DeleteScheduleRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DeleteScheduleRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DeleteScheduleRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteScheduleRequestPb that = (DeleteScheduleRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(etag, that.etag) + && 
Objects.equals(scheduleId, that.scheduleId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, etag, scheduleId); + } + + @Override + public String toString() { + return new ToStringer(DeleteScheduleRequestPb.class) + .add("dashboardId", dashboardId) + .add("etag", etag) + .add("scheduleId", scheduleId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java index f21eeb237..0a1587521 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteScheduleResponse.DeleteScheduleResponseSerializer.class) +@JsonDeserialize(using = DeleteScheduleResponse.DeleteScheduleResponseDeserializer.class) public class DeleteScheduleResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteScheduleResponse.class).toString(); } + + DeleteScheduleResponsePb toPb() { + DeleteScheduleResponsePb pb = new DeleteScheduleResponsePb(); + + return pb; + } + + 
static DeleteScheduleResponse fromPb(DeleteScheduleResponsePb pb) { + DeleteScheduleResponse model = new DeleteScheduleResponse(); + + return model; + } + + public static class DeleteScheduleResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteScheduleResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteScheduleResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteScheduleResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteScheduleResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteScheduleResponsePb pb = mapper.readValue(p, DeleteScheduleResponsePb.class); + return DeleteScheduleResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponsePb.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponsePb.java index 94f12df20..012929547 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponsePb.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class ExecuteQueryResponse { +class DeleteScheduleResponsePb { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ExecuteQueryResponse.class).toString(); + return new 
ToStringer(DeleteScheduleResponsePb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java index e69b3bc96..b37f56e64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java @@ -3,30 +3,38 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete schedule subscription */ @Generated +@JsonSerialize(using = DeleteSubscriptionRequest.DeleteSubscriptionRequestSerializer.class) +@JsonDeserialize(using = DeleteSubscriptionRequest.DeleteSubscriptionRequestDeserializer.class) public class DeleteSubscriptionRequest { /** UUID identifying the dashboard which the subscription belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** * The etag for the subscription. Can be optionally provided to ensure that the subscription has * not been modified since the last read. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; /** UUID identifying the schedule which the subscription belongs. */ - @JsonIgnore private String scheduleId; + private String scheduleId; /** UUID identifying the subscription. */ - @JsonIgnore private String subscriptionId; + private String subscriptionId; public DeleteSubscriptionRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -89,4 +97,47 @@ public String toString() { .add("subscriptionId", subscriptionId) .toString(); } + + DeleteSubscriptionRequestPb toPb() { + DeleteSubscriptionRequestPb pb = new DeleteSubscriptionRequestPb(); + pb.setDashboardId(dashboardId); + pb.setEtag(etag); + pb.setScheduleId(scheduleId); + pb.setSubscriptionId(subscriptionId); + + return pb; + } + + static DeleteSubscriptionRequest fromPb(DeleteSubscriptionRequestPb pb) { + DeleteSubscriptionRequest model = new DeleteSubscriptionRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setEtag(pb.getEtag()); + model.setScheduleId(pb.getScheduleId()); + model.setSubscriptionId(pb.getSubscriptionId()); + + return model; + } + + public static class DeleteSubscriptionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSubscriptionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSubscriptionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSubscriptionRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteSubscriptionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSubscriptionRequestPb pb = mapper.readValue(p, DeleteSubscriptionRequestPb.class); + return DeleteSubscriptionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequestPb.java new file mode 100755 index 000000000..19a7930b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete schedule subscription */ +@Generated +class DeleteSubscriptionRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore + @QueryParam("etag") + private String etag; + + @JsonIgnore private String scheduleId; + + @JsonIgnore private String subscriptionId; + + public DeleteSubscriptionRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DeleteSubscriptionRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DeleteSubscriptionRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + public DeleteSubscriptionRequestPb setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSubscriptionRequestPb that = (DeleteSubscriptionRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(etag, that.etag) + && Objects.equals(scheduleId, that.scheduleId) + && Objects.equals(subscriptionId, that.subscriptionId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, etag, scheduleId, subscriptionId); + } + + @Override + public String toString() { + return new ToStringer(DeleteSubscriptionRequestPb.class) + .add("dashboardId", dashboardId) + .add("etag", etag) + .add("scheduleId", scheduleId) + .add("subscriptionId", subscriptionId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java index 6325c783a..777bafb3d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
DeleteSubscriptionResponse.DeleteSubscriptionResponseSerializer.class) +@JsonDeserialize(using = DeleteSubscriptionResponse.DeleteSubscriptionResponseDeserializer.class) public class DeleteSubscriptionResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteSubscriptionResponse.class).toString(); } + + DeleteSubscriptionResponsePb toPb() { + DeleteSubscriptionResponsePb pb = new DeleteSubscriptionResponsePb(); + + return pb; + } + + static DeleteSubscriptionResponse fromPb(DeleteSubscriptionResponsePb pb) { + DeleteSubscriptionResponse model = new DeleteSubscriptionResponse(); + + return model; + } + + public static class DeleteSubscriptionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSubscriptionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSubscriptionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSubscriptionResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteSubscriptionResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSubscriptionResponsePb pb = mapper.readValue(p, DeleteSubscriptionResponsePb.class); + return DeleteSubscriptionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponsePb.java similarity index 79% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponsePb.java index 5aefc388e..89b2cafff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponsePb.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class GetPublishedDashboardEmbeddedResponse { +class DeleteSubscriptionResponsePb { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetPublishedDashboardEmbeddedResponse.class).toString(); + return new ToStringer(DeleteSubscriptionResponsePb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java deleted file mode 100755 index c5223007c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java +++ /dev/null @@ -1,86 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -/** - * Execute query request for published Dashboards. Since published dashboards have the option of - * running as the publisher, the datasets, warehouse_id are excluded from the request and instead - * read from the source (lakeview-config) via the additional parameters (dashboardName and - * dashboardRevisionId) - */ -@Generated -public class ExecutePublishedDashboardQueryRequest { - /** - * Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains - * the list of datasets, warehouse_id, and embedded_credentials - */ - @JsonProperty("dashboard_name") - private String dashboardName; - - /** */ - @JsonProperty("dashboard_revision_id") - private String dashboardRevisionId; - - /** - * A dashboard schedule can override the warehouse used as compute for processing the published - * dashboard queries - */ - @JsonProperty("override_warehouse_id") - private String overrideWarehouseId; - - public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return dashboardRevisionId; - } - - public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) { - this.overrideWarehouseId = overrideWarehouseId; - return this; - } - - public String getOverrideWarehouseId() { - return overrideWarehouseId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != 
o.getClass()) return false; - ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(overrideWarehouseId, that.overrideWarehouseId); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId); - } - - @Override - public String toString() { - return new ToStringer(ExecutePublishedDashboardQueryRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("overrideWarehouseId", overrideWarehouseId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index dfeb33522..c06edace8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -310,6 +310,15 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { return impl.getSpace(request); } + /** + * List Genie spaces. + * + *

Get list of Genie Spaces. + */ + public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { + return impl.listSpaces(request); + } + public Wait startConversation( String spaceId, String content) { return startConversation( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java index 588559a03..e72b23f6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Genie AI Response */ @Generated +@JsonSerialize(using = GenieAttachment.GenieAttachmentSerializer.class) +@JsonDeserialize(using = GenieAttachment.GenieAttachmentDeserializer.class) public class GenieAttachment { /** Attachment ID */ - @JsonProperty("attachment_id") private String attachmentId; /** Query Attachment if Genie responds with a SQL query */ - @JsonProperty("query") private GenieQueryAttachment query; /** Text Attachment if Genie responds with text */ - @JsonProperty("text") private TextAttachment text; public GenieAttachment setAttachmentId(String attachmentId) { @@ 
-72,4 +80,42 @@ public String toString() { .add("text", text) .toString(); } + + GenieAttachmentPb toPb() { + GenieAttachmentPb pb = new GenieAttachmentPb(); + pb.setAttachmentId(attachmentId); + pb.setQuery(query); + pb.setText(text); + + return pb; + } + + static GenieAttachment fromPb(GenieAttachmentPb pb) { + GenieAttachment model = new GenieAttachment(); + model.setAttachmentId(pb.getAttachmentId()); + model.setQuery(pb.getQuery()); + model.setText(pb.getText()); + + return model; + } + + public static class GenieAttachmentSerializer extends JsonSerializer { + @Override + public void serialize(GenieAttachment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieAttachmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieAttachmentDeserializer extends JsonDeserializer { + @Override + public GenieAttachment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieAttachmentPb pb = mapper.readValue(p, GenieAttachmentPb.class); + return GenieAttachment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachmentPb.java new file mode 100755 index 000000000..5903ff90f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachmentPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Genie AI Response */ +@Generated +class GenieAttachmentPb { + @JsonProperty("attachment_id") + private String attachmentId; + + @JsonProperty("query") + private GenieQueryAttachment query; + + @JsonProperty("text") + private TextAttachment text; + + public GenieAttachmentPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieAttachmentPb setQuery(GenieQueryAttachment query) { + this.query = query; + return this; + } + + public GenieQueryAttachment getQuery() { + return query; + } + + public GenieAttachmentPb setText(TextAttachment text) { + this.text = text; + return this; + } + + public TextAttachment getText() { + return text; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieAttachmentPb that = (GenieAttachmentPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(query, that.query) + && Objects.equals(text, that.text); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, query, text); + } + + @Override + public String toString() { + return new ToStringer(GenieAttachmentPb.class) + .add("attachmentId", attachmentId) + .add("query", query) + .add("text", text) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java index 46c2ebd64..127caf8c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenieConversation.GenieConversationSerializer.class) +@JsonDeserialize(using = GenieConversation.GenieConversationDeserializer.class) public class GenieConversation { /** Conversation ID */ - @JsonProperty("conversation_id") private String conversationId; /** Timestamp when the message was created */ - @JsonProperty("created_timestamp") private Long createdTimestamp; /** Conversation ID. 
Legacy identifier, use conversation_id instead */ - @JsonProperty("id") private String id; /** Timestamp when the message was last updated */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Genie space ID */ - @JsonProperty("space_id") private String spaceId; /** Conversation title */ - @JsonProperty("title") private String title; /** ID of the user who created the conversation */ - @JsonProperty("user_id") private Long userId; public GenieConversation setConversationId(String conversationId) { @@ -132,4 +136,50 @@ public String toString() { .add("userId", userId) .toString(); } + + GenieConversationPb toPb() { + GenieConversationPb pb = new GenieConversationPb(); + pb.setConversationId(conversationId); + pb.setCreatedTimestamp(createdTimestamp); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setSpaceId(spaceId); + pb.setTitle(title); + pb.setUserId(userId); + + return pb; + } + + static GenieConversation fromPb(GenieConversationPb pb) { + GenieConversation model = new GenieConversation(); + model.setConversationId(pb.getConversationId()); + model.setCreatedTimestamp(pb.getCreatedTimestamp()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setSpaceId(pb.getSpaceId()); + model.setTitle(pb.getTitle()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class GenieConversationSerializer extends JsonSerializer { + @Override + public void serialize(GenieConversation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieConversationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieConversationDeserializer extends JsonDeserializer { + @Override + public GenieConversation deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieConversationPb pb = mapper.readValue(p, GenieConversationPb.class); + return GenieConversation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationPb.java new file mode 100755 index 000000000..c8442e2e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieConversationPb { + @JsonProperty("conversation_id") + private String conversationId; + + @JsonProperty("created_timestamp") + private Long createdTimestamp; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("space_id") + private String spaceId; + + @JsonProperty("title") + private String title; + + @JsonProperty("user_id") + private Long userId; + + public GenieConversationPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieConversationPb setCreatedTimestamp(Long createdTimestamp) { + this.createdTimestamp = createdTimestamp; + return this; + } + + public Long getCreatedTimestamp() { + return createdTimestamp; + } + + public GenieConversationPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GenieConversationPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = 
lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public GenieConversationPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieConversationPb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + public GenieConversationPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieConversationPb that = (GenieConversationPb) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(createdTimestamp, that.createdTimestamp) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(title, that.title) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + conversationId, createdTimestamp, id, lastUpdatedTimestamp, spaceId, title, userId); + } + + @Override + public String toString() { + return new ToStringer(GenieConversationPb.class) + .add("conversationId", conversationId) + .add("createdTimestamp", createdTimestamp) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("spaceId", spaceId) + .add("title", title) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java index 0628a2e08..5c75c6652 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenieCreateConversationMessageRequest.GenieCreateConversationMessageRequestSerializer.class) +@JsonDeserialize( + using = + GenieCreateConversationMessageRequest.GenieCreateConversationMessageRequestDeserializer + .class) public class GenieCreateConversationMessageRequest { /** User message content. */ - @JsonProperty("content") private String content; /** The ID associated with the conversation. */ - @JsonIgnore private String conversationId; + private String conversationId; /** The ID associated with the Genie space where the conversation is started. 
*/ - @JsonIgnore private String spaceId; + private String spaceId; public GenieCreateConversationMessageRequest setContent(String content) { this.content = content; @@ -70,4 +84,46 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieCreateConversationMessageRequestPb toPb() { + GenieCreateConversationMessageRequestPb pb = new GenieCreateConversationMessageRequestPb(); + pb.setContent(content); + pb.setConversationId(conversationId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieCreateConversationMessageRequest fromPb(GenieCreateConversationMessageRequestPb pb) { + GenieCreateConversationMessageRequest model = new GenieCreateConversationMessageRequest(); + model.setContent(pb.getContent()); + model.setConversationId(pb.getConversationId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieCreateConversationMessageRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieCreateConversationMessageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieCreateConversationMessageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieCreateConversationMessageRequestDeserializer + extends JsonDeserializer { + @Override + public GenieCreateConversationMessageRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieCreateConversationMessageRequestPb pb = + mapper.readValue(p, GenieCreateConversationMessageRequestPb.class); + return GenieCreateConversationMessageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequestPb.java new file mode 100755 index 000000000..137babfd2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequestPb.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieCreateConversationMessageRequestPb { + @JsonProperty("content") + private String content; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String spaceId; + + public GenieCreateConversationMessageRequestPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public GenieCreateConversationMessageRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieCreateConversationMessageRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
GenieCreateConversationMessageRequestPb that = (GenieCreateConversationMessageRequestPb) o; + return Objects.equals(content, that.content) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(content, conversationId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieCreateConversationMessageRequestPb.class) + .add("content", content) + .add("conversationId", conversationId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java index 45fcf1257..5ad544ead 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java @@ -4,23 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Execute message attachment SQL query */ @Generated +@JsonSerialize( + using = + GenieExecuteMessageAttachmentQueryRequest + 
.GenieExecuteMessageAttachmentQueryRequestSerializer.class) +@JsonDeserialize( + using = + GenieExecuteMessageAttachmentQueryRequest + .GenieExecuteMessageAttachmentQueryRequestDeserializer.class) public class GenieExecuteMessageAttachmentQueryRequest { /** Attachment ID */ - @JsonIgnore private String attachmentId; + private String attachmentId; /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieExecuteMessageAttachmentQueryRequest setAttachmentId(String attachmentId) { this.attachmentId = attachmentId; @@ -83,4 +100,53 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieExecuteMessageAttachmentQueryRequestPb toPb() { + GenieExecuteMessageAttachmentQueryRequestPb pb = + new GenieExecuteMessageAttachmentQueryRequestPb(); + pb.setAttachmentId(attachmentId); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieExecuteMessageAttachmentQueryRequest fromPb( + GenieExecuteMessageAttachmentQueryRequestPb pb) { + GenieExecuteMessageAttachmentQueryRequest model = + new GenieExecuteMessageAttachmentQueryRequest(); + model.setAttachmentId(pb.getAttachmentId()); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieExecuteMessageAttachmentQueryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieExecuteMessageAttachmentQueryRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieExecuteMessageAttachmentQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GenieExecuteMessageAttachmentQueryRequestDeserializer + extends JsonDeserializer { + @Override + public GenieExecuteMessageAttachmentQueryRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieExecuteMessageAttachmentQueryRequestPb pb = + mapper.readValue(p, GenieExecuteMessageAttachmentQueryRequestPb.class); + return GenieExecuteMessageAttachmentQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequestPb.java new file mode 100755 index 000000000..f1d23fc42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequestPb.java @@ -0,0 +1,83 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Execute message attachment SQL query */ +@Generated +class GenieExecuteMessageAttachmentQueryRequestPb { + @JsonIgnore private String attachmentId; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieExecuteMessageAttachmentQueryRequestPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieExecuteMessageAttachmentQueryRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieExecuteMessageAttachmentQueryRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieExecuteMessageAttachmentQueryRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieExecuteMessageAttachmentQueryRequestPb that = + (GenieExecuteMessageAttachmentQueryRequestPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new 
ToStringer(GenieExecuteMessageAttachmentQueryRequestPb.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java index 52305df74..0995523a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java @@ -4,20 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** [Deprecated] Execute SQL query in a conversation message */ @Generated +@JsonSerialize( + using = GenieExecuteMessageQueryRequest.GenieExecuteMessageQueryRequestSerializer.class) +@JsonDeserialize( + using = GenieExecuteMessageQueryRequest.GenieExecuteMessageQueryRequestDeserializer.class) public class GenieExecuteMessageQueryRequest { /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID 
*/ - @JsonIgnore private String spaceId; + private String spaceId; public GenieExecuteMessageQueryRequest setConversationId(String conversationId) { this.conversationId = conversationId; @@ -69,4 +82,46 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieExecuteMessageQueryRequestPb toPb() { + GenieExecuteMessageQueryRequestPb pb = new GenieExecuteMessageQueryRequestPb(); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieExecuteMessageQueryRequest fromPb(GenieExecuteMessageQueryRequestPb pb) { + GenieExecuteMessageQueryRequest model = new GenieExecuteMessageQueryRequest(); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieExecuteMessageQueryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieExecuteMessageQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieExecuteMessageQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieExecuteMessageQueryRequestDeserializer + extends JsonDeserializer { + @Override + public GenieExecuteMessageQueryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieExecuteMessageQueryRequestPb pb = + mapper.readValue(p, GenieExecuteMessageQueryRequestPb.class); + return GenieExecuteMessageQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequestPb.java new file mode 100755 index 000000000..38fc4353e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** [Deprecated] Execute SQL query in a conversation message */ +@Generated +class GenieExecuteMessageQueryRequestPb { + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieExecuteMessageQueryRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieExecuteMessageQueryRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieExecuteMessageQueryRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieExecuteMessageQueryRequestPb that = 
(GenieExecuteMessageQueryRequestPb) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieExecuteMessageQueryRequestPb.class) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java index a4a38dadf..c0d478b9b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java @@ -4,23 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Generate full query result download */ @Generated +@JsonSerialize( + using = + GenieGenerateDownloadFullQueryResultRequest + .GenieGenerateDownloadFullQueryResultRequestSerializer.class) 
+@JsonDeserialize( + using = + GenieGenerateDownloadFullQueryResultRequest + .GenieGenerateDownloadFullQueryResultRequestDeserializer.class) public class GenieGenerateDownloadFullQueryResultRequest { /** Attachment ID */ - @JsonIgnore private String attachmentId; + private String attachmentId; /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGenerateDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { this.attachmentId = attachmentId; @@ -84,4 +101,53 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGenerateDownloadFullQueryResultRequestPb toPb() { + GenieGenerateDownloadFullQueryResultRequestPb pb = + new GenieGenerateDownloadFullQueryResultRequestPb(); + pb.setAttachmentId(attachmentId); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGenerateDownloadFullQueryResultRequest fromPb( + GenieGenerateDownloadFullQueryResultRequestPb pb) { + GenieGenerateDownloadFullQueryResultRequest model = + new GenieGenerateDownloadFullQueryResultRequest(); + model.setAttachmentId(pb.getAttachmentId()); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGenerateDownloadFullQueryResultRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGenerateDownloadFullQueryResultRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieGenerateDownloadFullQueryResultRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGenerateDownloadFullQueryResultRequestDeserializer + extends 
JsonDeserializer { + @Override + public GenieGenerateDownloadFullQueryResultRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGenerateDownloadFullQueryResultRequestPb pb = + mapper.readValue(p, GenieGenerateDownloadFullQueryResultRequestPb.class); + return GenieGenerateDownloadFullQueryResultRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequestPb.java new file mode 100755 index 000000000..24333f50f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequestPb.java @@ -0,0 +1,83 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Generate full query result download */ +@Generated +class GenieGenerateDownloadFullQueryResultRequestPb { + @JsonIgnore private String attachmentId; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGenerateDownloadFullQueryResultRequestPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGenerateDownloadFullQueryResultRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGenerateDownloadFullQueryResultRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGenerateDownloadFullQueryResultRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultRequestPb that = + (GenieGenerateDownloadFullQueryResultRequestPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new 
ToStringer(GenieGenerateDownloadFullQueryResultRequestPb.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java index e51751c8b..c3b13dc88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java @@ -4,13 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenieGenerateDownloadFullQueryResultResponse + .GenieGenerateDownloadFullQueryResultResponseSerializer.class) +@JsonDeserialize( + using = + GenieGenerateDownloadFullQueryResultResponse + .GenieGenerateDownloadFullQueryResultResponseDeserializer.class) public class GenieGenerateDownloadFullQueryResultResponse { /** Download ID. 
Use this ID to track the download request in subsequent polling calls */ - @JsonProperty("download_id") private String downloadId; public GenieGenerateDownloadFullQueryResultResponse setDownloadId(String downloadId) { @@ -42,4 +58,47 @@ public String toString() { .add("downloadId", downloadId) .toString(); } + + GenieGenerateDownloadFullQueryResultResponsePb toPb() { + GenieGenerateDownloadFullQueryResultResponsePb pb = + new GenieGenerateDownloadFullQueryResultResponsePb(); + pb.setDownloadId(downloadId); + + return pb; + } + + static GenieGenerateDownloadFullQueryResultResponse fromPb( + GenieGenerateDownloadFullQueryResultResponsePb pb) { + GenieGenerateDownloadFullQueryResultResponse model = + new GenieGenerateDownloadFullQueryResultResponse(); + model.setDownloadId(pb.getDownloadId()); + + return model; + } + + public static class GenieGenerateDownloadFullQueryResultResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGenerateDownloadFullQueryResultResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieGenerateDownloadFullQueryResultResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGenerateDownloadFullQueryResultResponseDeserializer + extends JsonDeserializer { + @Override + public GenieGenerateDownloadFullQueryResultResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGenerateDownloadFullQueryResultResponsePb pb = + mapper.readValue(p, GenieGenerateDownloadFullQueryResultResponsePb.class); + return GenieGenerateDownloadFullQueryResultResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponsePb.java new file mode 100755 index 000000000..23b6c0fc4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieGenerateDownloadFullQueryResultResponsePb { + @JsonProperty("download_id") + private String downloadId; + + public GenieGenerateDownloadFullQueryResultResponsePb setDownloadId(String downloadId) { + this.downloadId = downloadId; + return this; + } + + public String getDownloadId() { + return downloadId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultResponsePb that = + (GenieGenerateDownloadFullQueryResultResponsePb) o; + return Objects.equals(downloadId, that.downloadId); + } + + @Override + public int hashCode() { + return Objects.hash(downloadId); + } + + @Override + public String toString() { + return new ToStringer(GenieGenerateDownloadFullQueryResultResponsePb.class) + .add("downloadId", downloadId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java index fd3c14ae6..e7a4b7e5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java @@ -4,20 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get conversation message */ @Generated +@JsonSerialize( + using = GenieGetConversationMessageRequest.GenieGetConversationMessageRequestSerializer.class) +@JsonDeserialize( + using = GenieGetConversationMessageRequest.GenieGetConversationMessageRequestDeserializer.class) public class GenieGetConversationMessageRequest { /** The ID associated with the target conversation. */ - @JsonIgnore private String conversationId; + private String conversationId; /** The ID associated with the target message from the identified conversation. */ - @JsonIgnore private String messageId; + private String messageId; /** The ID associated with the Genie space where the target conversation is located. 
*/ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetConversationMessageRequest setConversationId(String conversationId) { this.conversationId = conversationId; @@ -69,4 +82,46 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGetConversationMessageRequestPb toPb() { + GenieGetConversationMessageRequestPb pb = new GenieGetConversationMessageRequestPb(); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetConversationMessageRequest fromPb(GenieGetConversationMessageRequestPb pb) { + GenieGetConversationMessageRequest model = new GenieGetConversationMessageRequest(); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetConversationMessageRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetConversationMessageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieGetConversationMessageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetConversationMessageRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetConversationMessageRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetConversationMessageRequestPb pb = + mapper.readValue(p, GenieGetConversationMessageRequestPb.class); + return GenieGetConversationMessageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequestPb.java new file mode 100755 index 000000000..e196aaf48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get conversation message */ +@Generated +class GenieGetConversationMessageRequestPb { + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGetConversationMessageRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetConversationMessageRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetConversationMessageRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetConversationMessageRequestPb that = 
(GenieGetConversationMessageRequestPb) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetConversationMessageRequestPb.class) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java index c3f1cf2e4..bd173acf2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java @@ -4,29 +4,46 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get download full query result */ @Generated +@JsonSerialize( + using = + GenieGetDownloadFullQueryResultRequest.GenieGetDownloadFullQueryResultRequestSerializer + .class) +@JsonDeserialize( + using = + 
GenieGetDownloadFullQueryResultRequest.GenieGetDownloadFullQueryResultRequestDeserializer + .class) public class GenieGetDownloadFullQueryResultRequest { /** Attachment ID */ - @JsonIgnore private String attachmentId; + private String attachmentId; /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** * Download ID. This ID is provided by the [Generate Download * endpoint](:method:genie/generateDownloadFullQueryResult) */ - @JsonIgnore private String downloadId; + private String downloadId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { this.attachmentId = attachmentId; @@ -100,4 +117,53 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGetDownloadFullQueryResultRequestPb toPb() { + GenieGetDownloadFullQueryResultRequestPb pb = new GenieGetDownloadFullQueryResultRequestPb(); + pb.setAttachmentId(attachmentId); + pb.setConversationId(conversationId); + pb.setDownloadId(downloadId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetDownloadFullQueryResultRequest fromPb( + GenieGetDownloadFullQueryResultRequestPb pb) { + GenieGetDownloadFullQueryResultRequest model = new GenieGetDownloadFullQueryResultRequest(); + model.setAttachmentId(pb.getAttachmentId()); + model.setConversationId(pb.getConversationId()); + model.setDownloadId(pb.getDownloadId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetDownloadFullQueryResultRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetDownloadFullQueryResultRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + 
GenieGetDownloadFullQueryResultRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetDownloadFullQueryResultRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetDownloadFullQueryResultRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetDownloadFullQueryResultRequestPb pb = + mapper.readValue(p, GenieGetDownloadFullQueryResultRequestPb.class); + return GenieGetDownloadFullQueryResultRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequestPb.java new file mode 100755 index 000000000..7da2e12d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequestPb.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get download full query result */ +@Generated +class GenieGetDownloadFullQueryResultRequestPb { + @JsonIgnore private String attachmentId; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String downloadId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGetDownloadFullQueryResultRequestPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetDownloadFullQueryResultRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetDownloadFullQueryResultRequestPb setDownloadId(String downloadId) { + this.downloadId = downloadId; + return this; + } + + public String getDownloadId() { + return downloadId; + } + + public GenieGetDownloadFullQueryResultRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetDownloadFullQueryResultRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultRequestPb that = (GenieGetDownloadFullQueryResultRequestPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(downloadId, that.downloadId) + && Objects.equals(messageId, that.messageId) + 
&& Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, downloadId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetDownloadFullQueryResultRequestPb.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("downloadId", downloadId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java index 490c5c518..8c9436f86 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java @@ -4,16 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenieGetDownloadFullQueryResultResponse.GenieGetDownloadFullQueryResultResponseSerializer + .class) +@JsonDeserialize( + using = + 
GenieGetDownloadFullQueryResultResponse.GenieGetDownloadFullQueryResultResponseDeserializer + .class) public class GenieGetDownloadFullQueryResultResponse { /** * SQL Statement Execution response. See [Get status, manifest, and result first * chunk](:method:statementexecution/getstatement) for more details. */ - @JsonProperty("statement_response") private com.databricks.sdk.service.sql.StatementResponse statementResponse; public GenieGetDownloadFullQueryResultResponse setStatementResponse( @@ -45,4 +61,45 @@ public String toString() { .add("statementResponse", statementResponse) .toString(); } + + GenieGetDownloadFullQueryResultResponsePb toPb() { + GenieGetDownloadFullQueryResultResponsePb pb = new GenieGetDownloadFullQueryResultResponsePb(); + pb.setStatementResponse(statementResponse); + + return pb; + } + + static GenieGetDownloadFullQueryResultResponse fromPb( + GenieGetDownloadFullQueryResultResponsePb pb) { + GenieGetDownloadFullQueryResultResponse model = new GenieGetDownloadFullQueryResultResponse(); + model.setStatementResponse(pb.getStatementResponse()); + + return model; + } + + public static class GenieGetDownloadFullQueryResultResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetDownloadFullQueryResultResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieGetDownloadFullQueryResultResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetDownloadFullQueryResultResponseDeserializer + extends JsonDeserializer { + @Override + public GenieGetDownloadFullQueryResultResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetDownloadFullQueryResultResponsePb pb = + mapper.readValue(p, GenieGetDownloadFullQueryResultResponsePb.class); + return GenieGetDownloadFullQueryResultResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponsePb.java new file mode 100755 index 000000000..8a7f58b11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieGetDownloadFullQueryResultResponsePb { + @JsonProperty("statement_response") + private com.databricks.sdk.service.sql.StatementResponse statementResponse; + + public GenieGetDownloadFullQueryResultResponsePb setStatementResponse( + com.databricks.sdk.service.sql.StatementResponse statementResponse) { + this.statementResponse = statementResponse; + return this; + } + + public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() { + return statementResponse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultResponsePb that = (GenieGetDownloadFullQueryResultResponsePb) o; + return Objects.equals(statementResponse, that.statementResponse); + } + + @Override + public int hashCode() { + return Objects.hash(statementResponse); + } + + @Override + public String toString() { + return new 
ToStringer(GenieGetDownloadFullQueryResultResponsePb.class) + .add("statementResponse", statementResponse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java index b832faa07..bb5d7e1ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java @@ -4,23 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get message attachment SQL query result */ @Generated +@JsonSerialize( + using = + GenieGetMessageAttachmentQueryResultRequest + .GenieGetMessageAttachmentQueryResultRequestSerializer.class) +@JsonDeserialize( + using = + GenieGetMessageAttachmentQueryResultRequest + .GenieGetMessageAttachmentQueryResultRequestDeserializer.class) public class GenieGetMessageAttachmentQueryResultRequest { /** Attachment ID */ - @JsonIgnore private String attachmentId; + private String attachmentId; /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; 
/** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetMessageAttachmentQueryResultRequest setAttachmentId(String attachmentId) { this.attachmentId = attachmentId; @@ -84,4 +101,53 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGetMessageAttachmentQueryResultRequestPb toPb() { + GenieGetMessageAttachmentQueryResultRequestPb pb = + new GenieGetMessageAttachmentQueryResultRequestPb(); + pb.setAttachmentId(attachmentId); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetMessageAttachmentQueryResultRequest fromPb( + GenieGetMessageAttachmentQueryResultRequestPb pb) { + GenieGetMessageAttachmentQueryResultRequest model = + new GenieGetMessageAttachmentQueryResultRequest(); + model.setAttachmentId(pb.getAttachmentId()); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetMessageAttachmentQueryResultRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetMessageAttachmentQueryResultRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieGetMessageAttachmentQueryResultRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetMessageAttachmentQueryResultRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetMessageAttachmentQueryResultRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetMessageAttachmentQueryResultRequestPb pb = + mapper.readValue(p, GenieGetMessageAttachmentQueryResultRequestPb.class); + return GenieGetMessageAttachmentQueryResultRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequestPb.java new file mode 100755 index 000000000..2694e5c88 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequestPb.java @@ -0,0 +1,83 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get message attachment SQL query result */ +@Generated +class GenieGetMessageAttachmentQueryResultRequestPb { + @JsonIgnore private String attachmentId; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGetMessageAttachmentQueryResultRequestPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetMessageAttachmentQueryResultRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetMessageAttachmentQueryResultRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public 
GenieGetMessageAttachmentQueryResultRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetMessageAttachmentQueryResultRequestPb that = + (GenieGetMessageAttachmentQueryResultRequestPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetMessageAttachmentQueryResultRequestPb.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java index a65e75683..8ea93a96d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java @@ -4,20 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** [Deprecated] Get conversation message SQL query result */ @Generated +@JsonSerialize( + using = GenieGetMessageQueryResultRequest.GenieGetMessageQueryResultRequestSerializer.class) +@JsonDeserialize( + using = GenieGetMessageQueryResultRequest.GenieGetMessageQueryResultRequestDeserializer.class) public class GenieGetMessageQueryResultRequest { /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetMessageQueryResultRequest setConversationId(String conversationId) { this.conversationId = conversationId; @@ -69,4 +82,46 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGetMessageQueryResultRequestPb toPb() { + GenieGetMessageQueryResultRequestPb pb = new GenieGetMessageQueryResultRequestPb(); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetMessageQueryResultRequest fromPb(GenieGetMessageQueryResultRequestPb pb) { + GenieGetMessageQueryResultRequest model = new GenieGetMessageQueryResultRequest(); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetMessageQueryResultRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetMessageQueryResultRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieGetMessageQueryResultRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetMessageQueryResultRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetMessageQueryResultRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetMessageQueryResultRequestPb pb = + mapper.readValue(p, GenieGetMessageQueryResultRequestPb.class); + return GenieGetMessageQueryResultRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequestPb.java new file mode 100755 index 000000000..62c7f7970 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** [Deprecated] Get conversation message SQL query result */ +@Generated +class GenieGetMessageQueryResultRequestPb { + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGetMessageQueryResultRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetMessageQueryResultRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetMessageQueryResultRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetMessageQueryResultRequestPb that = (GenieGetMessageQueryResultRequestPb) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetMessageQueryResultRequestPb.class) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java index b63de6899..31271c0fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java @@ -4,16 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GenieGetMessageQueryResultResponse.GenieGetMessageQueryResultResponseSerializer.class) +@JsonDeserialize( + using = GenieGetMessageQueryResultResponse.GenieGetMessageQueryResultResponseDeserializer.class) public class GenieGetMessageQueryResultResponse { /** * SQL Statement Execution response. See [Get status, manifest, and result first * chunk](:method:statementexecution/getstatement) for more details. 
*/ - @JsonProperty("statement_response") private com.databricks.sdk.service.sql.StatementResponse statementResponse; public GenieGetMessageQueryResultResponse setStatementResponse( @@ -45,4 +57,42 @@ public String toString() { .add("statementResponse", statementResponse) .toString(); } + + GenieGetMessageQueryResultResponsePb toPb() { + GenieGetMessageQueryResultResponsePb pb = new GenieGetMessageQueryResultResponsePb(); + pb.setStatementResponse(statementResponse); + + return pb; + } + + static GenieGetMessageQueryResultResponse fromPb(GenieGetMessageQueryResultResponsePb pb) { + GenieGetMessageQueryResultResponse model = new GenieGetMessageQueryResultResponse(); + model.setStatementResponse(pb.getStatementResponse()); + + return model; + } + + public static class GenieGetMessageQueryResultResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetMessageQueryResultResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieGetMessageQueryResultResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetMessageQueryResultResponseDeserializer + extends JsonDeserializer { + @Override + public GenieGetMessageQueryResultResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetMessageQueryResultResponsePb pb = + mapper.readValue(p, GenieGetMessageQueryResultResponsePb.class); + return GenieGetMessageQueryResultResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponsePb.java new file mode 100755 index 000000000..fae868ba2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieGetMessageQueryResultResponsePb { + @JsonProperty("statement_response") + private com.databricks.sdk.service.sql.StatementResponse statementResponse; + + public GenieGetMessageQueryResultResponsePb setStatementResponse( + com.databricks.sdk.service.sql.StatementResponse statementResponse) { + this.statementResponse = statementResponse; + return this; + } + + public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() { + return statementResponse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetMessageQueryResultResponsePb that = (GenieGetMessageQueryResultResponsePb) o; + return Objects.equals(statementResponse, that.statementResponse); + } + + @Override + public int hashCode() { + return Objects.hash(statementResponse); + } + + @Override + public String toString() { + return new ToStringer(GenieGetMessageQueryResultResponsePb.class) + 
.add("statementResponse", statementResponse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java index 588fb6e71..6d331df66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java @@ -4,23 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** [Deprecated] Get conversation message SQL query result */ @Generated +@JsonSerialize( + using = + GenieGetQueryResultByAttachmentRequest.GenieGetQueryResultByAttachmentRequestSerializer + .class) +@JsonDeserialize( + using = + GenieGetQueryResultByAttachmentRequest.GenieGetQueryResultByAttachmentRequestDeserializer + .class) public class GenieGetQueryResultByAttachmentRequest { /** Attachment ID */ - @JsonIgnore private String attachmentId; + private String attachmentId; /** Conversation ID */ - @JsonIgnore private String conversationId; + private String conversationId; /** Message ID */ - @JsonIgnore private String messageId; + private String messageId; /** 
Genie space ID */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetQueryResultByAttachmentRequest setAttachmentId(String attachmentId) { this.attachmentId = attachmentId; @@ -83,4 +100,51 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieGetQueryResultByAttachmentRequestPb toPb() { + GenieGetQueryResultByAttachmentRequestPb pb = new GenieGetQueryResultByAttachmentRequestPb(); + pb.setAttachmentId(attachmentId); + pb.setConversationId(conversationId); + pb.setMessageId(messageId); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetQueryResultByAttachmentRequest fromPb( + GenieGetQueryResultByAttachmentRequestPb pb) { + GenieGetQueryResultByAttachmentRequest model = new GenieGetQueryResultByAttachmentRequest(); + model.setAttachmentId(pb.getAttachmentId()); + model.setConversationId(pb.getConversationId()); + model.setMessageId(pb.getMessageId()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetQueryResultByAttachmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieGetQueryResultByAttachmentRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GenieGetQueryResultByAttachmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetQueryResultByAttachmentRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetQueryResultByAttachmentRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetQueryResultByAttachmentRequestPb pb = + mapper.readValue(p, GenieGetQueryResultByAttachmentRequestPb.class); + return GenieGetQueryResultByAttachmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequestPb.java new file mode 100755 index 000000000..3dbdf56fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequestPb.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** [Deprecated] Get conversation message SQL query result */ +@Generated +class GenieGetQueryResultByAttachmentRequestPb { + @JsonIgnore private String attachmentId; + + @JsonIgnore private String conversationId; + + @JsonIgnore private String messageId; + + @JsonIgnore private String spaceId; + + public GenieGetQueryResultByAttachmentRequestPb setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetQueryResultByAttachmentRequestPb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetQueryResultByAttachmentRequestPb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetQueryResultByAttachmentRequestPb setSpaceId(String 
spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetQueryResultByAttachmentRequestPb that = (GenieGetQueryResultByAttachmentRequestPb) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetQueryResultByAttachmentRequestPb.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java index d259b43df..567cc2ae4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get Genie Space */ @Generated +@JsonSerialize(using = GenieGetSpaceRequest.GenieGetSpaceRequestSerializer.class) +@JsonDeserialize(using = GenieGetSpaceRequest.GenieGetSpaceRequestDeserializer.class) public class GenieGetSpaceRequest { /** The ID associated with the Genie space */ - @JsonIgnore private String spaceId; + private String spaceId; public GenieGetSpaceRequest setSpaceId(String spaceId) { this.spaceId = spaceId; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(GenieGetSpaceRequest.class).add("spaceId", spaceId).toString(); } + + GenieGetSpaceRequestPb toPb() { + GenieGetSpaceRequestPb pb = new GenieGetSpaceRequestPb(); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieGetSpaceRequest fromPb(GenieGetSpaceRequestPb pb) { + GenieGetSpaceRequest model = new GenieGetSpaceRequest(); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieGetSpaceRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GenieGetSpaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieGetSpaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieGetSpaceRequestDeserializer + extends JsonDeserializer { + @Override + public GenieGetSpaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieGetSpaceRequestPb pb = mapper.readValue(p, GenieGetSpaceRequestPb.class); + return GenieGetSpaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequestPb.java new file mode 100755 index 000000000..f69700223 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get Genie Space */ +@Generated +class GenieGetSpaceRequestPb { + @JsonIgnore private String spaceId; + + public GenieGetSpaceRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetSpaceRequestPb that = (GenieGetSpaceRequestPb) o; + return Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetSpaceRequestPb.class).add("spaceId", spaceId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 8396a9cff..c0579b1f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -24,7 +24,7 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) request.getSpaceId(), request.getConversationId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, GenieMessage.class); @@ -45,7 +45,7 @@ public GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( request.getAttachmentId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); } catch (IOException e) { @@ -62,7 +62,7 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( request.getSpaceId(), request.getConversationId(), request.getMessageId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); } catch (IOException e) { @@ -82,7 +82,7 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes request.getAttachmentId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGenerateDownloadFullQueryResultResponse.class); } catch (IOException e) { @@ -103,7 +103,7 @@ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( request.getDownloadId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetDownloadFullQueryResultResponse.class); } catch (IOException e) { @@ -119,7 +119,7 @@ public GenieMessage getMessage(GenieGetConversationMessageRequest request) { request.getSpaceId(), request.getConversationId(), request.getMessageId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieMessage.class); } catch (IOException e) { @@ -139,7 +139,7 @@ public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult( request.getAttachmentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); } catch (IOException e) { @@ -156,7 +156,7 @@ public GenieGetMessageQueryResultResponse getMessageQueryResult( request.getSpaceId(), request.getConversationId(), request.getMessageId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); } catch (IOException e) { @@ -176,7 +176,7 @@ public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( request.getAttachmentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); } catch (IOException e) { @@ -189,7 +189,7 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { String path = String.format("/api/2.0/genie/spaces/%s", request.getSpaceId()); try { Request req = new 
Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GenieSpace.class); } catch (IOException e) { @@ -197,6 +197,19 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { } } + @Override + public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { + String path = "/api/2.0/genie/spaces"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieListSpacesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieStartConversationResponse startConversation( GenieStartConversationMessageRequest request) { @@ -204,7 +217,7 @@ public GenieStartConversationResponse startConversation( String.format("/api/2.0/genie/spaces/%s/start-conversation", request.getSpaceId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, GenieStartConversationResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java new file mode 100755 index 000000000..afc75ad99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** List Genie spaces */ +@Generated +@JsonSerialize(using = GenieListSpacesRequest.GenieListSpacesRequestSerializer.class) +@JsonDeserialize(using = GenieListSpacesRequest.GenieListSpacesRequestDeserializer.class) +public class GenieListSpacesRequest { + /** Maximum number of spaces to return per page */ + private Long pageSize; + + /** Pagination token for getting the next page of results */ + private String pageToken; + + public GenieListSpacesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GenieListSpacesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesRequest that = (GenieListSpacesRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesRequest.class) + .add("pageSize", pageSize) 
+ .add("pageToken", pageToken) + .toString(); + } + + GenieListSpacesRequestPb toPb() { + GenieListSpacesRequestPb pb = new GenieListSpacesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static GenieListSpacesRequest fromPb(GenieListSpacesRequestPb pb) { + GenieListSpacesRequest model = new GenieListSpacesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class GenieListSpacesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieListSpacesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieListSpacesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieListSpacesRequestDeserializer + extends JsonDeserializer { + @Override + public GenieListSpacesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieListSpacesRequestPb pb = mapper.readValue(p, GenieListSpacesRequestPb.class); + return GenieListSpacesRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequestPb.java new file mode 100755 index 000000000..c570046f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Genie spaces */ +@Generated +class GenieListSpacesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public GenieListSpacesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GenieListSpacesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesRequestPb that = (GenieListSpacesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java new file mode 100755 index 000000000..152277907 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +@Generated +@JsonSerialize(using = GenieListSpacesResponse.GenieListSpacesResponseSerializer.class) +@JsonDeserialize(using = GenieListSpacesResponse.GenieListSpacesResponseDeserializer.class) +public class GenieListSpacesResponse { + /** Token to get the next page of results */ + private String nextPageToken; + + /** List of Genie spaces */ + private Collection spaces; + + public GenieListSpacesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GenieListSpacesResponse setSpaces(Collection spaces) { + this.spaces = spaces; + return this; + } + + public Collection getSpaces() { + return spaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesResponse that = (GenieListSpacesResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(spaces, that.spaces); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, spaces); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesResponse.class) + 
.add("nextPageToken", nextPageToken) + .add("spaces", spaces) + .toString(); + } + + GenieListSpacesResponsePb toPb() { + GenieListSpacesResponsePb pb = new GenieListSpacesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSpaces(spaces); + + return pb; + } + + static GenieListSpacesResponse fromPb(GenieListSpacesResponsePb pb) { + GenieListSpacesResponse model = new GenieListSpacesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSpaces(pb.getSpaces()); + + return model; + } + + public static class GenieListSpacesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieListSpacesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieListSpacesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieListSpacesResponseDeserializer + extends JsonDeserializer { + @Override + public GenieListSpacesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieListSpacesResponsePb pb = mapper.readValue(p, GenieListSpacesResponsePb.class); + return GenieListSpacesResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponsePb.java new file mode 100755 index 000000000..9aea93100 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GenieListSpacesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("spaces") + private Collection spaces; + + public GenieListSpacesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GenieListSpacesResponsePb setSpaces(Collection spaces) { + this.spaces = spaces; + return this; + } + + public Collection getSpaces() { + return spaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesResponsePb that = (GenieListSpacesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(spaces, that.spaces); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, spaces); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("spaces", spaces) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java index db243673c..b3f1b3681 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java @@ -4,53 +4,54 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GenieMessage.GenieMessageSerializer.class) +@JsonDeserialize(using = GenieMessage.GenieMessageDeserializer.class) public class GenieMessage { /** AI-generated response to the message */ - @JsonProperty("attachments") private Collection attachments; /** User message content */ - @JsonProperty("content") private String content; /** Conversation ID */ - @JsonProperty("conversation_id") private String conversationId; /** Timestamp when the message was created */ - @JsonProperty("created_timestamp") private Long createdTimestamp; /** Error message if Genie failed to respond to the message */ - @JsonProperty("error") private MessageError error; /** Message ID. Legacy identifier, use message_id instead */ - @JsonProperty("id") private String id; /** Timestamp when the message was last updated */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Message ID */ - @JsonProperty("message_id") private String messageId; /** * The result of SQL query if the message includes a query attachment. Deprecated. Use * `query_result_metadata` in `GenieQueryAttachment` instead. 
*/ - @JsonProperty("query_result") private Result queryResult; /** Genie space ID */ - @JsonProperty("space_id") private String spaceId; /** @@ -69,11 +70,9 @@ public class GenieMessage { * [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * * `CANCELLED`: Message has been cancelled. */ - @JsonProperty("status") private MessageStatus status; /** ID of the user who created the message */ - @JsonProperty("user_id") private Long userId; public GenieMessage setAttachments(Collection attachments) { @@ -237,4 +236,59 @@ public String toString() { .add("userId", userId) .toString(); } + + GenieMessagePb toPb() { + GenieMessagePb pb = new GenieMessagePb(); + pb.setAttachments(attachments); + pb.setContent(content); + pb.setConversationId(conversationId); + pb.setCreatedTimestamp(createdTimestamp); + pb.setError(error); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setMessageId(messageId); + pb.setQueryResult(queryResult); + pb.setSpaceId(spaceId); + pb.setStatus(status); + pb.setUserId(userId); + + return pb; + } + + static GenieMessage fromPb(GenieMessagePb pb) { + GenieMessage model = new GenieMessage(); + model.setAttachments(pb.getAttachments()); + model.setContent(pb.getContent()); + model.setConversationId(pb.getConversationId()); + model.setCreatedTimestamp(pb.getCreatedTimestamp()); + model.setError(pb.getError()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setMessageId(pb.getMessageId()); + model.setQueryResult(pb.getQueryResult()); + model.setSpaceId(pb.getSpaceId()); + model.setStatus(pb.getStatus()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class GenieMessageSerializer extends JsonSerializer { + @Override + public void serialize(GenieMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public 
static class GenieMessageDeserializer extends JsonDeserializer { + @Override + public GenieMessage deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieMessagePb pb = mapper.readValue(p, GenieMessagePb.class); + return GenieMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessagePb.java new file mode 100755 index 000000000..c5c4b9f6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessagePb.java @@ -0,0 +1,210 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GenieMessagePb { + @JsonProperty("attachments") + private Collection attachments; + + @JsonProperty("content") + private String content; + + @JsonProperty("conversation_id") + private String conversationId; + + @JsonProperty("created_timestamp") + private Long createdTimestamp; + + @JsonProperty("error") + private MessageError error; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("message_id") + private String messageId; + + @JsonProperty("query_result") + private Result queryResult; + + @JsonProperty("space_id") + private String spaceId; + + @JsonProperty("status") + private MessageStatus status; + + @JsonProperty("user_id") + private Long userId; + + public GenieMessagePb setAttachments(Collection attachments) { + this.attachments = 
attachments; + return this; + } + + public Collection getAttachments() { + return attachments; + } + + public GenieMessagePb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public GenieMessagePb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieMessagePb setCreatedTimestamp(Long createdTimestamp) { + this.createdTimestamp = createdTimestamp; + return this; + } + + public Long getCreatedTimestamp() { + return createdTimestamp; + } + + public GenieMessagePb setError(MessageError error) { + this.error = error; + return this; + } + + public MessageError getError() { + return error; + } + + public GenieMessagePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GenieMessagePb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public GenieMessagePb setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieMessagePb setQueryResult(Result queryResult) { + this.queryResult = queryResult; + return this; + } + + public Result getQueryResult() { + return queryResult; + } + + public GenieMessagePb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieMessagePb setStatus(MessageStatus status) { + this.status = status; + return this; + } + + public MessageStatus getStatus() { + return status; + } + + public GenieMessagePb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieMessagePb that = (GenieMessagePb) o; + return Objects.equals(attachments, that.attachments) + && Objects.equals(content, that.content) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(createdTimestamp, that.createdTimestamp) + && Objects.equals(error, that.error) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(messageId, that.messageId) + && Objects.equals(queryResult, that.queryResult) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(status, that.status) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + attachments, + content, + conversationId, + createdTimestamp, + error, + id, + lastUpdatedTimestamp, + messageId, + queryResult, + spaceId, + status, + userId); + } + + @Override + public String toString() { + return new ToStringer(GenieMessagePb.class) + .add("attachments", attachments) + .add("content", content) + .add("conversationId", conversationId) + .add("createdTimestamp", createdTimestamp) + .add("error", error) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("messageId", messageId) + .add("queryResult", queryResult) + .add("spaceId", spaceId) + .add("status", status) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java index 5ad10ce62..388592fc3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java @@ -4,40 +4,44 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenieQueryAttachment.GenieQueryAttachmentSerializer.class) +@JsonDeserialize(using = GenieQueryAttachment.GenieQueryAttachmentDeserializer.class) public class GenieQueryAttachment { /** Description of the query */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("id") private String id; /** Time when the user updated the query last */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** AI generated SQL query */ - @JsonProperty("query") private String query; /** Metadata associated with the query result. */ - @JsonProperty("query_result_metadata") private GenieResultMetadata queryResultMetadata; /** * Statement Execution API statement id. Use [Get status, manifest, and result first * chunk](:method:statementexecution/getstatement) to get the full result data. 
*/ - @JsonProperty("statement_id") private String statementId; /** Name of the query */ - @JsonProperty("title") private String title; public GenieQueryAttachment setDescription(String description) { @@ -135,4 +139,52 @@ public String toString() { .add("title", title) .toString(); } + + GenieQueryAttachmentPb toPb() { + GenieQueryAttachmentPb pb = new GenieQueryAttachmentPb(); + pb.setDescription(description); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setQuery(query); + pb.setQueryResultMetadata(queryResultMetadata); + pb.setStatementId(statementId); + pb.setTitle(title); + + return pb; + } + + static GenieQueryAttachment fromPb(GenieQueryAttachmentPb pb) { + GenieQueryAttachment model = new GenieQueryAttachment(); + model.setDescription(pb.getDescription()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setQuery(pb.getQuery()); + model.setQueryResultMetadata(pb.getQueryResultMetadata()); + model.setStatementId(pb.getStatementId()); + model.setTitle(pb.getTitle()); + + return model; + } + + public static class GenieQueryAttachmentSerializer extends JsonSerializer { + @Override + public void serialize( + GenieQueryAttachment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieQueryAttachmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieQueryAttachmentDeserializer + extends JsonDeserializer { + @Override + public GenieQueryAttachment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieQueryAttachmentPb pb = mapper.readValue(p, GenieQueryAttachmentPb.class); + return GenieQueryAttachment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachmentPb.java new file mode 100755 index 000000000..361d4a011 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachmentPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieQueryAttachmentPb { + @JsonProperty("description") + private String description; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("query") + private String query; + + @JsonProperty("query_result_metadata") + private GenieResultMetadata queryResultMetadata; + + @JsonProperty("statement_id") + private String statementId; + + @JsonProperty("title") + private String title; + + public GenieQueryAttachmentPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public GenieQueryAttachmentPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GenieQueryAttachmentPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public GenieQueryAttachmentPb setQuery(String 
query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public GenieQueryAttachmentPb setQueryResultMetadata(GenieResultMetadata queryResultMetadata) { + this.queryResultMetadata = queryResultMetadata; + return this; + } + + public GenieResultMetadata getQueryResultMetadata() { + return queryResultMetadata; + } + + public GenieQueryAttachmentPb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + public GenieQueryAttachmentPb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieQueryAttachmentPb that = (GenieQueryAttachmentPb) o; + return Objects.equals(description, that.description) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(query, that.query) + && Objects.equals(queryResultMetadata, that.queryResultMetadata) + && Objects.equals(statementId, that.statementId) + && Objects.equals(title, that.title); + } + + @Override + public int hashCode() { + return Objects.hash( + description, id, lastUpdatedTimestamp, query, queryResultMetadata, statementId, title); + } + + @Override + public String toString() { + return new ToStringer(GenieQueryAttachmentPb.class) + .add("description", description) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("query", query) + .add("queryResultMetadata", queryResultMetadata) + .add("statementId", statementId) + .add("title", title) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java index 
838208d24..a7e29bd55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenieResultMetadata.GenieResultMetadataSerializer.class) +@JsonDeserialize(using = GenieResultMetadata.GenieResultMetadataDeserializer.class) public class GenieResultMetadata { /** Indicates whether the result set is truncated. */ - @JsonProperty("is_truncated") private Boolean isTruncated; /** The number of rows in the result set. 
*/ - @JsonProperty("row_count") private Long rowCount; public GenieResultMetadata setIsTruncated(Boolean isTruncated) { @@ -55,4 +64,41 @@ public String toString() { .add("rowCount", rowCount) .toString(); } + + GenieResultMetadataPb toPb() { + GenieResultMetadataPb pb = new GenieResultMetadataPb(); + pb.setIsTruncated(isTruncated); + pb.setRowCount(rowCount); + + return pb; + } + + static GenieResultMetadata fromPb(GenieResultMetadataPb pb) { + GenieResultMetadata model = new GenieResultMetadata(); + model.setIsTruncated(pb.getIsTruncated()); + model.setRowCount(pb.getRowCount()); + + return model; + } + + public static class GenieResultMetadataSerializer extends JsonSerializer { + @Override + public void serialize(GenieResultMetadata value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieResultMetadataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieResultMetadataDeserializer + extends JsonDeserializer { + @Override + public GenieResultMetadata deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieResultMetadataPb pb = mapper.readValue(p, GenieResultMetadataPb.class); + return GenieResultMetadata.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadataPb.java new file mode 100755 index 000000000..5008d409c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadataPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieResultMetadataPb { + @JsonProperty("is_truncated") + private Boolean isTruncated; + + @JsonProperty("row_count") + private Long rowCount; + + public GenieResultMetadataPb setIsTruncated(Boolean isTruncated) { + this.isTruncated = isTruncated; + return this; + } + + public Boolean getIsTruncated() { + return isTruncated; + } + + public GenieResultMetadataPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieResultMetadataPb that = (GenieResultMetadataPb) o; + return Objects.equals(isTruncated, that.isTruncated) && Objects.equals(rowCount, that.rowCount); + } + + @Override + public int hashCode() { + return Objects.hash(isTruncated, rowCount); + } + + @Override + public String toString() { + return new ToStringer(GenieResultMetadataPb.class) + .add("isTruncated", isTruncated) + .add("rowCount", rowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index b47db20e0..98dd0b4d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -110,6 +110,13 @@ GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( */ GenieSpace getSpace(GenieGetSpaceRequest genieGetSpaceRequest); + /** + * List Genie spaces. + * + *

Get list of Genie Spaces. + */ + GenieListSpacesResponse listSpaces(GenieListSpacesRequest genieListSpacesRequest); + /** * Start conversation. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java index 4ac20ce24..91c960655 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenieSpace.GenieSpaceSerializer.class) +@JsonDeserialize(using = GenieSpace.GenieSpaceDeserializer.class) public class GenieSpace { /** Description of the Genie Space */ - @JsonProperty("description") private String description; /** Genie space ID */ - @JsonProperty("space_id") private String spaceId; /** Title of the Genie Space */ - @JsonProperty("title") private String title; public GenieSpace setDescription(String description) { @@ -71,4 +79,41 @@ public String toString() { .add("title", title) .toString(); } + + GenieSpacePb toPb() { + GenieSpacePb pb = new GenieSpacePb(); + pb.setDescription(description); + pb.setSpaceId(spaceId); + pb.setTitle(title); + + 
return pb; + } + + static GenieSpace fromPb(GenieSpacePb pb) { + GenieSpace model = new GenieSpace(); + model.setDescription(pb.getDescription()); + model.setSpaceId(pb.getSpaceId()); + model.setTitle(pb.getTitle()); + + return model; + } + + public static class GenieSpaceSerializer extends JsonSerializer { + @Override + public void serialize(GenieSpace value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieSpacePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieSpaceDeserializer extends JsonDeserializer { + @Override + public GenieSpace deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieSpacePb pb = mapper.readValue(p, GenieSpacePb.class); + return GenieSpace.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpacePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpacePb.java new file mode 100755 index 000000000..65154236f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpacePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieSpacePb { + @JsonProperty("description") + private String description; + + @JsonProperty("space_id") + private String spaceId; + + @JsonProperty("title") + private String title; + + public GenieSpacePb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public GenieSpacePb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieSpacePb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieSpacePb that = (GenieSpacePb) o; + return Objects.equals(description, that.description) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(title, that.title); + } + + @Override + public int hashCode() { + return Objects.hash(description, spaceId, title); + } + + @Override + public String toString() { + return new ToStringer(GenieSpacePb.class) + .add("description", description) + .add("spaceId", spaceId) + .add("title", title) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java index 1d4df5b07..917af8236 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java @@ -4,18 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GenieStartConversationMessageRequest.GenieStartConversationMessageRequestSerializer.class) +@JsonDeserialize( + using = + GenieStartConversationMessageRequest.GenieStartConversationMessageRequestDeserializer.class) public class GenieStartConversationMessageRequest { /** The text of the message that starts the conversation. */ - @JsonProperty("content") private String content; /** The ID associated with the Genie space where you want to start a conversation. 
*/ - @JsonIgnore private String spaceId; + private String spaceId; public GenieStartConversationMessageRequest setContent(String content) { this.content = content; @@ -55,4 +68,44 @@ public String toString() { .add("spaceId", spaceId) .toString(); } + + GenieStartConversationMessageRequestPb toPb() { + GenieStartConversationMessageRequestPb pb = new GenieStartConversationMessageRequestPb(); + pb.setContent(content); + pb.setSpaceId(spaceId); + + return pb; + } + + static GenieStartConversationMessageRequest fromPb(GenieStartConversationMessageRequestPb pb) { + GenieStartConversationMessageRequest model = new GenieStartConversationMessageRequest(); + model.setContent(pb.getContent()); + model.setSpaceId(pb.getSpaceId()); + + return model; + } + + public static class GenieStartConversationMessageRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieStartConversationMessageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieStartConversationMessageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieStartConversationMessageRequestDeserializer + extends JsonDeserializer { + @Override + public GenieStartConversationMessageRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieStartConversationMessageRequestPb pb = + mapper.readValue(p, GenieStartConversationMessageRequestPb.class); + return GenieStartConversationMessageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequestPb.java new file mode 100755 index 000000000..e5f692906 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieStartConversationMessageRequestPb { + @JsonProperty("content") + private String content; + + @JsonIgnore private String spaceId; + + public GenieStartConversationMessageRequestPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public GenieStartConversationMessageRequestPb setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieStartConversationMessageRequestPb that = (GenieStartConversationMessageRequestPb) o; + return Objects.equals(content, that.content) && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(content, spaceId); + } + + @Override + public String 
toString() { + return new ToStringer(GenieStartConversationMessageRequestPb.class) + .add("content", content) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java index d4a2f8079..7ad1c19cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GenieStartConversationResponse.GenieStartConversationResponseSerializer.class) +@JsonDeserialize( + using = GenieStartConversationResponse.GenieStartConversationResponseDeserializer.class) public class GenieStartConversationResponse { /** */ - @JsonProperty("conversation") private GenieConversation conversation; /** Conversation ID */ - @JsonProperty("conversation_id") private String conversationId; /** */ - @JsonProperty("message") private GenieMessage message; /** Message ID */ - @JsonProperty("message_id") private String messageId; public 
GenieStartConversationResponse setConversation(GenieConversation conversation) { @@ -86,4 +95,48 @@ public String toString() { .add("messageId", messageId) .toString(); } + + GenieStartConversationResponsePb toPb() { + GenieStartConversationResponsePb pb = new GenieStartConversationResponsePb(); + pb.setConversation(conversation); + pb.setConversationId(conversationId); + pb.setMessage(message); + pb.setMessageId(messageId); + + return pb; + } + + static GenieStartConversationResponse fromPb(GenieStartConversationResponsePb pb) { + GenieStartConversationResponse model = new GenieStartConversationResponse(); + model.setConversation(pb.getConversation()); + model.setConversationId(pb.getConversationId()); + model.setMessage(pb.getMessage()); + model.setMessageId(pb.getMessageId()); + + return model; + } + + public static class GenieStartConversationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GenieStartConversationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenieStartConversationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenieStartConversationResponseDeserializer + extends JsonDeserializer { + @Override + public GenieStartConversationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenieStartConversationResponsePb pb = + mapper.readValue(p, GenieStartConversationResponsePb.class); + return GenieStartConversationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponsePb.java new file mode 100755 index 000000000..28dbc9804 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponsePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenieStartConversationResponsePb { + @JsonProperty("conversation") + private GenieConversation conversation; + + @JsonProperty("conversation_id") + private String conversationId; + + @JsonProperty("message") + private GenieMessage message; + + @JsonProperty("message_id") + private String messageId; + + public GenieStartConversationResponsePb setConversation(GenieConversation conversation) { + this.conversation = conversation; + return this; + } + + public GenieConversation getConversation() { + return conversation; + } + + public GenieStartConversationResponsePb setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieStartConversationResponsePb setMessage(GenieMessage message) { + this.message = message; + return this; + } + + public GenieMessage getMessage() { + return message; + } + + public GenieStartConversationResponsePb setMessageId(String messageId) { + this.messageId = 
messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieStartConversationResponsePb that = (GenieStartConversationResponsePb) o; + return Objects.equals(conversation, that.conversation) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(message, that.message) + && Objects.equals(messageId, that.messageId); + } + + @Override + public int hashCode() { + return Objects.hash(conversation, conversationId, message, messageId); + } + + @Override + public String toString() { + return new ToStringer(GenieStartConversationResponsePb.class) + .add("conversation", conversation) + .add("conversationId", conversationId) + .add("message", message) + .add("messageId", messageId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java index 3c623f7d4..dbee1217a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get dashboard */ @Generated +@JsonSerialize(using = GetDashboardRequest.GetDashboardRequestSerializer.class) +@JsonDeserialize(using = GetDashboardRequest.GetDashboardRequestDeserializer.class) public class GetDashboardRequest { /** UUID identifying the dashboard. */ - @JsonIgnore private String dashboardId; + private String dashboardId; public GetDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetDashboardRequest.class).add("dashboardId", dashboardId).toString(); } + + GetDashboardRequestPb toPb() { + GetDashboardRequestPb pb = new GetDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static GetDashboardRequest fromPb(GetDashboardRequestPb pb) { + GetDashboardRequest model = new GetDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class GetDashboardRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public GetDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDashboardRequestPb pb = mapper.readValue(p, GetDashboardRequestPb.class); + return GetDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequestPb.java new file mode 100755 index 000000000..8e03824d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get dashboard */ +@Generated +class GetDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public GetDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDashboardRequestPb that = (GetDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(GetDashboardRequestPb.class).add("dashboardId", dashboardId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java index d35848af6..28761ddd7 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get published dashboard */ @Generated +@JsonSerialize(using = GetPublishedDashboardRequest.GetPublishedDashboardRequestSerializer.class) +@JsonDeserialize( + using = GetPublishedDashboardRequest.GetPublishedDashboardRequestDeserializer.class) public class GetPublishedDashboardRequest { /** UUID identifying the published dashboard. 
*/ - @JsonIgnore private String dashboardId; + private String dashboardId; public GetPublishedDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -41,4 +53,41 @@ public String toString() { .add("dashboardId", dashboardId) .toString(); } + + GetPublishedDashboardRequestPb toPb() { + GetPublishedDashboardRequestPb pb = new GetPublishedDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static GetPublishedDashboardRequest fromPb(GetPublishedDashboardRequestPb pb) { + GetPublishedDashboardRequest model = new GetPublishedDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class GetPublishedDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public GetPublishedDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedDashboardRequestPb pb = mapper.readValue(p, GetPublishedDashboardRequestPb.class); + return GetPublishedDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequestPb.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequestPb.java index b88922e54..7162a01c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequestPb.java @@ -7,13 +7,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Read a published dashboard in an embedded ui. */ +/** Get published dashboard */ @Generated -public class GetPublishedDashboardEmbeddedRequest { - /** UUID identifying the published dashboard. 
*/ +class GetPublishedDashboardRequestPb { @JsonIgnore private String dashboardId; - public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) { + public GetPublishedDashboardRequestPb setDashboardId(String dashboardId) { this.dashboardId = dashboardId; return this; } @@ -26,7 +25,7 @@ public String getDashboardId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o; + GetPublishedDashboardRequestPb that = (GetPublishedDashboardRequestPb) o; return Objects.equals(dashboardId, that.dashboardId); } @@ -37,7 +36,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetPublishedDashboardEmbeddedRequest.class) + return new ToStringer(GetPublishedDashboardRequestPb.class) .add("dashboardId", dashboardId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java index 6bba3f0d8..25d9973f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java @@ -3,25 +3,36 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Read an information of a published dashboard to mint an OAuth token. */ @Generated +@JsonSerialize( + using = + GetPublishedDashboardTokenInfoRequest.GetPublishedDashboardTokenInfoRequestSerializer.class) +@JsonDeserialize( + using = + GetPublishedDashboardTokenInfoRequest.GetPublishedDashboardTokenInfoRequestDeserializer + .class) public class GetPublishedDashboardTokenInfoRequest { /** UUID identifying the published dashboard. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** Provided external value to be included in the custom claim. */ - @JsonIgnore - @QueryParam("external_value") private String externalValue; /** Provided external viewer id to be included in the custom claim. 
*/ - @JsonIgnore - @QueryParam("external_viewer_id") private String externalViewerId; public GetPublishedDashboardTokenInfoRequest setDashboardId(String dashboardId) { @@ -74,4 +85,46 @@ public String toString() { .add("externalViewerId", externalViewerId) .toString(); } + + GetPublishedDashboardTokenInfoRequestPb toPb() { + GetPublishedDashboardTokenInfoRequestPb pb = new GetPublishedDashboardTokenInfoRequestPb(); + pb.setDashboardId(dashboardId); + pb.setExternalValue(externalValue); + pb.setExternalViewerId(externalViewerId); + + return pb; + } + + static GetPublishedDashboardTokenInfoRequest fromPb(GetPublishedDashboardTokenInfoRequestPb pb) { + GetPublishedDashboardTokenInfoRequest model = new GetPublishedDashboardTokenInfoRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setExternalValue(pb.getExternalValue()); + model.setExternalViewerId(pb.getExternalViewerId()); + + return model; + } + + public static class GetPublishedDashboardTokenInfoRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedDashboardTokenInfoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedDashboardTokenInfoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedDashboardTokenInfoRequestDeserializer + extends JsonDeserializer { + @Override + public GetPublishedDashboardTokenInfoRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedDashboardTokenInfoRequestPb pb = + mapper.readValue(p, GetPublishedDashboardTokenInfoRequestPb.class); + return GetPublishedDashboardTokenInfoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequestPb.java new file mode 100755 index 000000000..3ee1e8342 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Read an information of a published dashboard to mint an OAuth token. 
*/ +@Generated +class GetPublishedDashboardTokenInfoRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore + @QueryParam("external_value") + private String externalValue; + + @JsonIgnore + @QueryParam("external_viewer_id") + private String externalViewerId; + + public GetPublishedDashboardTokenInfoRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public GetPublishedDashboardTokenInfoRequestPb setExternalValue(String externalValue) { + this.externalValue = externalValue; + return this; + } + + public String getExternalValue() { + return externalValue; + } + + public GetPublishedDashboardTokenInfoRequestPb setExternalViewerId(String externalViewerId) { + this.externalViewerId = externalViewerId; + return this; + } + + public String getExternalViewerId() { + return externalViewerId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardTokenInfoRequestPb that = (GetPublishedDashboardTokenInfoRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(externalValue, that.externalValue) + && Objects.equals(externalViewerId, that.externalViewerId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, externalValue, externalViewerId); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardTokenInfoRequestPb.class) + .add("dashboardId", dashboardId) + .add("externalValue", externalValue) + .add("externalViewerId", externalViewerId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java index e7022d311..769a11435 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java @@ -4,29 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetPublishedDashboardTokenInfoResponse.GetPublishedDashboardTokenInfoResponseSerializer + .class) +@JsonDeserialize( + using = + GetPublishedDashboardTokenInfoResponse.GetPublishedDashboardTokenInfoResponseDeserializer + .class) public class GetPublishedDashboardTokenInfoResponse { /** * Authorization constraints for accessing the published dashboard. Currently includes * `workspace_rule_set` and could be enriched with `unity_catalog_privileges` before oAuth token * generation. */ - @JsonProperty("authorization_details") private Collection authorizationDetails; /** * Custom claim generated from external_value and external_viewer_id. Format: * `urn:aibi:external_data:::` */ - @JsonProperty("custom_claim") private String customClaim; /** Scope defining access permissions. 
*/ - @JsonProperty("scope") private String scope; public GetPublishedDashboardTokenInfoResponse setAuthorizationDetails( @@ -80,4 +94,49 @@ public String toString() { .add("scope", scope) .toString(); } + + GetPublishedDashboardTokenInfoResponsePb toPb() { + GetPublishedDashboardTokenInfoResponsePb pb = new GetPublishedDashboardTokenInfoResponsePb(); + pb.setAuthorizationDetails(authorizationDetails); + pb.setCustomClaim(customClaim); + pb.setScope(scope); + + return pb; + } + + static GetPublishedDashboardTokenInfoResponse fromPb( + GetPublishedDashboardTokenInfoResponsePb pb) { + GetPublishedDashboardTokenInfoResponse model = new GetPublishedDashboardTokenInfoResponse(); + model.setAuthorizationDetails(pb.getAuthorizationDetails()); + model.setCustomClaim(pb.getCustomClaim()); + model.setScope(pb.getScope()); + + return model; + } + + public static class GetPublishedDashboardTokenInfoResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedDashboardTokenInfoResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetPublishedDashboardTokenInfoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedDashboardTokenInfoResponseDeserializer + extends JsonDeserializer { + @Override + public GetPublishedDashboardTokenInfoResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedDashboardTokenInfoResponsePb pb = + mapper.readValue(p, GetPublishedDashboardTokenInfoResponsePb.class); + return GetPublishedDashboardTokenInfoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponsePb.java new file mode 100755 index 000000000..2b1bb5e31 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponsePb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPublishedDashboardTokenInfoResponsePb { + @JsonProperty("authorization_details") + private Collection authorizationDetails; + + @JsonProperty("custom_claim") + private String customClaim; + + @JsonProperty("scope") + private String scope; + + public GetPublishedDashboardTokenInfoResponsePb setAuthorizationDetails( + Collection authorizationDetails) { + this.authorizationDetails = authorizationDetails; + return this; + } + + public Collection getAuthorizationDetails() { + return authorizationDetails; + } + + public GetPublishedDashboardTokenInfoResponsePb setCustomClaim(String customClaim) { + this.customClaim = customClaim; + return this; + } + + public String getCustomClaim() { + return customClaim; + } + + public GetPublishedDashboardTokenInfoResponsePb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardTokenInfoResponsePb that = (GetPublishedDashboardTokenInfoResponsePb) o; + return Objects.equals(authorizationDetails, that.authorizationDetails) + && Objects.equals(customClaim, that.customClaim) + && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(authorizationDetails, customClaim, scope); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardTokenInfoResponsePb.class) + .add("authorizationDetails", authorizationDetails) + .add("customClaim", customClaim) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java index 19c45e234..fc4606e39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get dashboard schedule */ @Generated +@JsonSerialize(using = 
GetScheduleRequest.GetScheduleRequestSerializer.class) +@JsonDeserialize(using = GetScheduleRequest.GetScheduleRequestDeserializer.class) public class GetScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** UUID identifying the schedule. */ - @JsonIgnore private String scheduleId; + private String scheduleId; public GetScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -55,4 +66,40 @@ public String toString() { .add("scheduleId", scheduleId) .toString(); } + + GetScheduleRequestPb toPb() { + GetScheduleRequestPb pb = new GetScheduleRequestPb(); + pb.setDashboardId(dashboardId); + pb.setScheduleId(scheduleId); + + return pb; + } + + static GetScheduleRequest fromPb(GetScheduleRequestPb pb) { + GetScheduleRequest model = new GetScheduleRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setScheduleId(pb.getScheduleId()); + + return model; + } + + public static class GetScheduleRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetScheduleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetScheduleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetScheduleRequestDeserializer extends JsonDeserializer { + @Override + public GetScheduleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetScheduleRequestPb pb = mapper.readValue(p, GetScheduleRequestPb.class); + return GetScheduleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequestPb.java new file mode 100755 index 000000000..4f2a66d66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get dashboard schedule */ +@Generated +class GetScheduleRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore private String scheduleId; + + public GetScheduleRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public GetScheduleRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetScheduleRequestPb that = (GetScheduleRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(scheduleId, that.scheduleId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, scheduleId); + } + + @Override + public String toString() { + return new ToStringer(GetScheduleRequestPb.class) + .add("dashboardId", dashboardId) + .add("scheduleId", scheduleId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java index 8fb7d6e6f..bc82c822c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get schedule subscription */ @Generated +@JsonSerialize(using = GetSubscriptionRequest.GetSubscriptionRequestSerializer.class) +@JsonDeserialize(using = GetSubscriptionRequest.GetSubscriptionRequestDeserializer.class) public class GetSubscriptionRequest { /** UUID identifying the dashboard which the subscription belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** UUID identifying the schedule which the subscription belongs. */ - @JsonIgnore private String scheduleId; + private String scheduleId; /** UUID identifying the subscription. 
*/ - @JsonIgnore private String subscriptionId; + private String subscriptionId; public GetSubscriptionRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -69,4 +80,45 @@ public String toString() { .add("subscriptionId", subscriptionId) .toString(); } + + GetSubscriptionRequestPb toPb() { + GetSubscriptionRequestPb pb = new GetSubscriptionRequestPb(); + pb.setDashboardId(dashboardId); + pb.setScheduleId(scheduleId); + pb.setSubscriptionId(subscriptionId); + + return pb; + } + + static GetSubscriptionRequest fromPb(GetSubscriptionRequestPb pb) { + GetSubscriptionRequest model = new GetSubscriptionRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setScheduleId(pb.getScheduleId()); + model.setSubscriptionId(pb.getSubscriptionId()); + + return model; + } + + public static class GetSubscriptionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetSubscriptionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSubscriptionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSubscriptionRequestDeserializer + extends JsonDeserializer { + @Override + public GetSubscriptionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSubscriptionRequestPb pb = mapper.readValue(p, GetSubscriptionRequestPb.class); + return GetSubscriptionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequestPb.java new file mode 100755 index 000000000..fe9578dc9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequestPb.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get schedule subscription */ +@Generated +class GetSubscriptionRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore private String scheduleId; + + @JsonIgnore private String subscriptionId; + + public GetSubscriptionRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public GetSubscriptionRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + public GetSubscriptionRequestPb setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSubscriptionRequestPb that = (GetSubscriptionRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(scheduleId, 
that.scheduleId) + && Objects.equals(subscriptionId, that.subscriptionId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, scheduleId, subscriptionId); + } + + @Override + public String toString() { + return new ToStringer(GetSubscriptionRequestPb.class) + .add("dashboardId", dashboardId) + .add("scheduleId", scheduleId) + .add("subscriptionId", subscriptionId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java index ec34c7536..eb5fda3a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java @@ -23,20 +23,6 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) { impl = mock; } - public void getPublishedDashboardEmbedded(String dashboardId) { - getPublishedDashboardEmbedded( - new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId)); - } - - /** - * Read a published dashboard in an embedded ui. - * - *

Get the current published dashboard within an embedded context. - */ - public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { - impl.getPublishedDashboardEmbedded(request); - } - public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) { return getPublishedDashboardTokenInfo( new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java index 38c982eb1..4d542d563 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java @@ -16,21 +16,6 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) { this.apiClient = apiClient; } - @Override - public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { - String path = - String.format( - "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - apiClient.execute(req, GetPublishedDashboardEmbeddedResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - @Override public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( GetPublishedDashboardTokenInfoRequest request) { @@ -39,7 +24,7 @@ public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( "/api/2.0/lakeview/dashboards/%s/published/tokeninfo", request.getDashboardId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
return apiClient.execute(req, GetPublishedDashboardTokenInfoResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java index a7fbb8cdb..cad465780 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java @@ -12,14 +12,6 @@ */ @Generated public interface LakeviewEmbeddedService { - /** - * Read a published dashboard in an embedded ui. - * - *

Get the current published dashboard within an embedded context. - */ - void getPublishedDashboardEmbedded( - GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest); - /** * Read an information of a published dashboard to mint an OAuth token. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index ac931a0e7..4fbf5992d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -21,7 +21,7 @@ public Dashboard create(CreateDashboardRequest request) { String path = "/api/2.0/lakeview/dashboards"; try { Request req = new Request("POST", path, apiClient.serialize(request.getDashboard())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Dashboard.class); @@ -36,7 +36,7 @@ public Schedule createSchedule(CreateScheduleRequest request) { String.format("/api/2.0/lakeview/dashboards/%s/schedules", request.getDashboardId()); try { Request req = new Request("POST", path, apiClient.serialize(request.getSchedule())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Schedule.class); @@ -53,7 +53,7 @@ public Subscription createSubscription(CreateSubscriptionRequest request) { request.getDashboardId(), request.getScheduleId()); try { Request req = new Request("POST", path, apiClient.serialize(request.getSubscription())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Subscription.class); @@ -70,7 +70,7 @@ public void deleteSchedule(DeleteScheduleRequest request) { request.getDashboardId(), request.getScheduleId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteScheduleResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public void deleteSubscription(DeleteSubscriptionRequest request) { request.getDashboardId(), request.getScheduleId(), request.getSubscriptionId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteSubscriptionResponse.class); } catch (IOException e) { @@ -99,7 +99,7 @@ public Dashboard get(GetDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Dashboard.class); } catch (IOException e) { @@ -113,7 +113,7 @@ public PublishedDashboard getPublished(GetPublishedDashboardRequest request) { String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PublishedDashboard.class); } catch (IOException e) { @@ -129,7 +129,7 @@ public Schedule getSchedule(GetScheduleRequest request) { request.getDashboardId(), request.getScheduleId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Schedule.class); } catch (IOException e) { @@ -145,7 +145,7 @@ public Subscription getSubscription(GetSubscriptionRequest request) { request.getDashboardId(), request.getScheduleId(), request.getSubscriptionId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Subscription.class); } catch (IOException e) { @@ -158,7 +158,7 @@ public ListDashboardsResponse list(ListDashboardsRequest request) { String path = "/api/2.0/lakeview/dashboards"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListDashboardsResponse.class); } catch (IOException e) { @@ -172,7 +172,7 @@ public ListSchedulesResponse listSchedules(ListSchedulesRequest request) { String.format("/api/2.0/lakeview/dashboards/%s/schedules", request.getDashboardId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSchedulesResponse.class); } catch (IOException e) { @@ -188,7 +188,7 @@ public ListSubscriptionsResponse listSubscriptions(ListSubscriptionsRequest requ request.getDashboardId(), request.getScheduleId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSubscriptionsResponse.class); } catch (IOException e) { @@ -201,7 +201,7 @@ public Dashboard migrate(MigrateDashboardRequest request) { String path = "/api/2.0/lakeview/dashboards/migrate"; try { Request req = new Request("POST", path, 
apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Dashboard.class); @@ -216,7 +216,7 @@ public PublishedDashboard publish(PublishRequest request) { String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PublishedDashboard.class); @@ -230,7 +230,7 @@ public void trash(TrashDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, TrashDashboardResponse.class); } catch (IOException e) { @@ -244,7 +244,7 @@ public void unpublish(UnpublishDashboardRequest request) { String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, UnpublishDashboardResponse.class); } catch (IOException e) { @@ -257,7 +257,7 @@ public Dashboard update(UpdateDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getDashboard())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, Dashboard.class); @@ -274,7 +274,7 @@ public Schedule updateSchedule(UpdateScheduleRequest request) { request.getDashboardId(), request.getScheduleId()); try { Request req = new Request("PUT", path, apiClient.serialize(request.getSchedule())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Schedule.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java index 2a7a7d8ec..bc765c3a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java @@ -3,38 +3,40 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List dashboards */ @Generated +@JsonSerialize(using = ListDashboardsRequest.ListDashboardsRequestSerializer.class) +@JsonDeserialize(using = 
ListDashboardsRequest.ListDashboardsRequestDeserializer.class) public class ListDashboardsRequest { /** The number of dashboards to return per page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * A page token, received from a previous `ListDashboards` call. This token can be used to * retrieve the subsequent page. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * The flag to include dashboards located in the trash. If unspecified, only active dashboards * will be returned. */ - @JsonIgnore - @QueryParam("show_trashed") private Boolean showTrashed; /** `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard. */ - @JsonIgnore - @QueryParam("view") private DashboardView view; public ListDashboardsRequest setPageSize(Long pageSize) { @@ -98,4 +100,47 @@ public String toString() { .add("view", view) .toString(); } + + ListDashboardsRequestPb toPb() { + ListDashboardsRequestPb pb = new ListDashboardsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setShowTrashed(showTrashed); + pb.setView(view); + + return pb; + } + + static ListDashboardsRequest fromPb(ListDashboardsRequestPb pb) { + ListDashboardsRequest model = new ListDashboardsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setShowTrashed(pb.getShowTrashed()); + model.setView(pb.getView()); + + return model; + } + + public static class ListDashboardsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDashboardsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDashboardsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDashboardsRequestDeserializer + extends JsonDeserializer { + @Override + public ListDashboardsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDashboardsRequestPb pb = mapper.readValue(p, ListDashboardsRequestPb.class); + return ListDashboardsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequestPb.java new file mode 100755 index 000000000..09686f15b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List dashboards */ +@Generated +class ListDashboardsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("show_trashed") + private Boolean showTrashed; + + @JsonIgnore + @QueryParam("view") + private DashboardView view; + + public ListDashboardsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDashboardsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListDashboardsRequestPb setShowTrashed(Boolean showTrashed) { + this.showTrashed = showTrashed; + return this; + } + + public Boolean getShowTrashed() { + return showTrashed; + } + + public ListDashboardsRequestPb setView(DashboardView view) { + this.view = view; + return this; + } + + public 
DashboardView getView() { + return view; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDashboardsRequestPb that = (ListDashboardsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(showTrashed, that.showTrashed) + && Objects.equals(view, that.view); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, showTrashed, view); + } + + @Override + public String toString() { + return new ToStringer(ListDashboardsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("showTrashed", showTrashed) + .add("view", view) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponse.java index de36ca888..db819ccc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize(using = ListDashboardsResponse.ListDashboardsResponseSerializer.class) +@JsonDeserialize(using = ListDashboardsResponse.ListDashboardsResponseDeserializer.class) public class ListDashboardsResponse { /** */ - @JsonProperty("dashboards") private Collection dashboards; /** * A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, * there are no subsequent dashboards. */ - @JsonProperty("next_page_token") private String nextPageToken; public ListDashboardsResponse setDashboards(Collection dashboards) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListDashboardsResponsePb toPb() { + ListDashboardsResponsePb pb = new ListDashboardsResponsePb(); + pb.setDashboards(dashboards); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListDashboardsResponse fromPb(ListDashboardsResponsePb pb) { + ListDashboardsResponse model = new ListDashboardsResponse(); + model.setDashboards(pb.getDashboards()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListDashboardsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDashboardsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDashboardsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDashboardsResponseDeserializer + extends JsonDeserializer { + @Override + public ListDashboardsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDashboardsResponsePb pb = mapper.readValue(p, ListDashboardsResponsePb.class); + return ListDashboardsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponsePb.java new file mode 100755 index 000000000..833ce86a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListDashboardsResponsePb { + @JsonProperty("dashboards") + private Collection dashboards; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDashboardsResponsePb setDashboards(Collection dashboards) { + this.dashboards = dashboards; + return this; + } + + public Collection getDashboards() { + return dashboards; + } + + public ListDashboardsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDashboardsResponsePb that = (ListDashboardsResponsePb) o; + return Objects.equals(dashboards, that.dashboards) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(dashboards, nextPageToken); + } + + @Override + public String toString() { + return new 
ToStringer(ListDashboardsResponsePb.class) + .add("dashboards", dashboards) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java index 9ba29e91a..a6b01171d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java @@ -3,28 +3,34 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List dashboard schedules */ @Generated +@JsonSerialize(using = ListSchedulesRequest.ListSchedulesRequestSerializer.class) +@JsonDeserialize(using = ListSchedulesRequest.ListSchedulesRequestDeserializer.class) public class ListSchedulesRequest { /** UUID identifying the dashboard to which the schedules belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** The number of schedules to return per page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * A page token, received from a previous `ListSchedules` call. 
Use this to retrieve the * subsequent page. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListSchedulesRequest setDashboardId(String dashboardId) { @@ -77,4 +83,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListSchedulesRequestPb toPb() { + ListSchedulesRequestPb pb = new ListSchedulesRequestPb(); + pb.setDashboardId(dashboardId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListSchedulesRequest fromPb(ListSchedulesRequestPb pb) { + ListSchedulesRequest model = new ListSchedulesRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListSchedulesRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListSchedulesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSchedulesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSchedulesRequestDeserializer + extends JsonDeserializer { + @Override + public ListSchedulesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSchedulesRequestPb pb = mapper.readValue(p, ListSchedulesRequestPb.class); + return ListSchedulesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequestPb.java new file mode 100755 index 000000000..bb2993e75 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List dashboard schedules */ +@Generated +class ListSchedulesRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListSchedulesRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public ListSchedulesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListSchedulesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSchedulesRequestPb that = (ListSchedulesRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && 
Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListSchedulesRequestPb.class) + .add("dashboardId", dashboardId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponse.java index 3c3b29c79..0689ebe62 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListSchedulesResponse.ListSchedulesResponseSerializer.class) +@JsonDeserialize(using = ListSchedulesResponse.ListSchedulesResponseDeserializer.class) public class ListSchedulesResponse { /** * A token that can be used as a `page_token` in subsequent requests to retrieve the next page of * results. 
If this field is omitted, there are no subsequent schedules. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("schedules") private Collection schedules; public ListSchedulesResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("schedules", schedules) .toString(); } + + ListSchedulesResponsePb toPb() { + ListSchedulesResponsePb pb = new ListSchedulesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSchedules(schedules); + + return pb; + } + + static ListSchedulesResponse fromPb(ListSchedulesResponsePb pb) { + ListSchedulesResponse model = new ListSchedulesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSchedules(pb.getSchedules()); + + return model; + } + + public static class ListSchedulesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListSchedulesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSchedulesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSchedulesResponseDeserializer + extends JsonDeserializer { + @Override + public ListSchedulesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSchedulesResponsePb pb = mapper.readValue(p, ListSchedulesResponsePb.class); + return ListSchedulesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponsePb.java new file mode 100755 index 000000000..36259b89f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSchedulesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("schedules") + private Collection schedules; + + public ListSchedulesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSchedulesResponsePb setSchedules(Collection schedules) { + this.schedules = schedules; + return this; + } + + public Collection getSchedules() { + return schedules; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSchedulesResponsePb that = (ListSchedulesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(schedules, that.schedules); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, schedules); + } + + @Override + public String toString() { + return new ToStringer(ListSchedulesResponsePb.class) + 
.add("nextPageToken", nextPageToken) + .add("schedules", schedules) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java index 275fa17db..1061047c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java @@ -3,32 +3,38 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List schedule subscriptions */ @Generated +@JsonSerialize(using = ListSubscriptionsRequest.ListSubscriptionsRequestSerializer.class) +@JsonDeserialize(using = ListSubscriptionsRequest.ListSubscriptionsRequestDeserializer.class) public class ListSubscriptionsRequest { /** UUID identifying the dashboard which the subscriptions belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** The number of subscriptions to return per page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * A page token, received from a previous `ListSubscriptions` call. 
Use this to retrieve the * subsequent page. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** UUID identifying the schedule which the subscriptions belongs. */ - @JsonIgnore private String scheduleId; + private String scheduleId; public ListSubscriptionsRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -91,4 +97,47 @@ public String toString() { .add("scheduleId", scheduleId) .toString(); } + + ListSubscriptionsRequestPb toPb() { + ListSubscriptionsRequestPb pb = new ListSubscriptionsRequestPb(); + pb.setDashboardId(dashboardId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setScheduleId(scheduleId); + + return pb; + } + + static ListSubscriptionsRequest fromPb(ListSubscriptionsRequestPb pb) { + ListSubscriptionsRequest model = new ListSubscriptionsRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setScheduleId(pb.getScheduleId()); + + return model; + } + + public static class ListSubscriptionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListSubscriptionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSubscriptionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSubscriptionsRequestDeserializer + extends JsonDeserializer { + @Override + public ListSubscriptionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSubscriptionsRequestPb pb = mapper.readValue(p, ListSubscriptionsRequestPb.class); + return ListSubscriptionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequestPb.java new file mode 100755 index 000000000..6c76f44ca --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List schedule subscriptions */ +@Generated +class ListSubscriptionsRequestPb { + @JsonIgnore private String dashboardId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private String scheduleId; + + public ListSubscriptionsRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public ListSubscriptionsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListSubscriptionsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListSubscriptionsRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + 
+ @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSubscriptionsRequestPb that = (ListSubscriptionsRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(scheduleId, that.scheduleId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, pageSize, pageToken, scheduleId); + } + + @Override + public String toString() { + return new ToStringer(ListSubscriptionsRequestPb.class) + .add("dashboardId", dashboardId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("scheduleId", scheduleId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponse.java index 2c8a3a199..8c211689a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize(using = ListSubscriptionsResponse.ListSubscriptionsResponseSerializer.class) +@JsonDeserialize(using = ListSubscriptionsResponse.ListSubscriptionsResponseDeserializer.class) public class ListSubscriptionsResponse { /** * A token that can be used as a `page_token` in subsequent requests to retrieve the next page of * results. If this field is omitted, there are no subsequent subscriptions. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("subscriptions") private Collection subscriptions; public ListSubscriptionsResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("subscriptions", subscriptions) .toString(); } + + ListSubscriptionsResponsePb toPb() { + ListSubscriptionsResponsePb pb = new ListSubscriptionsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSubscriptions(subscriptions); + + return pb; + } + + static ListSubscriptionsResponse fromPb(ListSubscriptionsResponsePb pb) { + ListSubscriptionsResponse model = new ListSubscriptionsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSubscriptions(pb.getSubscriptions()); + + return model; + } + + public static class ListSubscriptionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListSubscriptionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSubscriptionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSubscriptionsResponseDeserializer + extends JsonDeserializer { + @Override + public ListSubscriptionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSubscriptionsResponsePb pb = mapper.readValue(p, ListSubscriptionsResponsePb.class); + return ListSubscriptionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponsePb.java new file mode 100755 index 000000000..74b04a5a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSubscriptionsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("subscriptions") + private Collection subscriptions; + + public ListSubscriptionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSubscriptionsResponsePb setSubscriptions(Collection subscriptions) { + this.subscriptions = subscriptions; + return this; + } + + public Collection getSubscriptions() { + return subscriptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSubscriptionsResponsePb that = (ListSubscriptionsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(subscriptions, that.subscriptions); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, subscriptions); + } + + @Override 
+ public String toString() { + return new ToStringer(ListSubscriptionsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("subscriptions", subscriptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageError.java index 955118cf8..3b3db42c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageError.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MessageError.MessageErrorSerializer.class) +@JsonDeserialize(using = MessageError.MessageErrorDeserializer.class) public class MessageError { /** */ - @JsonProperty("error") private String error; /** */ - @JsonProperty("type") private MessageErrorType typeValue; public MessageError setError(String error) { @@ -55,4 +64,39 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + MessageErrorPb toPb() { + MessageErrorPb pb = new MessageErrorPb(); + pb.setError(error); + pb.setType(typeValue); + + return pb; + } + + static MessageError fromPb(MessageErrorPb pb) { + MessageError model = new 
MessageError(); + model.setError(pb.getError()); + model.setType(pb.getType()); + + return model; + } + + public static class MessageErrorSerializer extends JsonSerializer { + @Override + public void serialize(MessageError value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MessageErrorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MessageErrorDeserializer extends JsonDeserializer { + @Override + public MessageError deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MessageErrorPb pb = mapper.readValue(p, MessageErrorPb.class); + return MessageError.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorPb.java new file mode 100755 index 000000000..e15366111 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MessageErrorPb { + @JsonProperty("error") + private String error; + + @JsonProperty("type") + private MessageErrorType typeValue; + + public MessageErrorPb setError(String error) { + this.error = error; + return this; + } + + public String getError() { + return error; + } + + public MessageErrorPb setType(MessageErrorType typeValue) { + this.typeValue = typeValue; + return this; + } + + public MessageErrorType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MessageErrorPb that = (MessageErrorPb) o; + return Objects.equals(error, that.error) && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(error, typeValue); + } + + @Override + public String toString() { + return new ToStringer(MessageErrorPb.class) + .add("error", error) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java index 674a9f461..e6b022157 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MigrateDashboardRequest.MigrateDashboardRequestSerializer.class) +@JsonDeserialize(using = MigrateDashboardRequest.MigrateDashboardRequestDeserializer.class) public class MigrateDashboardRequest { /** Display name for the new Lakeview dashboard. */ - @JsonProperty("display_name") private String displayName; /** The workspace path of the folder to contain the migrated Lakeview dashboard. */ - @JsonProperty("parent_path") private String parentPath; /** UUID of the dashboard to be migrated. */ - @JsonProperty("source_dashboard_id") private String sourceDashboardId; /** * Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named * syntax (:param) when converting datasets in the dashboard. 
*/ - @JsonProperty("update_parameter_syntax") private Boolean updateParameterSyntax; public MigrateDashboardRequest setDisplayName(String displayName) { @@ -89,4 +96,47 @@ public String toString() { .add("updateParameterSyntax", updateParameterSyntax) .toString(); } + + MigrateDashboardRequestPb toPb() { + MigrateDashboardRequestPb pb = new MigrateDashboardRequestPb(); + pb.setDisplayName(displayName); + pb.setParentPath(parentPath); + pb.setSourceDashboardId(sourceDashboardId); + pb.setUpdateParameterSyntax(updateParameterSyntax); + + return pb; + } + + static MigrateDashboardRequest fromPb(MigrateDashboardRequestPb pb) { + MigrateDashboardRequest model = new MigrateDashboardRequest(); + model.setDisplayName(pb.getDisplayName()); + model.setParentPath(pb.getParentPath()); + model.setSourceDashboardId(pb.getSourceDashboardId()); + model.setUpdateParameterSyntax(pb.getUpdateParameterSyntax()); + + return model; + } + + public static class MigrateDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + MigrateDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MigrateDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MigrateDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public MigrateDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MigrateDashboardRequestPb pb = mapper.readValue(p, MigrateDashboardRequestPb.class); + return MigrateDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequestPb.java new file mode 100755 index 000000000..1c37d172e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MigrateDashboardRequestPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("source_dashboard_id") + private String sourceDashboardId; + + @JsonProperty("update_parameter_syntax") + private Boolean updateParameterSyntax; + + public MigrateDashboardRequestPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public MigrateDashboardRequestPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public MigrateDashboardRequestPb setSourceDashboardId(String sourceDashboardId) { + this.sourceDashboardId = sourceDashboardId; + return this; + } + + public String getSourceDashboardId() { + return sourceDashboardId; + } + + public MigrateDashboardRequestPb setUpdateParameterSyntax(Boolean updateParameterSyntax) { + this.updateParameterSyntax = updateParameterSyntax; + 
return this; + } + + public Boolean getUpdateParameterSyntax() { + return updateParameterSyntax; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MigrateDashboardRequestPb that = (MigrateDashboardRequestPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(sourceDashboardId, that.sourceDashboardId) + && Objects.equals(updateParameterSyntax, that.updateParameterSyntax); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, parentPath, sourceDashboardId, updateParameterSyntax); + } + + @Override + public String toString() { + return new ToStringer(MigrateDashboardRequestPb.class) + .add("displayName", displayName) + .add("parentPath", parentPath) + .add("sourceDashboardId", sourceDashboardId) + .add("updateParameterSyntax", updateParameterSyntax) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java deleted file mode 100755 index f041070b2..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java +++ /dev/null @@ -1,45 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class PendingStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - public PendingStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PendingStatus that = (PendingStatus) o; - return Objects.equals(dataToken, that.dataToken); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken); - } - - @Override - public String toString() { - return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java deleted file mode 100755 index 958dd8311..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java +++ /dev/null @@ -1,80 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Collection; -import java.util.Objects; - -/** Poll the results for the a query for a published, embedded dashboard */ -@Generated -public class PollPublishedQueryStatusRequest { - /** */ - @JsonIgnore - @QueryParam("dashboard_name") - private String dashboardName; - - /** */ - @JsonIgnore - @QueryParam("dashboard_revision_id") - private String dashboardRevisionId; - - /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ - @JsonIgnore - @QueryParam("tokens") - private Collection tokens; - - public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return dashboardRevisionId; - } - - public PollPublishedQueryStatusRequest setTokens(Collection tokens) { - this.tokens = tokens; - return this; - } - - public Collection getTokens() { - return tokens; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PollPublishedQueryStatusRequest that = (PollPublishedQueryStatusRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, tokens); - } - - @Override - public String toString() { - return new 
ToStringer(PollPublishedQueryStatusRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("tokens", tokens) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java index 1e477982a..756010eed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java @@ -4,26 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PublishRequest.PublishRequestSerializer.class) +@JsonDeserialize(using = PublishRequest.PublishRequestDeserializer.class) public class PublishRequest { /** UUID identifying the dashboard to be published. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** * Flag to indicate if the publisher's credentials should be embedded in the published dashboard. * These embedded credentials will be used to execute the published dashboard's queries. 
*/ - @JsonProperty("embed_credentials") private Boolean embedCredentials; /** * The ID of the warehouse that can be used to override the warehouse which was set in the draft. */ - @JsonProperty("warehouse_id") private String warehouseId; public PublishRequest setDashboardId(String dashboardId) { @@ -76,4 +84,42 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + PublishRequestPb toPb() { + PublishRequestPb pb = new PublishRequestPb(); + pb.setDashboardId(dashboardId); + pb.setEmbedCredentials(embedCredentials); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static PublishRequest fromPb(PublishRequestPb pb) { + PublishRequest model = new PublishRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setEmbedCredentials(pb.getEmbedCredentials()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class PublishRequestSerializer extends JsonSerializer { + @Override + public void serialize(PublishRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PublishRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PublishRequestDeserializer extends JsonDeserializer { + @Override + public PublishRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PublishRequestPb pb = mapper.readValue(p, PublishRequestPb.class); + return PublishRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequestPb.java new file mode 100755 index 000000000..141ed324c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PublishRequestPb { + @JsonIgnore private String dashboardId; + + @JsonProperty("embed_credentials") + private Boolean embedCredentials; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public PublishRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public PublishRequestPb setEmbedCredentials(Boolean embedCredentials) { + this.embedCredentials = embedCredentials; + return this; + } + + public Boolean getEmbedCredentials() { + return embedCredentials; + } + + public PublishRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PublishRequestPb that = (PublishRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(embedCredentials, 
that.embedCredentials) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, embedCredentials, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(PublishRequestPb.class) + .add("dashboardId", dashboardId) + .add("embedCredentials", embedCredentials) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java index c8133c4f2..3118426c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PublishedDashboard.PublishedDashboardSerializer.class) +@JsonDeserialize(using = PublishedDashboard.PublishedDashboardDeserializer.class) public class PublishedDashboard { /** The display name of the published dashboard. */ - @JsonProperty("display_name") private String displayName; /** Indicates whether credentials are embedded in the published dashboard. 
*/ - @JsonProperty("embed_credentials") private Boolean embedCredentials; /** The timestamp of when the published dashboard was last revised. */ - @JsonProperty("revision_create_time") private String revisionCreateTime; /** The warehouse ID used to run the published dashboard. */ - @JsonProperty("warehouse_id") private String warehouseId; public PublishedDashboard setDisplayName(String displayName) { @@ -86,4 +93,44 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + PublishedDashboardPb toPb() { + PublishedDashboardPb pb = new PublishedDashboardPb(); + pb.setDisplayName(displayName); + pb.setEmbedCredentials(embedCredentials); + pb.setRevisionCreateTime(revisionCreateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static PublishedDashboard fromPb(PublishedDashboardPb pb) { + PublishedDashboard model = new PublishedDashboard(); + model.setDisplayName(pb.getDisplayName()); + model.setEmbedCredentials(pb.getEmbedCredentials()); + model.setRevisionCreateTime(pb.getRevisionCreateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class PublishedDashboardSerializer extends JsonSerializer { + @Override + public void serialize(PublishedDashboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PublishedDashboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PublishedDashboardDeserializer extends JsonDeserializer { + @Override + public PublishedDashboard deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PublishedDashboardPb pb = mapper.readValue(p, PublishedDashboardPb.class); + return PublishedDashboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboardPb.java new file mode 100755 index 000000000..ab305c960 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboardPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PublishedDashboardPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("embed_credentials") + private Boolean embedCredentials; + + @JsonProperty("revision_create_time") + private String revisionCreateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public PublishedDashboardPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public PublishedDashboardPb setEmbedCredentials(Boolean embedCredentials) { + this.embedCredentials = embedCredentials; + return this; + } + + public Boolean getEmbedCredentials() { + return embedCredentials; + } + + public PublishedDashboardPb setRevisionCreateTime(String revisionCreateTime) { + this.revisionCreateTime = revisionCreateTime; + return this; + } + + public String getRevisionCreateTime() { + return revisionCreateTime; + } + + public PublishedDashboardPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + 
return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PublishedDashboardPb that = (PublishedDashboardPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(embedCredentials, that.embedCredentials) + && Objects.equals(revisionCreateTime, that.revisionCreateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, embedCredentials, revisionCreateTime, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(PublishedDashboardPb.class) + .add("displayName", displayName) + .add("embedCredentials", embedCredentials) + .add("revisionCreateTime", revisionCreateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java deleted file mode 100755 index eb016a2f8..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java +++ /dev/null @@ -1,68 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Query execution APIs for AI / BI Dashboards */ -@Generated -public class QueryExecutionAPI { - private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class); - - private final QueryExecutionService impl; - - /** Regular-use constructor */ - public QueryExecutionAPI(ApiClient apiClient) { - impl = new QueryExecutionImpl(apiClient); - } - - /** Constructor for mocks */ - public QueryExecutionAPI(QueryExecutionService mock) { - impl = mock; - } - - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - String dashboardName, String dashboardRevisionId) { - return cancelPublishedQueryExecution( - new CancelPublishedQueryExecutionRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Cancel the results for the a query for a published, embedded dashboard. */ - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest request) { - return impl.cancelPublishedQueryExecution(request); - } - - public void executePublishedDashboardQuery(String dashboardName, String dashboardRevisionId) { - executePublishedDashboardQuery( - new ExecutePublishedDashboardQueryRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Execute a query for a published dashboard. 
*/ - public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { - impl.executePublishedDashboardQuery(request); - } - - public PollQueryStatusResponse pollPublishedQueryStatus( - String dashboardName, String dashboardRevisionId) { - return pollPublishedQueryStatus( - new PollPublishedQueryStatusRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Poll the results for the a query for a published, embedded dashboard. */ - public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { - return impl.pollPublishedQueryStatus(request); - } - - public QueryExecutionService impl() { - return impl; - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java deleted file mode 100755 index 46db1f805..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.http.Request; -import com.databricks.sdk.support.Generated; -import java.io.IOException; - -/** Package-local implementation of QueryExecution */ -@Generated -class QueryExecutionImpl implements QueryExecutionService { - private final ApiClient apiClient; - - public QueryExecutionImpl(ApiClient apiClient) { - this.apiClient = apiClient; - } - - @Override - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, CancelQueryExecutionResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ExecuteQueryResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, PollQueryStatusResponse.class); - } catch (IOException e) { - throw new 
DatabricksException("IO error: " + e.getMessage(), e); - } - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java deleted file mode 100755 index d30cda5b6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java +++ /dev/null @@ -1,26 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; - -/** - * Query execution APIs for AI / BI Dashboards - * - *

This is the high-level interface, that contains generated methods. - * - *

Evolving: this interface is under development. Method signatures may change. - */ -@Generated -public interface QueryExecutionService { - /** Cancel the results for the a query for a published, embedded dashboard. */ - CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest); - - /** Execute a query for a published dashboard. */ - void executePublishedDashboardQuery( - ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest); - - /** Poll the results for the a query for a published, embedded dashboard. */ - PollQueryStatusResponse pollPublishedQueryStatus( - PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java deleted file mode 100755 index 334f3d007..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class QueryResponseStatus { - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("canceled") - private Empty canceled; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. 
- */ - @JsonProperty("closed") - private Empty closed; - - /** */ - @JsonProperty("pending") - private PendingStatus pending; - - /** - * The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be - * identical to data_token in SuccessStatus and PendingStatus. This field is created for audit - * logging purpose to record the statement_id of all QueryResponseStatus. - */ - @JsonProperty("statement_id") - private String statementId; - - /** */ - @JsonProperty("success") - private SuccessStatus success; - - public QueryResponseStatus setCanceled(Empty canceled) { - this.canceled = canceled; - return this; - } - - public Empty getCanceled() { - return canceled; - } - - public QueryResponseStatus setClosed(Empty closed) { - this.closed = closed; - return this; - } - - public Empty getClosed() { - return closed; - } - - public QueryResponseStatus setPending(PendingStatus pending) { - this.pending = pending; - return this; - } - - public PendingStatus getPending() { - return pending; - } - - public QueryResponseStatus setStatementId(String statementId) { - this.statementId = statementId; - return this; - } - - public String getStatementId() { - return statementId; - } - - public QueryResponseStatus setSuccess(SuccessStatus success) { - this.success = success; - return this; - } - - public SuccessStatus getSuccess() { - return success; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QueryResponseStatus that = (QueryResponseStatus) o; - return Objects.equals(canceled, that.canceled) - && Objects.equals(closed, that.closed) - && Objects.equals(pending, that.pending) - && Objects.equals(statementId, that.statementId) - && Objects.equals(success, that.success); - } - - @Override - public int hashCode() { - return Objects.hash(canceled, closed, pending, statementId, success); - } - - @Override - public String toString() { - return new 
ToStringer(QueryResponseStatus.class) - .add("canceled", canceled) - .add("closed", closed) - .add("pending", pending) - .add("statementId", statementId) - .add("success", success) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java index de3b6451a..d74f77089 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Result.ResultSerializer.class) +@JsonDeserialize(using = Result.ResultDeserializer.class) public class Result { /** If result is truncated */ - @JsonProperty("is_truncated") private Boolean isTruncated; /** Row count of the result */ - @JsonProperty("row_count") private Long rowCount; /** * Statement Execution API statement id. Use [Get status, manifest, and result first * chunk](:method:statementexecution/getstatement) to get the full result data. 
*/ - @JsonProperty("statement_id") private String statementId; public Result setIsTruncated(Boolean isTruncated) { @@ -74,4 +82,41 @@ public String toString() { .add("statementId", statementId) .toString(); } + + ResultPb toPb() { + ResultPb pb = new ResultPb(); + pb.setIsTruncated(isTruncated); + pb.setRowCount(rowCount); + pb.setStatementId(statementId); + + return pb; + } + + static Result fromPb(ResultPb pb) { + Result model = new Result(); + model.setIsTruncated(pb.getIsTruncated()); + model.setRowCount(pb.getRowCount()); + model.setStatementId(pb.getStatementId()); + + return model; + } + + public static class ResultSerializer extends JsonSerializer { + @Override + public void serialize(Result value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultDeserializer extends JsonDeserializer { + @Override + public Result deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultPb pb = mapper.readValue(p, ResultPb.class); + return Result.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ResultPb.java new file mode 100755 index 000000000..55914b9d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ResultPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ResultPb { + @JsonProperty("is_truncated") + private Boolean isTruncated; + + @JsonProperty("row_count") + private Long rowCount; + + @JsonProperty("statement_id") + private String statementId; + + public ResultPb setIsTruncated(Boolean isTruncated) { + this.isTruncated = isTruncated; + return this; + } + + public Boolean getIsTruncated() { + return isTruncated; + } + + public ResultPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + public ResultPb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultPb that = (ResultPb) o; + return Objects.equals(isTruncated, that.isTruncated) + && Objects.equals(rowCount, that.rowCount) + && Objects.equals(statementId, that.statementId); + } + + @Override + public int hashCode() { + return Objects.hash(isTruncated, rowCount, statementId); + } + + @Override + public String toString() { + return new ToStringer(ResultPb.class) + .add("isTruncated", isTruncated) + .add("rowCount", rowCount) + .add("statementId", statementId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java index 0cf82ee0d..7459fba1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java @@ -4,25 +4,32 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Schedule.ScheduleSerializer.class) +@JsonDeserialize(using = Schedule.ScheduleDeserializer.class) public class Schedule { /** A timestamp indicating when the schedule was created. */ - @JsonProperty("create_time") private String createTime; /** The cron expression describing the frequency of the periodic refresh for this schedule. */ - @JsonProperty("cron_schedule") private CronSchedule cronSchedule; /** UUID identifying the dashboard to which the schedule belongs. */ - @JsonProperty("dashboard_id") private String dashboardId; /** The display name for schedule. */ - @JsonProperty("display_name") private String displayName; /** @@ -30,23 +37,18 @@ public class Schedule { * that the schedule has not been modified since the last read, and can be optionally provided on * delete. */ - @JsonProperty("etag") private String etag; /** The status indicates whether this schedule is paused or not. */ - @JsonProperty("pause_status") private SchedulePauseStatus pauseStatus; /** UUID identifying the schedule. */ - @JsonProperty("schedule_id") private String scheduleId; /** A timestamp indicating when the schedule was last updated. 
*/ - @JsonProperty("update_time") private String updateTime; /** The warehouse id to run the dashboard with for the schedule. */ - @JsonProperty("warehouse_id") private String warehouseId; public Schedule setCreateTime(String createTime) { @@ -174,4 +176,53 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + SchedulePb toPb() { + SchedulePb pb = new SchedulePb(); + pb.setCreateTime(createTime); + pb.setCronSchedule(cronSchedule); + pb.setDashboardId(dashboardId); + pb.setDisplayName(displayName); + pb.setEtag(etag); + pb.setPauseStatus(pauseStatus); + pb.setScheduleId(scheduleId); + pb.setUpdateTime(updateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static Schedule fromPb(SchedulePb pb) { + Schedule model = new Schedule(); + model.setCreateTime(pb.getCreateTime()); + model.setCronSchedule(pb.getCronSchedule()); + model.setDashboardId(pb.getDashboardId()); + model.setDisplayName(pb.getDisplayName()); + model.setEtag(pb.getEtag()); + model.setPauseStatus(pb.getPauseStatus()); + model.setScheduleId(pb.getScheduleId()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class ScheduleSerializer extends JsonSerializer { + @Override + public void serialize(Schedule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ScheduleDeserializer extends JsonDeserializer { + @Override + public Schedule deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SchedulePb pb = mapper.readValue(p, SchedulePb.class); + return Schedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePb.java new file mode 100755 index 000000000..9dff6ca90 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePb.java @@ -0,0 +1,164 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SchedulePb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("cron_schedule") + private CronSchedule cronSchedule; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("pause_status") + private SchedulePauseStatus pauseStatus; + + @JsonProperty("schedule_id") + private String scheduleId; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public SchedulePb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public SchedulePb setCronSchedule(CronSchedule cronSchedule) { + this.cronSchedule = cronSchedule; + return this; + } + + public CronSchedule getCronSchedule() { + return cronSchedule; + } + + public SchedulePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public SchedulePb 
setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public SchedulePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public SchedulePb setPauseStatus(SchedulePauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public SchedulePauseStatus getPauseStatus() { + return pauseStatus; + } + + public SchedulePb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + public SchedulePb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public SchedulePb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SchedulePb that = (SchedulePb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(cronSchedule, that.cronSchedule) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(etag, that.etag) + && Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(scheduleId, that.scheduleId) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + cronSchedule, + dashboardId, + displayName, + etag, + pauseStatus, + scheduleId, + updateTime, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(SchedulePb.class) + .add("createTime", createTime) + .add("cronSchedule", cronSchedule) + .add("dashboardId", 
dashboardId) + .add("displayName", displayName) + .add("etag", etag) + .add("pauseStatus", pauseStatus) + .add("scheduleId", scheduleId) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscriber.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscriber.java index b1677565e..475099495 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscriber.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscriber.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Subscriber.SubscriberSerializer.class) +@JsonDeserialize(using = Subscriber.SubscriberDeserializer.class) public class Subscriber { /** * The destination to receive the subscription email. This parameter is mutually exclusive with * `user_subscriber`. */ - @JsonProperty("destination_subscriber") private SubscriptionSubscriberDestination destinationSubscriber; /** * The user to receive the subscription email. This parameter is mutually exclusive with * `destination_subscriber`. 
*/ - @JsonProperty("user_subscriber") private SubscriptionSubscriberUser userSubscriber; public Subscriber setDestinationSubscriber( @@ -63,4 +72,39 @@ public String toString() { .add("userSubscriber", userSubscriber) .toString(); } + + SubscriberPb toPb() { + SubscriberPb pb = new SubscriberPb(); + pb.setDestinationSubscriber(destinationSubscriber); + pb.setUserSubscriber(userSubscriber); + + return pb; + } + + static Subscriber fromPb(SubscriberPb pb) { + Subscriber model = new Subscriber(); + model.setDestinationSubscriber(pb.getDestinationSubscriber()); + model.setUserSubscriber(pb.getUserSubscriber()); + + return model; + } + + public static class SubscriberSerializer extends JsonSerializer { + @Override + public void serialize(Subscriber value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriberPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriberDeserializer extends JsonDeserializer { + @Override + public Subscriber deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriberPb pb = mapper.readValue(p, SubscriberPb.class); + return Subscriber.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriberPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriberPb.java new file mode 100755 index 000000000..35ba343fa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriberPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SubscriberPb { + @JsonProperty("destination_subscriber") + private SubscriptionSubscriberDestination destinationSubscriber; + + @JsonProperty("user_subscriber") + private SubscriptionSubscriberUser userSubscriber; + + public SubscriberPb setDestinationSubscriber( + SubscriptionSubscriberDestination destinationSubscriber) { + this.destinationSubscriber = destinationSubscriber; + return this; + } + + public SubscriptionSubscriberDestination getDestinationSubscriber() { + return destinationSubscriber; + } + + public SubscriberPb setUserSubscriber(SubscriptionSubscriberUser userSubscriber) { + this.userSubscriber = userSubscriber; + return this; + } + + public SubscriptionSubscriberUser getUserSubscriber() { + return userSubscriber; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriberPb that = (SubscriberPb) o; + return Objects.equals(destinationSubscriber, that.destinationSubscriber) + && Objects.equals(userSubscriber, that.userSubscriber); + } + + @Override + public int hashCode() { + return Objects.hash(destinationSubscriber, userSubscriber); + } + + @Override + public String toString() { + return new ToStringer(SubscriberPb.class) + .add("destinationSubscriber", destinationSubscriber) + .add("userSubscriber", userSubscriber) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java index c271bee40..ec7b6d7a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java @@ -4,47 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Subscription.SubscriptionSerializer.class) +@JsonDeserialize(using = Subscription.SubscriptionDeserializer.class) public class Subscription { /** A timestamp indicating when the subscription was created. */ - @JsonProperty("create_time") private String createTime; /** * UserId of the user who adds subscribers (users or notification destinations) to the dashboard's * schedule. */ - @JsonProperty("created_by_user_id") private Long createdByUserId; /** UUID identifying the dashboard to which the subscription belongs. */ - @JsonProperty("dashboard_id") private String dashboardId; /** * The etag for the subscription. Must be left empty on create, can be optionally provided on * delete to ensure that the subscription has not been deleted since the last read. */ - @JsonProperty("etag") private String etag; /** UUID identifying the schedule to which the subscription belongs. */ - @JsonProperty("schedule_id") private String scheduleId; /** Subscriber details for users and destinations to be added as subscribers to the schedule. */ - @JsonProperty("subscriber") private Subscriber subscriber; /** UUID identifying the subscription. 
*/ - @JsonProperty("subscription_id") private String subscriptionId; /** A timestamp indicating when the subscription was last updated. */ - @JsonProperty("update_time") private String updateTime; public Subscription setCreateTime(String createTime) { @@ -160,4 +163,51 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + SubscriptionPb toPb() { + SubscriptionPb pb = new SubscriptionPb(); + pb.setCreateTime(createTime); + pb.setCreatedByUserId(createdByUserId); + pb.setDashboardId(dashboardId); + pb.setEtag(etag); + pb.setScheduleId(scheduleId); + pb.setSubscriber(subscriber); + pb.setSubscriptionId(subscriptionId); + pb.setUpdateTime(updateTime); + + return pb; + } + + static Subscription fromPb(SubscriptionPb pb) { + Subscription model = new Subscription(); + model.setCreateTime(pb.getCreateTime()); + model.setCreatedByUserId(pb.getCreatedByUserId()); + model.setDashboardId(pb.getDashboardId()); + model.setEtag(pb.getEtag()); + model.setScheduleId(pb.getScheduleId()); + model.setSubscriber(pb.getSubscriber()); + model.setSubscriptionId(pb.getSubscriptionId()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class SubscriptionSerializer extends JsonSerializer { + @Override + public void serialize(Subscription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriptionDeserializer extends JsonDeserializer { + @Override + public Subscription deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriptionPb pb = mapper.readValue(p, SubscriptionPb.class); + return Subscription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionPb.java new file mode 100755 index 000000000..0743f7568 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SubscriptionPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("created_by_user_id") + private Long createdByUserId; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("schedule_id") + private String scheduleId; + + @JsonProperty("subscriber") + private Subscriber subscriber; + + @JsonProperty("subscription_id") + private String subscriptionId; + + @JsonProperty("update_time") + private String updateTime; + + public SubscriptionPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public SubscriptionPb setCreatedByUserId(Long createdByUserId) { + this.createdByUserId = createdByUserId; + return this; + } + + public Long getCreatedByUserId() { + return createdByUserId; + } + + public SubscriptionPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public SubscriptionPb setEtag(String etag) { + this.etag 
= etag; + return this; + } + + public String getEtag() { + return etag; + } + + public SubscriptionPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + public SubscriptionPb setSubscriber(Subscriber subscriber) { + this.subscriber = subscriber; + return this; + } + + public Subscriber getSubscriber() { + return subscriber; + } + + public SubscriptionPb setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + public SubscriptionPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionPb that = (SubscriptionPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(createdByUserId, that.createdByUserId) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(etag, that.etag) + && Objects.equals(scheduleId, that.scheduleId) + && Objects.equals(subscriber, that.subscriber) + && Objects.equals(subscriptionId, that.subscriptionId) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + createdByUserId, + dashboardId, + etag, + scheduleId, + subscriber, + subscriptionId, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionPb.class) + .add("createTime", createTime) + .add("createdByUserId", createdByUserId) + .add("dashboardId", dashboardId) + .add("etag", etag) + .add("scheduleId", scheduleId) + .add("subscriber", subscriber) + .add("subscriptionId", subscriptionId) + .add("updateTime", updateTime) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java index cfdbdda70..819b167e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = SubscriptionSubscriberDestination.SubscriptionSubscriberDestinationSerializer.class) +@JsonDeserialize( + using = SubscriptionSubscriberDestination.SubscriptionSubscriberDestinationDeserializer.class) public class SubscriptionSubscriberDestination { /** The canonical identifier of the destination to receive email notification. 
*/ - @JsonProperty("destination_id") private String destinationId; public SubscriptionSubscriberDestination setDestinationId(String destinationId) { @@ -41,4 +53,42 @@ public String toString() { .add("destinationId", destinationId) .toString(); } + + SubscriptionSubscriberDestinationPb toPb() { + SubscriptionSubscriberDestinationPb pb = new SubscriptionSubscriberDestinationPb(); + pb.setDestinationId(destinationId); + + return pb; + } + + static SubscriptionSubscriberDestination fromPb(SubscriptionSubscriberDestinationPb pb) { + SubscriptionSubscriberDestination model = new SubscriptionSubscriberDestination(); + model.setDestinationId(pb.getDestinationId()); + + return model; + } + + public static class SubscriptionSubscriberDestinationSerializer + extends JsonSerializer { + @Override + public void serialize( + SubscriptionSubscriberDestination value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriptionSubscriberDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriptionSubscriberDestinationDeserializer + extends JsonDeserializer { + @Override + public SubscriptionSubscriberDestination deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriptionSubscriberDestinationPb pb = + mapper.readValue(p, SubscriptionSubscriberDestinationPb.class); + return SubscriptionSubscriberDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestinationPb.java new file mode 100755 index 000000000..f757342ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestinationPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SubscriptionSubscriberDestinationPb { + @JsonProperty("destination_id") + private String destinationId; + + public SubscriptionSubscriberDestinationPb setDestinationId(String destinationId) { + this.destinationId = destinationId; + return this; + } + + public String getDestinationId() { + return destinationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionSubscriberDestinationPb that = (SubscriptionSubscriberDestinationPb) o; + return Objects.equals(destinationId, that.destinationId); + } + + @Override + public int hashCode() { + return Objects.hash(destinationId); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionSubscriberDestinationPb.class) + .add("destinationId", destinationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java index 0338eac01..965c2db32 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SubscriptionSubscriberUser.SubscriptionSubscriberUserSerializer.class) +@JsonDeserialize(using = SubscriptionSubscriberUser.SubscriptionSubscriberUserDeserializer.class) public class SubscriptionSubscriberUser { /** UserId of the subscriber. 
*/ - @JsonProperty("user_id") private Long userId; public SubscriptionSubscriberUser setUserId(Long userId) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(SubscriptionSubscriberUser.class).add("userId", userId).toString(); } + + SubscriptionSubscriberUserPb toPb() { + SubscriptionSubscriberUserPb pb = new SubscriptionSubscriberUserPb(); + pb.setUserId(userId); + + return pb; + } + + static SubscriptionSubscriberUser fromPb(SubscriptionSubscriberUserPb pb) { + SubscriptionSubscriberUser model = new SubscriptionSubscriberUser(); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class SubscriptionSubscriberUserSerializer + extends JsonSerializer { + @Override + public void serialize( + SubscriptionSubscriberUser value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriptionSubscriberUserPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriptionSubscriberUserDeserializer + extends JsonDeserializer { + @Override + public SubscriptionSubscriberUser deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriptionSubscriberUserPb pb = mapper.readValue(p, SubscriptionSubscriberUserPb.class); + return SubscriptionSubscriberUser.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUserPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUserPb.java new file mode 100755 index 000000000..e7fa0839d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUserPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SubscriptionSubscriberUserPb { + @JsonProperty("user_id") + private Long userId; + + public SubscriptionSubscriberUserPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionSubscriberUserPb that = (SubscriptionSubscriberUserPb) o; + return Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(userId); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionSubscriberUserPb.class).add("userId", userId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java deleted file mode 100755 index c54d199d3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java +++ /dev/null @@ -1,61 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class SuccessStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - /** Whether the query result is truncated (either by byte limit or row limit) */ - @JsonProperty("truncated") - private Boolean truncated; - - public SuccessStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - public SuccessStatus setTruncated(Boolean truncated) { - this.truncated = truncated; - return this; - } - - public Boolean getTruncated() { - return truncated; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SuccessStatus that = (SuccessStatus) o; - return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken, truncated); - } - - @Override - public String toString() { - return new ToStringer(SuccessStatus.class) - .add("dataToken", dataToken) - .add("truncated", truncated) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java index d1401e1d7..4af066346 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TextAttachment.TextAttachmentSerializer.class) +@JsonDeserialize(using = TextAttachment.TextAttachmentDeserializer.class) public class TextAttachment { /** AI generated message */ - @JsonProperty("content") private String content; /** */ - @JsonProperty("id") private String id; public TextAttachment setContent(String content) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(TextAttachment.class).add("content", content).add("id", id).toString(); } + + TextAttachmentPb toPb() { + TextAttachmentPb pb = new TextAttachmentPb(); + pb.setContent(content); + pb.setId(id); + + return pb; + } + + static TextAttachment fromPb(TextAttachmentPb pb) { + TextAttachment model = new TextAttachment(); + model.setContent(pb.getContent()); + model.setId(pb.getId()); + + return model; + } + + public static class TextAttachmentSerializer extends JsonSerializer { + @Override + public void serialize(TextAttachment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TextAttachmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TextAttachmentDeserializer extends JsonDeserializer { + @Override + public TextAttachment deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TextAttachmentPb pb = mapper.readValue(p, TextAttachmentPb.class); + return TextAttachment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPb.java new file mode 100755 index 000000000..5b03c9811 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TextAttachmentPb { + @JsonProperty("content") + private String content; + + @JsonProperty("id") + private String id; + + public TextAttachmentPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public TextAttachmentPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TextAttachmentPb that = (TextAttachmentPb) o; + return Objects.equals(content, that.content) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(content, id); + } + + @Override + public String toString() { + return new ToStringer(TextAttachmentPb.class).add("content", content).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java index 901212478..6f91e40d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Trash dashboard */ @Generated +@JsonSerialize(using = TrashDashboardRequest.TrashDashboardRequestSerializer.class) +@JsonDeserialize(using = TrashDashboardRequest.TrashDashboardRequestDeserializer.class) public class TrashDashboardRequest { /** UUID identifying the dashboard. 
*/ - @JsonIgnore private String dashboardId; + private String dashboardId; public TrashDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(TrashDashboardRequest.class).add("dashboardId", dashboardId).toString(); } + + TrashDashboardRequestPb toPb() { + TrashDashboardRequestPb pb = new TrashDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static TrashDashboardRequest fromPb(TrashDashboardRequestPb pb) { + TrashDashboardRequest model = new TrashDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class TrashDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + TrashDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrashDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrashDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public TrashDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrashDashboardRequestPb pb = mapper.readValue(p, TrashDashboardRequestPb.class); + return TrashDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequestPb.java new file mode 100755 index 000000000..d9c1e922c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Trash dashboard */ +@Generated +class TrashDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public TrashDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrashDashboardRequestPb that = (TrashDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(TrashDashboardRequestPb.class).add("dashboardId", dashboardId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java index 0f43a3a49..a25e6db3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TrashDashboardResponse.TrashDashboardResponseSerializer.class) +@JsonDeserialize(using = TrashDashboardResponse.TrashDashboardResponseDeserializer.class) public class TrashDashboardResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(TrashDashboardResponse.class).toString(); } + + TrashDashboardResponsePb toPb() { + TrashDashboardResponsePb pb = new TrashDashboardResponsePb(); + + return pb; + } + + static TrashDashboardResponse fromPb(TrashDashboardResponsePb pb) { + TrashDashboardResponse model = new TrashDashboardResponse(); + + return model; + } + + public static class TrashDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + TrashDashboardResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrashDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrashDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public TrashDashboardResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrashDashboardResponsePb pb = mapper.readValue(p, TrashDashboardResponsePb.class); + return TrashDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponsePb.java new file mode 100755 index 000000000..6a6a90fac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class TrashDashboardResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(TrashDashboardResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java index cb8e8e7c4..18673bc72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Unpublish dashboard */ @Generated +@JsonSerialize(using = UnpublishDashboardRequest.UnpublishDashboardRequestSerializer.class) +@JsonDeserialize(using = UnpublishDashboardRequest.UnpublishDashboardRequestDeserializer.class) public class UnpublishDashboardRequest { /** UUID identifying the published dashboard. */ - @JsonIgnore private String dashboardId; + private String dashboardId; public UnpublishDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -41,4 +52,41 @@ public String toString() { .add("dashboardId", dashboardId) .toString(); } + + UnpublishDashboardRequestPb toPb() { + UnpublishDashboardRequestPb pb = new UnpublishDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static UnpublishDashboardRequest fromPb(UnpublishDashboardRequestPb pb) { + UnpublishDashboardRequest model = new UnpublishDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class UnpublishDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UnpublishDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UnpublishDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnpublishDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public UnpublishDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set 
by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnpublishDashboardRequestPb pb = mapper.readValue(p, UnpublishDashboardRequestPb.class); + return UnpublishDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequestPb.java new file mode 100755 index 000000000..c064b84c4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Unpublish dashboard */ +@Generated +class UnpublishDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public UnpublishDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UnpublishDashboardRequestPb that = (UnpublishDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(UnpublishDashboardRequestPb.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java index 211e9c010..c88a2e2d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UnpublishDashboardResponse.UnpublishDashboardResponseSerializer.class) +@JsonDeserialize(using = UnpublishDashboardResponse.UnpublishDashboardResponseDeserializer.class) public class UnpublishDashboardResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UnpublishDashboardResponse.class).toString(); } + + UnpublishDashboardResponsePb toPb() { + UnpublishDashboardResponsePb pb = new UnpublishDashboardResponsePb(); + + return pb; + } + + static UnpublishDashboardResponse fromPb(UnpublishDashboardResponsePb pb) { + UnpublishDashboardResponse model = new UnpublishDashboardResponse(); + + return model; + } + + public static class UnpublishDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UnpublishDashboardResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
UnpublishDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UnpublishDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public UnpublishDashboardResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UnpublishDashboardResponsePb pb = mapper.readValue(p, UnpublishDashboardResponsePb.class); + return UnpublishDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponsePb.java new file mode 100755 index 000000000..b913b66f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UnpublishDashboardResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UnpublishDashboardResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java index 84298ffd8..2d2215777 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update dashboard */ @Generated +@JsonSerialize(using = UpdateDashboardRequest.UpdateDashboardRequestSerializer.class) +@JsonDeserialize(using = 
UpdateDashboardRequest.UpdateDashboardRequestDeserializer.class) public class UpdateDashboardRequest { /** */ - @JsonProperty("dashboard") private Dashboard dashboard; /** UUID identifying the dashboard. */ - @JsonIgnore private String dashboardId; + private String dashboardId; public UpdateDashboardRequest setDashboard(Dashboard dashboard) { this.dashboard = dashboard; @@ -57,4 +66,43 @@ public String toString() { .add("dashboardId", dashboardId) .toString(); } + + UpdateDashboardRequestPb toPb() { + UpdateDashboardRequestPb pb = new UpdateDashboardRequestPb(); + pb.setDashboard(dashboard); + pb.setDashboardId(dashboardId); + + return pb; + } + + static UpdateDashboardRequest fromPb(UpdateDashboardRequestPb pb) { + UpdateDashboardRequest model = new UpdateDashboardRequest(); + model.setDashboard(pb.getDashboard()); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class UpdateDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDashboardRequestPb pb = mapper.readValue(p, UpdateDashboardRequestPb.class); + return UpdateDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequestPb.java new file mode 100755 index 000000000..4434716dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update dashboard */ +@Generated +class UpdateDashboardRequestPb { + @JsonProperty("dashboard") + private Dashboard dashboard; + + @JsonIgnore private String dashboardId; + + public UpdateDashboardRequestPb setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; + return this; + } + + public Dashboard getDashboard() { + return dashboard; + } + + public UpdateDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDashboardRequestPb that = (UpdateDashboardRequestPb) o; + return Objects.equals(dashboard, that.dashboard) + && Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboard, dashboardId); + } + + @Override + public String toString() { + return new 
ToStringer(UpdateDashboardRequestPb.class) + .add("dashboard", dashboard) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java index cffdc6370..0052158bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update dashboard schedule */ @Generated +@JsonSerialize(using = UpdateScheduleRequest.UpdateScheduleRequestSerializer.class) +@JsonDeserialize(using = UpdateScheduleRequest.UpdateScheduleRequestDeserializer.class) public class UpdateScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** */ - @JsonProperty("schedule") private Schedule schedule; /** UUID identifying the schedule. 
*/ - @JsonIgnore private String scheduleId; + private String scheduleId; public UpdateScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -71,4 +80,45 @@ public String toString() { .add("scheduleId", scheduleId) .toString(); } + + UpdateScheduleRequestPb toPb() { + UpdateScheduleRequestPb pb = new UpdateScheduleRequestPb(); + pb.setDashboardId(dashboardId); + pb.setSchedule(schedule); + pb.setScheduleId(scheduleId); + + return pb; + } + + static UpdateScheduleRequest fromPb(UpdateScheduleRequestPb pb) { + UpdateScheduleRequest model = new UpdateScheduleRequest(); + model.setDashboardId(pb.getDashboardId()); + model.setSchedule(pb.getSchedule()); + model.setScheduleId(pb.getScheduleId()); + + return model; + } + + public static class UpdateScheduleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateScheduleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateScheduleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateScheduleRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateScheduleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateScheduleRequestPb pb = mapper.readValue(p, UpdateScheduleRequestPb.class); + return UpdateScheduleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequestPb.java new file mode 100755 index 000000000..af2caae56 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update dashboard schedule */ +@Generated +class UpdateScheduleRequestPb { + @JsonIgnore private String dashboardId; + + @JsonProperty("schedule") + private Schedule schedule; + + @JsonIgnore private String scheduleId; + + public UpdateScheduleRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public UpdateScheduleRequestPb setSchedule(Schedule schedule) { + this.schedule = schedule; + return this; + } + + public Schedule getSchedule() { + return schedule; + } + + public UpdateScheduleRequestPb setScheduleId(String scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public String getScheduleId() { + return scheduleId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateScheduleRequestPb that = (UpdateScheduleRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && 
Objects.equals(schedule, that.schedule) + && Objects.equals(scheduleId, that.scheduleId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, schedule, scheduleId); + } + + @Override + public String toString() { + return new ToStringer(UpdateScheduleRequestPb.class) + .add("dashboardId", dashboardId) + .add("schedule", schedule) + .add("scheduleId", scheduleId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Converters.java new file mode 100755 index 000000000..57c2bc6ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.database; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return 
Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java new file mode 100755 index 000000000..869242f38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Create a Database Catalog */ +@Generated +@JsonSerialize(using = CreateDatabaseCatalogRequest.CreateDatabaseCatalogRequestSerializer.class) +@JsonDeserialize( + using = CreateDatabaseCatalogRequest.CreateDatabaseCatalogRequestDeserializer.class) +public class CreateDatabaseCatalogRequest { + /** */ + private DatabaseCatalog catalog; + + public CreateDatabaseCatalogRequest setCatalog(DatabaseCatalog catalog) { + this.catalog = catalog; + return this; + } + + public DatabaseCatalog getCatalog() { + return catalog; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseCatalogRequest that = (CreateDatabaseCatalogRequest) o; + return Objects.equals(catalog, that.catalog); + } + + @Override + public int hashCode() { + return Objects.hash(catalog); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseCatalogRequest.class).add("catalog", catalog).toString(); + } + + CreateDatabaseCatalogRequestPb toPb() { + CreateDatabaseCatalogRequestPb pb = new CreateDatabaseCatalogRequestPb(); + pb.setCatalog(catalog); + + return pb; + } + + static CreateDatabaseCatalogRequest fromPb(CreateDatabaseCatalogRequestPb pb) { + 
CreateDatabaseCatalogRequest model = new CreateDatabaseCatalogRequest(); + model.setCatalog(pb.getCatalog()); + + return model; + } + + public static class CreateDatabaseCatalogRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDatabaseCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDatabaseCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDatabaseCatalogRequestDeserializer + extends JsonDeserializer { + @Override + public CreateDatabaseCatalogRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDatabaseCatalogRequestPb pb = mapper.readValue(p, CreateDatabaseCatalogRequestPb.class); + return CreateDatabaseCatalogRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequestPb.java similarity index 69% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequestPb.java index d58ef38df..87a8e2d6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,12 +9,11 @@ /** Create a Database Catalog */ @Generated -public class CreateDatabaseCatalogRequest { - /** */ +class CreateDatabaseCatalogRequestPb { @JsonProperty("catalog") private DatabaseCatalog catalog; - public CreateDatabaseCatalogRequest setCatalog(DatabaseCatalog catalog) { + public CreateDatabaseCatalogRequestPb setCatalog(DatabaseCatalog catalog) { this.catalog = catalog; return this; } @@ -27,7 +26,7 @@ public DatabaseCatalog getCatalog() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CreateDatabaseCatalogRequest that = (CreateDatabaseCatalogRequest) o; + CreateDatabaseCatalogRequestPb that = (CreateDatabaseCatalogRequestPb) o; return Objects.equals(catalog, that.catalog); } @@ -38,6 +37,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateDatabaseCatalogRequest.class).add("catalog", catalog).toString(); + return new ToStringer(CreateDatabaseCatalogRequestPb.class).add("catalog", catalog).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java new file mode 100755 index 000000000..bfec9c7ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Create a Database Instance */ +@Generated +@JsonSerialize(using = CreateDatabaseInstanceRequest.CreateDatabaseInstanceRequestSerializer.class) +@JsonDeserialize( + using = CreateDatabaseInstanceRequest.CreateDatabaseInstanceRequestDeserializer.class) +public class CreateDatabaseInstanceRequest { + /** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and + * storage. 
+ */ + private DatabaseInstance databaseInstance; + + public CreateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + this.databaseInstance = databaseInstance; + return this; + } + + public DatabaseInstance getDatabaseInstance() { + return databaseInstance; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseInstanceRequest that = (CreateDatabaseInstanceRequest) o; + return Objects.equals(databaseInstance, that.databaseInstance); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstance); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseInstanceRequest.class) + .add("databaseInstance", databaseInstance) + .toString(); + } + + CreateDatabaseInstanceRequestPb toPb() { + CreateDatabaseInstanceRequestPb pb = new CreateDatabaseInstanceRequestPb(); + pb.setDatabaseInstance(databaseInstance); + + return pb; + } + + static CreateDatabaseInstanceRequest fromPb(CreateDatabaseInstanceRequestPb pb) { + CreateDatabaseInstanceRequest model = new CreateDatabaseInstanceRequest(); + model.setDatabaseInstance(pb.getDatabaseInstance()); + + return model; + } + + public static class CreateDatabaseInstanceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDatabaseInstanceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDatabaseInstanceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDatabaseInstanceRequestDeserializer + extends JsonDeserializer { + @Override + public CreateDatabaseInstanceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDatabaseInstanceRequestPb pb = + mapper.readValue(p, CreateDatabaseInstanceRequestPb.class); + return CreateDatabaseInstanceRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequestPb.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequestPb.java index 2aa9d2a71..627ca6211 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,15 +9,11 @@ /** Create a Database Instance */ @Generated -public class CreateDatabaseInstanceRequest { - /** - * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and - * storage. 
- */ +class CreateDatabaseInstanceRequestPb { @JsonProperty("database_instance") private DatabaseInstance databaseInstance; - public CreateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + public CreateDatabaseInstanceRequestPb setDatabaseInstance(DatabaseInstance databaseInstance) { this.databaseInstance = databaseInstance; return this; } @@ -30,7 +26,7 @@ public DatabaseInstance getDatabaseInstance() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CreateDatabaseInstanceRequest that = (CreateDatabaseInstanceRequest) o; + CreateDatabaseInstanceRequestPb that = (CreateDatabaseInstanceRequestPb) o; return Objects.equals(databaseInstance, that.databaseInstance); } @@ -41,7 +37,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateDatabaseInstanceRequest.class) + return new ToStringer(CreateDatabaseInstanceRequestPb.class) .add("databaseInstance", databaseInstance) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java new file mode 100755 index 000000000..e4b0a766f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Create a Database Table */ +@Generated +@JsonSerialize(using = CreateDatabaseTableRequest.CreateDatabaseTableRequestSerializer.class) +@JsonDeserialize(using = CreateDatabaseTableRequest.CreateDatabaseTableRequestDeserializer.class) +public class CreateDatabaseTableRequest { + /** Next field marker: 13 */ + private DatabaseTable table; + + public CreateDatabaseTableRequest setTable(DatabaseTable table) { + this.table = table; + return this; + } + + public DatabaseTable getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseTableRequest that = (CreateDatabaseTableRequest) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseTableRequest.class).add("table", table).toString(); + } + + CreateDatabaseTableRequestPb toPb() { + CreateDatabaseTableRequestPb pb = new CreateDatabaseTableRequestPb(); + pb.setTable(table); + + return pb; + } + + static CreateDatabaseTableRequest fromPb(CreateDatabaseTableRequestPb pb) { + CreateDatabaseTableRequest model = new 
CreateDatabaseTableRequest(); + model.setTable(pb.getTable()); + + return model; + } + + public static class CreateDatabaseTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDatabaseTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDatabaseTableRequestDeserializer + extends JsonDeserializer { + @Override + public CreateDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDatabaseTableRequestPb pb = mapper.readValue(p, CreateDatabaseTableRequestPb.class); + return CreateDatabaseTableRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequestPb.java new file mode 100755 index 000000000..59de99d60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequestPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a Database Table */ +@Generated +class CreateDatabaseTableRequestPb { + @JsonProperty("table") + private DatabaseTable table; + + public CreateDatabaseTableRequestPb setTable(DatabaseTable table) { + this.table = table; + return this; + } + + public DatabaseTable getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseTableRequestPb that = (CreateDatabaseTableRequestPb) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseTableRequestPb.class).add("table", table).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java new file mode 100755 index 000000000..c48719820 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Create a Synced Database Table */ +@Generated +@JsonSerialize( + using = CreateSyncedDatabaseTableRequest.CreateSyncedDatabaseTableRequestSerializer.class) +@JsonDeserialize( + using = CreateSyncedDatabaseTableRequest.CreateSyncedDatabaseTableRequestDeserializer.class) +public class CreateSyncedDatabaseTableRequest { + /** Next field marker: 12 */ + private SyncedDatabaseTable syncedTable; + + public CreateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) { + this.syncedTable = syncedTable; + return this; + } + + public SyncedDatabaseTable getSyncedTable() { + return syncedTable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSyncedDatabaseTableRequest that = (CreateSyncedDatabaseTableRequest) o; + return Objects.equals(syncedTable, that.syncedTable); + } + + @Override + public int hashCode() { + return Objects.hash(syncedTable); + } + + @Override + public String toString() { + return new ToStringer(CreateSyncedDatabaseTableRequest.class) + .add("syncedTable", syncedTable) + .toString(); + } + + CreateSyncedDatabaseTableRequestPb toPb() { + CreateSyncedDatabaseTableRequestPb pb = new CreateSyncedDatabaseTableRequestPb(); + 
pb.setSyncedTable(syncedTable); + + return pb; + } + + static CreateSyncedDatabaseTableRequest fromPb(CreateSyncedDatabaseTableRequestPb pb) { + CreateSyncedDatabaseTableRequest model = new CreateSyncedDatabaseTableRequest(); + model.setSyncedTable(pb.getSyncedTable()); + + return model; + } + + public static class CreateSyncedDatabaseTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateSyncedDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateSyncedDatabaseTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateSyncedDatabaseTableRequestDeserializer + extends JsonDeserializer { + @Override + public CreateSyncedDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateSyncedDatabaseTableRequestPb pb = + mapper.readValue(p, CreateSyncedDatabaseTableRequestPb.class); + return CreateSyncedDatabaseTableRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequestPb.java similarity index 71% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequestPb.java index 2c1ea4700..78e850f3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks 
SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,12 +9,11 @@ /** Create a Synced Database Table */ @Generated -public class CreateSyncedDatabaseTableRequest { - /** Next field marker: 10 */ +class CreateSyncedDatabaseTableRequestPb { @JsonProperty("synced_table") private SyncedDatabaseTable syncedTable; - public CreateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) { + public CreateSyncedDatabaseTableRequestPb setSyncedTable(SyncedDatabaseTable syncedTable) { this.syncedTable = syncedTable; return this; } @@ -27,7 +26,7 @@ public SyncedDatabaseTable getSyncedTable() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CreateSyncedDatabaseTableRequest that = (CreateSyncedDatabaseTableRequest) o; + CreateSyncedDatabaseTableRequestPb that = (CreateSyncedDatabaseTableRequestPb) o; return Objects.equals(syncedTable, that.syncedTable); } @@ -38,7 +37,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateSyncedDatabaseTableRequest.class) + return new ToStringer(CreateSyncedDatabaseTableRequestPb.class) .add("syncedTable", syncedTable) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java similarity index 77% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java index c91d638d3..1d452de83 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; @@ -9,18 +9,18 @@ /** Database Instances provide access to a database via REST API or direct SQL. */ @Generated -public class DatabaseInstancesAPI { - private static final Logger LOG = LoggerFactory.getLogger(DatabaseInstancesAPI.class); +public class DatabaseAPI { + private static final Logger LOG = LoggerFactory.getLogger(DatabaseAPI.class); - private final DatabaseInstancesService impl; + private final DatabaseService impl; /** Regular-use constructor */ - public DatabaseInstancesAPI(ApiClient apiClient) { - impl = new DatabaseInstancesImpl(apiClient); + public DatabaseAPI(ApiClient apiClient) { + impl = new DatabaseImpl(apiClient); } /** Constructor for mocks */ - public DatabaseInstancesAPI(DatabaseInstancesService mock) { + public DatabaseAPI(DatabaseService mock) { impl = mock; } @@ -43,6 +43,15 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req return impl.createDatabaseInstance(request); } + public DatabaseTable createDatabaseTable(DatabaseTable table) { + return createDatabaseTable(new CreateDatabaseTableRequest().setTable(table)); + } + + /** Create a Database Table. 
*/ + public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) { + return impl.createDatabaseTable(request); + } + public SyncedDatabaseTable createSyncedDatabaseTable(SyncedDatabaseTable syncedTable) { return createSyncedDatabaseTable( new CreateSyncedDatabaseTableRequest().setSyncedTable(syncedTable)); @@ -71,6 +80,15 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { impl.deleteDatabaseInstance(request); } + public void deleteDatabaseTable(String name) { + deleteDatabaseTable(new DeleteDatabaseTableRequest().setName(name)); + } + + /** Delete a Database Table. */ + public void deleteDatabaseTable(DeleteDatabaseTableRequest request) { + impl.deleteDatabaseTable(request); + } + public void deleteSyncedDatabaseTable(String name) { deleteSyncedDatabaseTable(new DeleteSyncedDatabaseTableRequest().setName(name)); } @@ -85,6 +103,11 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque return impl.findDatabaseInstanceByUid(request); } + /** Generates a credential that can be used to access database instances. */ + public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) { + return impl.generateDatabaseCredential(request); + } + public DatabaseCatalog getDatabaseCatalog(String name) { return getDatabaseCatalog(new GetDatabaseCatalogRequest().setName(name)); } @@ -103,6 +126,15 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) return impl.getDatabaseInstance(request); } + public DatabaseTable getDatabaseTable(String name) { + return getDatabaseTable(new GetDatabaseTableRequest().setName(name)); + } + + /** Get a Database Table. 
*/ + public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) { + return impl.getDatabaseTable(request); + } + public SyncedDatabaseTable getSyncedDatabaseTable(String name) { return getSyncedDatabaseTable(new GetSyncedDatabaseTableRequest().setName(name)); } @@ -141,7 +173,7 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req return impl.updateDatabaseInstance(request); } - public DatabaseInstancesService impl() { + public DatabaseService impl() { return impl; } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java new file mode 100755 index 000000000..e3ee9a416 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java @@ -0,0 +1,152 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DatabaseCatalog.DatabaseCatalogSerializer.class) +@JsonDeserialize(using = DatabaseCatalog.DatabaseCatalogDeserializer.class) +public class DatabaseCatalog { + /** */ + private Boolean createDatabaseIfNotExists; + + /** The name of the DatabaseInstance housing the database. 
*/ + private String databaseInstanceName; + + /** The name of the database (in a instance) associated with the catalog. */ + private String databaseName; + + /** The name of the catalog in UC. */ + private String name; + + /** */ + private String uid; + + public DatabaseCatalog setCreateDatabaseIfNotExists(Boolean createDatabaseIfNotExists) { + this.createDatabaseIfNotExists = createDatabaseIfNotExists; + return this; + } + + public Boolean getCreateDatabaseIfNotExists() { + return createDatabaseIfNotExists; + } + + public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public DatabaseCatalog setDatabaseName(String databaseName) { + this.databaseName = databaseName; + return this; + } + + public String getDatabaseName() { + return databaseName; + } + + public DatabaseCatalog setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseCatalog setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseCatalog that = (DatabaseCatalog) o; + return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists) + && Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(databaseName, that.databaseName) + && Objects.equals(name, that.name) + && Objects.equals(uid, that.uid); + } + + @Override + public int hashCode() { + return Objects.hash(createDatabaseIfNotExists, databaseInstanceName, databaseName, name, uid); + } + + @Override + public String toString() { + return new ToStringer(DatabaseCatalog.class) + .add("createDatabaseIfNotExists", createDatabaseIfNotExists) + 
.add("databaseInstanceName", databaseInstanceName) + .add("databaseName", databaseName) + .add("name", name) + .add("uid", uid) + .toString(); + } + + DatabaseCatalogPb toPb() { + DatabaseCatalogPb pb = new DatabaseCatalogPb(); + pb.setCreateDatabaseIfNotExists(createDatabaseIfNotExists); + pb.setDatabaseInstanceName(databaseInstanceName); + pb.setDatabaseName(databaseName); + pb.setName(name); + pb.setUid(uid); + + return pb; + } + + static DatabaseCatalog fromPb(DatabaseCatalogPb pb) { + DatabaseCatalog model = new DatabaseCatalog(); + model.setCreateDatabaseIfNotExists(pb.getCreateDatabaseIfNotExists()); + model.setDatabaseInstanceName(pb.getDatabaseInstanceName()); + model.setDatabaseName(pb.getDatabaseName()); + model.setName(pb.getName()); + model.setUid(pb.getUid()); + + return model; + } + + public static class DatabaseCatalogSerializer extends JsonSerializer { + @Override + public void serialize(DatabaseCatalog value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabaseCatalogPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabaseCatalogDeserializer extends JsonDeserializer { + @Override + public DatabaseCatalog deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabaseCatalogPb pb = mapper.readValue(p, DatabaseCatalogPb.class); + return DatabaseCatalog.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalogPb.java similarity index 75% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalogPb.java index b4d1c2d57..a8a21a7c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalogPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -8,28 +8,23 @@ import java.util.Objects; @Generated -public class DatabaseCatalog { - /** */ +class DatabaseCatalogPb { @JsonProperty("create_database_if_not_exists") private Boolean createDatabaseIfNotExists; - /** The name of the DatabaseInstance housing the database. */ @JsonProperty("database_instance_name") private String databaseInstanceName; - /** The name of the database (in a instance) associated with the catalog. */ @JsonProperty("database_name") private String databaseName; - /** The name of the catalog in UC. 
*/ @JsonProperty("name") private String name; - /** */ @JsonProperty("uid") private String uid; - public DatabaseCatalog setCreateDatabaseIfNotExists(Boolean createDatabaseIfNotExists) { + public DatabaseCatalogPb setCreateDatabaseIfNotExists(Boolean createDatabaseIfNotExists) { this.createDatabaseIfNotExists = createDatabaseIfNotExists; return this; } @@ -38,7 +33,7 @@ public Boolean getCreateDatabaseIfNotExists() { return createDatabaseIfNotExists; } - public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) { + public DatabaseCatalogPb setDatabaseInstanceName(String databaseInstanceName) { this.databaseInstanceName = databaseInstanceName; return this; } @@ -47,7 +42,7 @@ public String getDatabaseInstanceName() { return databaseInstanceName; } - public DatabaseCatalog setDatabaseName(String databaseName) { + public DatabaseCatalogPb setDatabaseName(String databaseName) { this.databaseName = databaseName; return this; } @@ -56,7 +51,7 @@ public String getDatabaseName() { return databaseName; } - public DatabaseCatalog setName(String name) { + public DatabaseCatalogPb setName(String name) { this.name = name; return this; } @@ -65,7 +60,7 @@ public String getName() { return name; } - public DatabaseCatalog setUid(String uid) { + public DatabaseCatalogPb setUid(String uid) { this.uid = uid; return this; } @@ -78,7 +73,7 @@ public String getUid() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - DatabaseCatalog that = (DatabaseCatalog) o; + DatabaseCatalogPb that = (DatabaseCatalogPb) o; return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists) && Objects.equals(databaseInstanceName, that.databaseInstanceName) && Objects.equals(databaseName, that.databaseName) @@ -93,7 +88,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DatabaseCatalog.class) + return new ToStringer(DatabaseCatalogPb.class) 
.add("createDatabaseIfNotExists", createDatabaseIfNotExists) .add("databaseInstanceName", databaseInstanceName) .add("databaseName", databaseName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java new file mode 100755 index 000000000..3e53880d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DatabaseCredential.DatabaseCredentialSerializer.class) +@JsonDeserialize(using = DatabaseCredential.DatabaseCredentialDeserializer.class) +public class DatabaseCredential { + /** */ + private String token; + + public DatabaseCredential setToken(String token) { + this.token = token; + return this; + } + + public String getToken() { + return token; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseCredential that = (DatabaseCredential) o; + return Objects.equals(token, that.token); + } + + @Override + public int hashCode() { + 
return Objects.hash(token); + } + + @Override + public String toString() { + return new ToStringer(DatabaseCredential.class).add("token", token).toString(); + } + + DatabaseCredentialPb toPb() { + DatabaseCredentialPb pb = new DatabaseCredentialPb(); + pb.setToken(token); + + return pb; + } + + static DatabaseCredential fromPb(DatabaseCredentialPb pb) { + DatabaseCredential model = new DatabaseCredential(); + model.setToken(pb.getToken()); + + return model; + } + + public static class DatabaseCredentialSerializer extends JsonSerializer { + @Override + public void serialize(DatabaseCredential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabaseCredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabaseCredentialDeserializer extends JsonDeserializer { + @Override + public DatabaseCredential deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabaseCredentialPb pb = mapper.readValue(p, DatabaseCredentialPb.class); + return DatabaseCredential.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredentialPb.java new file mode 100755 index 000000000..37acbf748 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredentialPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DatabaseCredentialPb { + @JsonProperty("token") + private String token; + + public DatabaseCredentialPb setToken(String token) { + this.token = token; + return this; + } + + public String getToken() { + return token; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseCredentialPb that = (DatabaseCredentialPb) o; + return Objects.equals(token, that.token); + } + + @Override + public int hashCode() { + return Objects.hash(token); + } + + @Override + public String toString() { + return new ToStringer(DatabaseCredentialPb.class).add("token", token).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java index bea3e0708..5a18cc21b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.DatabricksException; @@ -7,12 +7,12 @@ import com.databricks.sdk.support.Generated; import java.io.IOException; -/** Package-local implementation of DatabaseInstances */ +/** Package-local implementation of Database */ @Generated -class DatabaseInstancesImpl implements DatabaseInstancesService { +class DatabaseImpl implements DatabaseService { private final ApiClient apiClient; - public DatabaseInstancesImpl(ApiClient apiClient) { + public DatabaseImpl(ApiClient apiClient) { this.apiClient = apiClient; } @@ -21,7 +21,7 @@ public DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest reques String path = "/api/2.0/database/catalogs"; try { Request req = new Request("POST", path, apiClient.serialize(request.getCatalog())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DatabaseCatalog.class); @@ -35,7 +35,7 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req String path = "/api/2.0/database/instances"; try { Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseInstance())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DatabaseInstance.class); @@ -44,12 +44,26 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req } } + @Override + public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) { + String path = "/api/2.0/database/tables"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getTable())); + ApiClient.setQuery(req, request.toPb()); + 
req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) { String path = "/api/2.0/database/synced_tables"; try { Request req = new Request("POST", path, apiClient.serialize(request.getSyncedTable())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SyncedDatabaseTable.class); @@ -63,7 +77,7 @@ public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) { String path = String.format("/api/2.0/database/catalogs/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteDatabaseCatalogResponse.class); } catch (IOException e) { @@ -76,7 +90,7 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { String path = String.format("/api/2.0/database/instances/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteDatabaseInstanceResponse.class); } catch (IOException e) { @@ -84,12 +98,25 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { } } + @Override + public void deleteDatabaseTable(DeleteDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/tables/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request.toPb()); + 
req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteDatabaseTableResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) { String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteSyncedDatabaseTableResponse.class); } catch (IOException e) { @@ -102,7 +129,7 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque String path = "/api/2.0/database/instances:findByUid"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DatabaseInstance.class); } catch (IOException e) { @@ -110,12 +137,26 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque } } + @Override + public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) { + String path = "/api/2.0/database/credentials"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseCredential.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) { String path = String.format("/api/2.0/database/catalogs/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DatabaseCatalog.class); } catch (IOException e) { @@ -128,7 +169,7 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) String path = String.format("/api/2.0/database/instances/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DatabaseInstance.class); } catch (IOException e) { @@ -136,12 +177,25 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) } } + @Override + public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/tables/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) { String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SyncedDatabaseTable.class); } catch (IOException e) { @@ -154,7 +208,7 @@ public ListDatabaseInstancesResponse listDatabaseInstances(ListDatabaseInstances String path = "/api/2.0/database/instances"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListDatabaseInstancesResponse.class); } catch (IOException 
e) { @@ -167,7 +221,7 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req String path = String.format("/api/2.0/database/instances/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseInstance())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DatabaseInstance.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java similarity index 59% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java index ce72b3cba..9d1ef0a98 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java @@ -1,79 +1,55 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. */ @Generated +@JsonSerialize(using = DatabaseInstance.DatabaseInstanceSerializer.class) +@JsonDeserialize(using = DatabaseInstance.DatabaseInstanceDeserializer.class) public class DatabaseInstance { - /** Password for admin user to create. If not provided, no user will be created. */ - @JsonProperty("admin_password") - private String adminPassword; - - /** Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'. */ - @JsonProperty("admin_rolename") - private String adminRolename; - /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4". */ - @JsonProperty("capacity") private String capacity; /** The timestamp when the instance was created. */ - @JsonProperty("creation_time") private String creationTime; /** The email of the creator of the instance. */ - @JsonProperty("creator") private String creator; /** The name of the instance. This is the unique identifier for the instance. */ - @JsonProperty("name") private String name; /** The version of Postgres running on the instance. 
*/ - @JsonProperty("pg_version") private String pgVersion; /** The DNS endpoint to connect to the instance for read+write access. */ - @JsonProperty("read_write_dns") private String readWriteDns; /** The current state of the instance. */ - @JsonProperty("state") private DatabaseInstanceState state; /** Whether the instance is stopped. */ - @JsonProperty("stopped") private Boolean stopped; /** An immutable UUID identifier for the instance. */ - @JsonProperty("uid") private String uid; - public DatabaseInstance setAdminPassword(String adminPassword) { - this.adminPassword = adminPassword; - return this; - } - - public String getAdminPassword() { - return adminPassword; - } - - public DatabaseInstance setAdminRolename(String adminRolename) { - this.adminRolename = adminRolename; - return this; - } - - public String getAdminRolename() { - return adminRolename; - } - public DatabaseInstance setCapacity(String capacity) { this.capacity = capacity; return this; @@ -160,9 +136,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DatabaseInstance that = (DatabaseInstance) o; - return Objects.equals(adminPassword, that.adminPassword) - && Objects.equals(adminRolename, that.adminRolename) - && Objects.equals(capacity, that.capacity) + return Objects.equals(capacity, that.capacity) && Objects.equals(creationTime, that.creationTime) && Objects.equals(creator, that.creator) && Objects.equals(name, that.name) @@ -176,24 +150,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - adminPassword, - adminRolename, - capacity, - creationTime, - creator, - name, - pgVersion, - readWriteDns, - state, - stopped, - uid); + capacity, creationTime, creator, name, pgVersion, readWriteDns, state, stopped, uid); } @Override public String toString() { return new ToStringer(DatabaseInstance.class) - .add("adminPassword", adminPassword) - .add("adminRolename", adminRolename) 
.add("capacity", capacity) .add("creationTime", creationTime) .add("creator", creator) @@ -205,4 +167,54 @@ public String toString() { .add("uid", uid) .toString(); } + + DatabaseInstancePb toPb() { + DatabaseInstancePb pb = new DatabaseInstancePb(); + pb.setCapacity(capacity); + pb.setCreationTime(creationTime); + pb.setCreator(creator); + pb.setName(name); + pb.setPgVersion(pgVersion); + pb.setReadWriteDns(readWriteDns); + pb.setState(state); + pb.setStopped(stopped); + pb.setUid(uid); + + return pb; + } + + static DatabaseInstance fromPb(DatabaseInstancePb pb) { + DatabaseInstance model = new DatabaseInstance(); + model.setCapacity(pb.getCapacity()); + model.setCreationTime(pb.getCreationTime()); + model.setCreator(pb.getCreator()); + model.setName(pb.getName()); + model.setPgVersion(pb.getPgVersion()); + model.setReadWriteDns(pb.getReadWriteDns()); + model.setState(pb.getState()); + model.setStopped(pb.getStopped()); + model.setUid(pb.getUid()); + + return model; + } + + public static class DatabaseInstanceSerializer extends JsonSerializer { + @Override + public void serialize(DatabaseInstance value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabaseInstancePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabaseInstanceDeserializer extends JsonDeserializer { + @Override + public DatabaseInstance deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabaseInstancePb pb = mapper.readValue(p, DatabaseInstancePb.class); + return DatabaseInstance.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstancePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstancePb.java new file mode 100755 index 000000000..d0505f4b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstancePb.java @@ -0,0 +1,159 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + */ +@Generated +class DatabaseInstancePb { + @JsonProperty("capacity") + private String capacity; + + @JsonProperty("creation_time") + private String creationTime; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("name") + private String name; + + @JsonProperty("pg_version") + private String pgVersion; + + @JsonProperty("read_write_dns") + private String readWriteDns; + + @JsonProperty("state") + private DatabaseInstanceState state; + + @JsonProperty("stopped") + private Boolean stopped; + + @JsonProperty("uid") + private String uid; + + public DatabaseInstancePb setCapacity(String capacity) { + this.capacity = capacity; + return this; + } + + public String getCapacity() { + return capacity; + } + + public DatabaseInstancePb setCreationTime(String creationTime) { + this.creationTime = creationTime; + return this; + } + + public String getCreationTime() { + return creationTime; + } + + public DatabaseInstancePb setCreator(String creator) { + this.creator = creator; + return this; + } + + 
public String getCreator() { + return creator; + } + + public DatabaseInstancePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseInstancePb setPgVersion(String pgVersion) { + this.pgVersion = pgVersion; + return this; + } + + public String getPgVersion() { + return pgVersion; + } + + public DatabaseInstancePb setReadWriteDns(String readWriteDns) { + this.readWriteDns = readWriteDns; + return this; + } + + public String getReadWriteDns() { + return readWriteDns; + } + + public DatabaseInstancePb setState(DatabaseInstanceState state) { + this.state = state; + return this; + } + + public DatabaseInstanceState getState() { + return state; + } + + public DatabaseInstancePb setStopped(Boolean stopped) { + this.stopped = stopped; + return this; + } + + public Boolean getStopped() { + return stopped; + } + + public DatabaseInstancePb setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseInstancePb that = (DatabaseInstancePb) o; + return Objects.equals(capacity, that.capacity) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(creator, that.creator) + && Objects.equals(name, that.name) + && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(readWriteDns, that.readWriteDns) + && Objects.equals(state, that.state) + && Objects.equals(stopped, that.stopped) + && Objects.equals(uid, that.uid); + } + + @Override + public int hashCode() { + return Objects.hash( + capacity, creationTime, creator, name, pgVersion, readWriteDns, state, stopped, uid); + } + + @Override + public String toString() { + return new ToStringer(DatabaseInstancePb.class) + .add("capacity", capacity) + .add("creationTime", creationTime) + .add("creator", creator) + .add("name", name) + 
.add("pgVersion", pgVersion) + .add("readWriteDns", readWriteDns) + .add("state", state) + .add("stopped", stopped) + .add("uid", uid) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java similarity index 84% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java index 909921d03..536812f91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java similarity index 76% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java index 9bf012769..09dcbff3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; @@ -11,7 +11,7 @@ *

Evolving: this interface is under development. Method signatures may change. */ @Generated -public interface DatabaseInstancesService { +public interface DatabaseService { /** Create a Database Catalog. */ DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest createDatabaseCatalogRequest); @@ -19,6 +19,9 @@ public interface DatabaseInstancesService { DatabaseInstance createDatabaseInstance( CreateDatabaseInstanceRequest createDatabaseInstanceRequest); + /** Create a Database Table. */ + DatabaseTable createDatabaseTable(CreateDatabaseTableRequest createDatabaseTableRequest); + /** Create a Synced Database Table. */ SyncedDatabaseTable createSyncedDatabaseTable( CreateSyncedDatabaseTableRequest createSyncedDatabaseTableRequest); @@ -29,6 +32,9 @@ SyncedDatabaseTable createSyncedDatabaseTable( /** Delete a Database Instance. */ void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest); + /** Delete a Database Table. */ + void deleteDatabaseTable(DeleteDatabaseTableRequest deleteDatabaseTableRequest); + /** Delete a Synced Database Table. */ void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest); @@ -36,12 +42,19 @@ SyncedDatabaseTable createSyncedDatabaseTable( DatabaseInstance findDatabaseInstanceByUid( FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest); + /** Generates a credential that can be used to access database instances. */ + DatabaseCredential generateDatabaseCredential( + GenerateDatabaseCredentialRequest generateDatabaseCredentialRequest); + /** Get a Database Catalog. */ DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest getDatabaseCatalogRequest); /** Get a Database Instance. */ DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest); + /** Get a Database Table. */ + DatabaseTable getDatabaseTable(GetDatabaseTableRequest getDatabaseTableRequest); + /** Get a Synced Database Table. 
*/ SyncedDatabaseTable getSyncedDatabaseTable( GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java new file mode 100755 index 000000000..89e2e36cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java @@ -0,0 +1,154 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Next field marker: 13 */ +@Generated +@JsonSerialize(using = DatabaseTable.DatabaseTableSerializer.class) +@JsonDeserialize(using = DatabaseTable.DatabaseTableDeserializer.class) +public class DatabaseTable { + /** + * Name of the target database instance. This is required when creating database tables in + * standard catalogs. This is optional when creating database tables in registered catalogs. If + * this field is specified when creating database tables in registered catalogs, the database + * instance name MUST match that of the registered catalog (or the request will be rejected). 
+ */ + private String databaseInstanceName; + + /** + * Target Postgres database object (logical database) name for this table. This field is optional + * in all scenarios. + * + *

When creating a table in a registered Postgres catalog, the target Postgres database name is + * inferred to be that of the registered catalog. If this field is specified in this scenario, the + * Postgres database name MUST match that of the registered catalog (or the request will be + * rejected). + * + *

When creating a table in a standard catalog, the target database name is inferred to be that + * of the standard catalog. In this scenario, specifying this field will allow targeting an + * arbitrary postgres database. Note that this has implications for the + * `create_database_objects_is_missing` field in `spec`. + */ + private String logicalDatabaseName; + + /** Full three-part (catalog, schema, table) name of the table. */ + private String name; + + /** Data serving REST API URL for this table */ + private String tableServingUrl; + + public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public DatabaseTable setLogicalDatabaseName(String logicalDatabaseName) { + this.logicalDatabaseName = logicalDatabaseName; + return this; + } + + public String getLogicalDatabaseName() { + return logicalDatabaseName; + } + + public DatabaseTable setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseTable setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseTable that = (DatabaseTable) o; + return Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) + && Objects.equals(name, that.name) + && Objects.equals(tableServingUrl, that.tableServingUrl); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl); + } + + @Override + public String toString() { + return new ToStringer(DatabaseTable.class) + 
.add("databaseInstanceName", databaseInstanceName) + .add("logicalDatabaseName", logicalDatabaseName) + .add("name", name) + .add("tableServingUrl", tableServingUrl) + .toString(); + } + + DatabaseTablePb toPb() { + DatabaseTablePb pb = new DatabaseTablePb(); + pb.setDatabaseInstanceName(databaseInstanceName); + pb.setLogicalDatabaseName(logicalDatabaseName); + pb.setName(name); + pb.setTableServingUrl(tableServingUrl); + + return pb; + } + + static DatabaseTable fromPb(DatabaseTablePb pb) { + DatabaseTable model = new DatabaseTable(); + model.setDatabaseInstanceName(pb.getDatabaseInstanceName()); + model.setLogicalDatabaseName(pb.getLogicalDatabaseName()); + model.setName(pb.getName()); + model.setTableServingUrl(pb.getTableServingUrl()); + + return model; + } + + public static class DatabaseTableSerializer extends JsonSerializer { + @Override + public void serialize(DatabaseTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabaseTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabaseTableDeserializer extends JsonDeserializer { + @Override + public DatabaseTable deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabaseTablePb pb = mapper.readValue(p, DatabaseTablePb.class); + return DatabaseTable.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTablePb.java new file mode 100755 index 000000000..051150e55 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTablePb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field marker: 13 */ +@Generated +class DatabaseTablePb { + @JsonProperty("database_instance_name") + private String databaseInstanceName; + + @JsonProperty("logical_database_name") + private String logicalDatabaseName; + + @JsonProperty("name") + private String name; + + @JsonProperty("table_serving_url") + private String tableServingUrl; + + public DatabaseTablePb setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public DatabaseTablePb setLogicalDatabaseName(String logicalDatabaseName) { + this.logicalDatabaseName = logicalDatabaseName; + return this; + } + + public String getLogicalDatabaseName() { + return logicalDatabaseName; + } + + public DatabaseTablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseTablePb setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseTablePb that = (DatabaseTablePb) o; + return Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) + && Objects.equals(name, that.name) + && Objects.equals(tableServingUrl, that.tableServingUrl); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl); + } + + @Override + public String toString() { + return new 
ToStringer(DatabaseTablePb.class) + .add("databaseInstanceName", databaseInstanceName) + .add("logicalDatabaseName", logicalDatabaseName) + .add("name", name) + .add("tableServingUrl", tableServingUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java new file mode 100755 index 000000000..deaa9ec18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Delete a Database Catalog */ +@Generated +@JsonSerialize(using = DeleteDatabaseCatalogRequest.DeleteDatabaseCatalogRequestSerializer.class) +@JsonDeserialize( + using = DeleteDatabaseCatalogRequest.DeleteDatabaseCatalogRequestDeserializer.class) +public class DeleteDatabaseCatalogRequest { + /** */ + private String name; + + public DeleteDatabaseCatalogRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o 
== null || getClass() != o.getClass()) return false; + DeleteDatabaseCatalogRequest that = (DeleteDatabaseCatalogRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseCatalogRequest.class).add("name", name).toString(); + } + + DeleteDatabaseCatalogRequestPb toPb() { + DeleteDatabaseCatalogRequestPb pb = new DeleteDatabaseCatalogRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteDatabaseCatalogRequest fromPb(DeleteDatabaseCatalogRequestPb pb) { + DeleteDatabaseCatalogRequest model = new DeleteDatabaseCatalogRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteDatabaseCatalogRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDatabaseCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDatabaseCatalogRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseCatalogRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseCatalogRequestPb pb = mapper.readValue(p, DeleteDatabaseCatalogRequestPb.class); + return DeleteDatabaseCatalogRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequestPb.java new file mode 100755 index 000000000..9144560f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Database Catalog */ +@Generated +class DeleteDatabaseCatalogRequestPb { + @JsonIgnore private String name; + + public DeleteDatabaseCatalogRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseCatalogRequestPb that = (DeleteDatabaseCatalogRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseCatalogRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java new file mode 100755 index 000000000..720034f9f --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DeleteDatabaseCatalogResponse.DeleteDatabaseCatalogResponseSerializer.class) +@JsonDeserialize( + using = DeleteDatabaseCatalogResponse.DeleteDatabaseCatalogResponseDeserializer.class) +public class DeleteDatabaseCatalogResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseCatalogResponse.class).toString(); + } + + DeleteDatabaseCatalogResponsePb toPb() { + DeleteDatabaseCatalogResponsePb pb = new DeleteDatabaseCatalogResponsePb(); + + return pb; + } + + static DeleteDatabaseCatalogResponse fromPb(DeleteDatabaseCatalogResponsePb pb) { + DeleteDatabaseCatalogResponse model = new DeleteDatabaseCatalogResponse(); + + return model; + } + + public static class DeleteDatabaseCatalogResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + 
DeleteDatabaseCatalogResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseCatalogResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDatabaseCatalogResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseCatalogResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseCatalogResponsePb pb = + mapper.readValue(p, DeleteDatabaseCatalogResponsePb.class); + return DeleteDatabaseCatalogResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponsePb.java new file mode 100755 index 000000000..1bf1ba1e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteDatabaseCatalogResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseCatalogResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java new file mode 100755 index 000000000..2509ee389 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Delete a Database Instance */ +@Generated +@JsonSerialize(using = DeleteDatabaseInstanceRequest.DeleteDatabaseInstanceRequestSerializer.class) +@JsonDeserialize( + using = DeleteDatabaseInstanceRequest.DeleteDatabaseInstanceRequestDeserializer.class) +public class DeleteDatabaseInstanceRequest { + /** + * By default, a instance cannot be deleted if it has descendant instances created via PITR. If + * this flag is specified as true, all descendent instances will be deleted as well. + */ + private Boolean force; + + /** Name of the instance to delete. */ + private String name; + + /** + * If false, the database instance is soft deleted. Soft deleted instances behave as if they are + * deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted + * by calling the undelete API for a limited time. If true, the database instance is hard deleted + * and cannot be undeleted. 
+ */ + private Boolean purge; + + public DeleteDatabaseInstanceRequest setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteDatabaseInstanceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeleteDatabaseInstanceRequest setPurge(Boolean purge) { + this.purge = purge; + return this; + } + + public Boolean getPurge() { + return purge; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseInstanceRequest that = (DeleteDatabaseInstanceRequest) o; + return Objects.equals(force, that.force) + && Objects.equals(name, that.name) + && Objects.equals(purge, that.purge); + } + + @Override + public int hashCode() { + return Objects.hash(force, name, purge); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseInstanceRequest.class) + .add("force", force) + .add("name", name) + .add("purge", purge) + .toString(); + } + + DeleteDatabaseInstanceRequestPb toPb() { + DeleteDatabaseInstanceRequestPb pb = new DeleteDatabaseInstanceRequestPb(); + pb.setForce(force); + pb.setName(name); + pb.setPurge(purge); + + return pb; + } + + static DeleteDatabaseInstanceRequest fromPb(DeleteDatabaseInstanceRequestPb pb) { + DeleteDatabaseInstanceRequest model = new DeleteDatabaseInstanceRequest(); + model.setForce(pb.getForce()); + model.setName(pb.getName()); + model.setPurge(pb.getPurge()); + + return model; + } + + public static class DeleteDatabaseInstanceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDatabaseInstanceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseInstanceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
DeleteDatabaseInstanceRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseInstanceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseInstanceRequestPb pb = + mapper.readValue(p, DeleteDatabaseInstanceRequestPb.class); + return DeleteDatabaseInstanceRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequestPb.java similarity index 56% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequestPb.java index e043e1347..98f83a671 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; @@ -10,29 +10,18 @@ /** Delete a Database Instance */ @Generated -public class DeleteDatabaseInstanceRequest { - /** - * By default, a instance cannot be deleted if it has descendant instances created via PITR. If - * this flag is specified as true, all descendent instances will be deleted as well. - */ +class DeleteDatabaseInstanceRequestPb { @JsonIgnore @QueryParam("force") private Boolean force; - /** Name of the instance to delete. 
*/ @JsonIgnore private String name; - /** - * If false, the database instance is soft deleted. Soft deleted instances behave as if they are - * deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted - * by calling the undelete API for a limited time. If true, the database instance is hard deleted - * and cannot be undeleted. - */ @JsonIgnore @QueryParam("purge") private Boolean purge; - public DeleteDatabaseInstanceRequest setForce(Boolean force) { + public DeleteDatabaseInstanceRequestPb setForce(Boolean force) { this.force = force; return this; } @@ -41,7 +30,7 @@ public Boolean getForce() { return force; } - public DeleteDatabaseInstanceRequest setName(String name) { + public DeleteDatabaseInstanceRequestPb setName(String name) { this.name = name; return this; } @@ -50,7 +39,7 @@ public String getName() { return name; } - public DeleteDatabaseInstanceRequest setPurge(Boolean purge) { + public DeleteDatabaseInstanceRequestPb setPurge(Boolean purge) { this.purge = purge; return this; } @@ -63,7 +52,7 @@ public Boolean getPurge() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - DeleteDatabaseInstanceRequest that = (DeleteDatabaseInstanceRequest) o; + DeleteDatabaseInstanceRequestPb that = (DeleteDatabaseInstanceRequestPb) o; return Objects.equals(force, that.force) && Objects.equals(name, that.name) && Objects.equals(purge, that.purge); @@ -76,7 +65,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteDatabaseInstanceRequest.class) + return new ToStringer(DeleteDatabaseInstanceRequestPb.class) .add("force", force) .add("name", name) .add("purge", purge) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java new file mode 100755 index 
000000000..f6724b841 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize( + using = DeleteDatabaseInstanceResponse.DeleteDatabaseInstanceResponseSerializer.class) +@JsonDeserialize( + using = DeleteDatabaseInstanceResponse.DeleteDatabaseInstanceResponseDeserializer.class) +public class DeleteDatabaseInstanceResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseInstanceResponse.class).toString(); + } + + DeleteDatabaseInstanceResponsePb toPb() { + DeleteDatabaseInstanceResponsePb pb = new DeleteDatabaseInstanceResponsePb(); + + return pb; + } + + static DeleteDatabaseInstanceResponse fromPb(DeleteDatabaseInstanceResponsePb pb) { + DeleteDatabaseInstanceResponse model = new DeleteDatabaseInstanceResponse(); + + return model; + } + + public static class DeleteDatabaseInstanceResponseSerializer + extends 
JsonSerializer { + @Override + public void serialize( + DeleteDatabaseInstanceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseInstanceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDatabaseInstanceResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseInstanceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseInstanceResponsePb pb = + mapper.readValue(p, DeleteDatabaseInstanceResponsePb.class); + return DeleteDatabaseInstanceResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponsePb.java new file mode 100755 index 000000000..6f43c12a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteDatabaseInstanceResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseInstanceResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java new file mode 100755 index 000000000..1682a65b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Delete a Database Table */ +@Generated +@JsonSerialize(using = DeleteDatabaseTableRequest.DeleteDatabaseTableRequestSerializer.class) +@JsonDeserialize(using = DeleteDatabaseTableRequest.DeleteDatabaseTableRequestDeserializer.class) +public class DeleteDatabaseTableRequest { + /** */ + private String name; + + public DeleteDatabaseTableRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseTableRequest that = (DeleteDatabaseTableRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseTableRequest.class).add("name", name).toString(); + } + + DeleteDatabaseTableRequestPb toPb() { + DeleteDatabaseTableRequestPb pb = new DeleteDatabaseTableRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteDatabaseTableRequest fromPb(DeleteDatabaseTableRequestPb pb) { + DeleteDatabaseTableRequest model = new DeleteDatabaseTableRequest(); + model.setName(pb.getName()); + + return model; + } 
+ + public static class DeleteDatabaseTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDatabaseTableRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseTableRequestPb pb = mapper.readValue(p, DeleteDatabaseTableRequestPb.class); + return DeleteDatabaseTableRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequestPb.java new file mode 100755 index 000000000..2e8fd785a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Database Table */ +@Generated +class DeleteDatabaseTableRequestPb { + @JsonIgnore private String name; + + public DeleteDatabaseTableRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseTableRequestPb that = (DeleteDatabaseTableRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseTableRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java new file mode 100755 index 000000000..a0476ae84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DeleteDatabaseTableResponse.DeleteDatabaseTableResponseSerializer.class) +@JsonDeserialize(using = DeleteDatabaseTableResponse.DeleteDatabaseTableResponseDeserializer.class) +public class DeleteDatabaseTableResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseTableResponse.class).toString(); + } + + DeleteDatabaseTableResponsePb toPb() { + DeleteDatabaseTableResponsePb pb = new DeleteDatabaseTableResponsePb(); + + return pb; + } + + static DeleteDatabaseTableResponse fromPb(DeleteDatabaseTableResponsePb pb) { + DeleteDatabaseTableResponse model = new DeleteDatabaseTableResponse(); + + return model; + } + + public static class DeleteDatabaseTableResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDatabaseTableResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDatabaseTableResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
DeleteDatabaseTableResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDatabaseTableResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDatabaseTableResponsePb pb = mapper.readValue(p, DeleteDatabaseTableResponsePb.class); + return DeleteDatabaseTableResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponsePb.java new file mode 100755 index 000000000..0c361fbc1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteDatabaseTableResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseTableResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java new file mode 100755 index 000000000..11868c12a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java @@ -0,0 +1,93 @@ +// Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Delete a Synced Database Table */ +@Generated +@JsonSerialize( + using = DeleteSyncedDatabaseTableRequest.DeleteSyncedDatabaseTableRequestSerializer.class) +@JsonDeserialize( + using = DeleteSyncedDatabaseTableRequest.DeleteSyncedDatabaseTableRequestDeserializer.class) +public class DeleteSyncedDatabaseTableRequest { + /** */ + private String name; + + public DeleteSyncedDatabaseTableRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSyncedDatabaseTableRequest that = (DeleteSyncedDatabaseTableRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteSyncedDatabaseTableRequest.class).add("name", name).toString(); + } + + DeleteSyncedDatabaseTableRequestPb toPb() { + DeleteSyncedDatabaseTableRequestPb pb = new DeleteSyncedDatabaseTableRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteSyncedDatabaseTableRequest 
fromPb(DeleteSyncedDatabaseTableRequestPb pb) { + DeleteSyncedDatabaseTableRequest model = new DeleteSyncedDatabaseTableRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteSyncedDatabaseTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSyncedDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSyncedDatabaseTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSyncedDatabaseTableRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteSyncedDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSyncedDatabaseTableRequestPb pb = + mapper.readValue(p, DeleteSyncedDatabaseTableRequestPb.class); + return DeleteSyncedDatabaseTableRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequestPb.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequestPb.java index 506ab393b..21f1ded07 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,11 +9,10 @@ /** Delete a Synced Database Table */ @Generated -public class DeleteSyncedDatabaseTableRequest { - /** */ +class DeleteSyncedDatabaseTableRequestPb { @JsonIgnore private String name; - public DeleteSyncedDatabaseTableRequest setName(String name) { + public DeleteSyncedDatabaseTableRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +25,7 @@ public String getName() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - DeleteSyncedDatabaseTableRequest that = (DeleteSyncedDatabaseTableRequest) o; + DeleteSyncedDatabaseTableRequestPb that = (DeleteSyncedDatabaseTableRequestPb) o; return Objects.equals(name, that.name); } @@ -37,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteSyncedDatabaseTableRequest.class).add("name", name).toString(); + return new ToStringer(DeleteSyncedDatabaseTableRequestPb.class).add("name", name).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java new file mode 100755 index 000000000..cc5edc734 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize( + using = DeleteSyncedDatabaseTableResponse.DeleteSyncedDatabaseTableResponseSerializer.class) +@JsonDeserialize( + using = DeleteSyncedDatabaseTableResponse.DeleteSyncedDatabaseTableResponseDeserializer.class) +public class DeleteSyncedDatabaseTableResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteSyncedDatabaseTableResponse.class).toString(); + } + + DeleteSyncedDatabaseTableResponsePb toPb() { + DeleteSyncedDatabaseTableResponsePb pb = new DeleteSyncedDatabaseTableResponsePb(); + + return pb; + } + + static DeleteSyncedDatabaseTableResponse fromPb(DeleteSyncedDatabaseTableResponsePb pb) { + DeleteSyncedDatabaseTableResponse model = new DeleteSyncedDatabaseTableResponse(); + + return model; + } + + public static class DeleteSyncedDatabaseTableResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSyncedDatabaseTableResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSyncedDatabaseTableResponsePb 
pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSyncedDatabaseTableResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteSyncedDatabaseTableResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSyncedDatabaseTableResponsePb pb = + mapper.readValue(p, DeleteSyncedDatabaseTableResponsePb.class); + return DeleteSyncedDatabaseTableResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponsePb.java new file mode 100755 index 000000000..60b87da91 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteSyncedDatabaseTableResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteSyncedDatabaseTableResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java new file mode 100755 index 000000000..3c85059cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java @@ -0,0 +1,93 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Find a Database Instance by uid */ +@Generated +@JsonSerialize( + using = FindDatabaseInstanceByUidRequest.FindDatabaseInstanceByUidRequestSerializer.class) +@JsonDeserialize( + using = FindDatabaseInstanceByUidRequest.FindDatabaseInstanceByUidRequestDeserializer.class) +public class FindDatabaseInstanceByUidRequest { + /** UID of the cluster to get. 
*/ + private String uid; + + public FindDatabaseInstanceByUidRequest setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FindDatabaseInstanceByUidRequest that = (FindDatabaseInstanceByUidRequest) o; + return Objects.equals(uid, that.uid); + } + + @Override + public int hashCode() { + return Objects.hash(uid); + } + + @Override + public String toString() { + return new ToStringer(FindDatabaseInstanceByUidRequest.class).add("uid", uid).toString(); + } + + FindDatabaseInstanceByUidRequestPb toPb() { + FindDatabaseInstanceByUidRequestPb pb = new FindDatabaseInstanceByUidRequestPb(); + pb.setUid(uid); + + return pb; + } + + static FindDatabaseInstanceByUidRequest fromPb(FindDatabaseInstanceByUidRequestPb pb) { + FindDatabaseInstanceByUidRequest model = new FindDatabaseInstanceByUidRequest(); + model.setUid(pb.getUid()); + + return model; + } + + public static class FindDatabaseInstanceByUidRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + FindDatabaseInstanceByUidRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FindDatabaseInstanceByUidRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FindDatabaseInstanceByUidRequestDeserializer + extends JsonDeserializer { + @Override + public FindDatabaseInstanceByUidRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FindDatabaseInstanceByUidRequestPb pb = + mapper.readValue(p, FindDatabaseInstanceByUidRequestPb.class); + return FindDatabaseInstanceByUidRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequestPb.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequestPb.java index 894cb8153..07a948b0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; @@ -10,13 +10,12 @@ /** Find a Database Instance by uid */ @Generated -public class FindDatabaseInstanceByUidRequest { - /** UID of the cluster to get. 
*/ +class FindDatabaseInstanceByUidRequestPb { @JsonIgnore @QueryParam("uid") private String uid; - public FindDatabaseInstanceByUidRequest setUid(String uid) { + public FindDatabaseInstanceByUidRequestPb setUid(String uid) { this.uid = uid; return this; } @@ -29,7 +28,7 @@ public String getUid() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - FindDatabaseInstanceByUidRequest that = (FindDatabaseInstanceByUidRequest) o; + FindDatabaseInstanceByUidRequestPb that = (FindDatabaseInstanceByUidRequestPb) o; return Objects.equals(uid, that.uid); } @@ -40,6 +39,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(FindDatabaseInstanceByUidRequest.class).add("uid", uid).toString(); + return new ToStringer(FindDatabaseInstanceByUidRequestPb.class).add("uid", uid).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java new file mode 100755 index 000000000..26303933c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java @@ -0,0 +1,112 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +/** Generates a credential that can be used to access database instances */ +@Generated +@JsonSerialize( + using = GenerateDatabaseCredentialRequest.GenerateDatabaseCredentialRequestSerializer.class) +@JsonDeserialize( + using = GenerateDatabaseCredentialRequest.GenerateDatabaseCredentialRequestDeserializer.class) +public class GenerateDatabaseCredentialRequest { + /** Instances to which the token will be scoped. 
*/ + private Collection instanceNames; + + /** */ + private String requestId; + + public GenerateDatabaseCredentialRequest setInstanceNames(Collection instanceNames) { + this.instanceNames = instanceNames; + return this; + } + + public Collection getInstanceNames() { + return instanceNames; + } + + public GenerateDatabaseCredentialRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateDatabaseCredentialRequest that = (GenerateDatabaseCredentialRequest) o; + return Objects.equals(instanceNames, that.instanceNames) + && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(instanceNames, requestId); + } + + @Override + public String toString() { + return new ToStringer(GenerateDatabaseCredentialRequest.class) + .add("instanceNames", instanceNames) + .add("requestId", requestId) + .toString(); + } + + GenerateDatabaseCredentialRequestPb toPb() { + GenerateDatabaseCredentialRequestPb pb = new GenerateDatabaseCredentialRequestPb(); + pb.setInstanceNames(instanceNames); + pb.setRequestId(requestId); + + return pb; + } + + static GenerateDatabaseCredentialRequest fromPb(GenerateDatabaseCredentialRequestPb pb) { + GenerateDatabaseCredentialRequest model = new GenerateDatabaseCredentialRequest(); + model.setInstanceNames(pb.getInstanceNames()); + model.setRequestId(pb.getRequestId()); + + return model; + } + + public static class GenerateDatabaseCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GenerateDatabaseCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenerateDatabaseCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GenerateDatabaseCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GenerateDatabaseCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenerateDatabaseCredentialRequestPb pb = + mapper.readValue(p, GenerateDatabaseCredentialRequestPb.class); + return GenerateDatabaseCredentialRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequestPb.java new file mode 100755 index 000000000..43178544d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Generates a credential that can be used to access database instances */ +@Generated +class GenerateDatabaseCredentialRequestPb { + @JsonProperty("instance_names") + private Collection instanceNames; + + @JsonProperty("request_id") + private String requestId; + + public GenerateDatabaseCredentialRequestPb setInstanceNames(Collection instanceNames) { + this.instanceNames = instanceNames; + return this; + } + + public Collection getInstanceNames() { + return instanceNames; + } + + public GenerateDatabaseCredentialRequestPb setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateDatabaseCredentialRequestPb that = (GenerateDatabaseCredentialRequestPb) o; + return Objects.equals(instanceNames, that.instanceNames) + && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(instanceNames, requestId); + } + + @Override + public String toString() { + return new ToStringer(GenerateDatabaseCredentialRequestPb.class) + .add("instanceNames", instanceNames) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java new file mode 100755 index 000000000..5d33496f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI 
specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Get a Database Catalog */ +@Generated +@JsonSerialize(using = GetDatabaseCatalogRequest.GetDatabaseCatalogRequestSerializer.class) +@JsonDeserialize(using = GetDatabaseCatalogRequest.GetDatabaseCatalogRequestDeserializer.class) +public class GetDatabaseCatalogRequest { + /** */ + private String name; + + public GetDatabaseCatalogRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseCatalogRequest that = (GetDatabaseCatalogRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseCatalogRequest.class).add("name", name).toString(); + } + + GetDatabaseCatalogRequestPb toPb() { + GetDatabaseCatalogRequestPb pb = new GetDatabaseCatalogRequestPb(); + pb.setName(name); + + return pb; + } + + static GetDatabaseCatalogRequest fromPb(GetDatabaseCatalogRequestPb pb) { + GetDatabaseCatalogRequest model = new GetDatabaseCatalogRequest(); + 
model.setName(pb.getName()); + + return model; + } + + public static class GetDatabaseCatalogRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDatabaseCatalogRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDatabaseCatalogRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDatabaseCatalogRequestDeserializer + extends JsonDeserializer { + @Override + public GetDatabaseCatalogRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDatabaseCatalogRequestPb pb = mapper.readValue(p, GetDatabaseCatalogRequestPb.class); + return GetDatabaseCatalogRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequestPb.java new file mode 100755 index 000000000..5f3c65fc9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Database Catalog */ +@Generated +class GetDatabaseCatalogRequestPb { + @JsonIgnore private String name; + + public GetDatabaseCatalogRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseCatalogRequestPb that = (GetDatabaseCatalogRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseCatalogRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java new file mode 100755 index 000000000..2079e23c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Get a Database Instance */ +@Generated +@JsonSerialize(using = GetDatabaseInstanceRequest.GetDatabaseInstanceRequestSerializer.class) +@JsonDeserialize(using = GetDatabaseInstanceRequest.GetDatabaseInstanceRequestDeserializer.class) +public class GetDatabaseInstanceRequest { + /** Name of the cluster to get. 
*/ + private String name; + + public GetDatabaseInstanceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseInstanceRequest that = (GetDatabaseInstanceRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseInstanceRequest.class).add("name", name).toString(); + } + + GetDatabaseInstanceRequestPb toPb() { + GetDatabaseInstanceRequestPb pb = new GetDatabaseInstanceRequestPb(); + pb.setName(name); + + return pb; + } + + static GetDatabaseInstanceRequest fromPb(GetDatabaseInstanceRequestPb pb) { + GetDatabaseInstanceRequest model = new GetDatabaseInstanceRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetDatabaseInstanceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDatabaseInstanceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDatabaseInstanceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDatabaseInstanceRequestDeserializer + extends JsonDeserializer { + @Override + public GetDatabaseInstanceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDatabaseInstanceRequestPb pb = mapper.readValue(p, GetDatabaseInstanceRequestPb.class); + return GetDatabaseInstanceRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequestPb.java new file mode 100755 index 000000000..b2b287342 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Database Instance */ +@Generated +class GetDatabaseInstanceRequestPb { + @JsonIgnore private String name; + + public GetDatabaseInstanceRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseInstanceRequestPb that = (GetDatabaseInstanceRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseInstanceRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java new file mode 100755 index 000000000..ae7ce1a66 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.database;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import java.io.IOException;
import java.util.Objects;

/** Get a Database Table */
@Generated
@JsonSerialize(using = GetDatabaseTableRequest.GetDatabaseTableRequestSerializer.class)
@JsonDeserialize(using = GetDatabaseTableRequest.GetDatabaseTableRequestDeserializer.class)
public class GetDatabaseTableRequest {
  /** Name identifying the Database Table to fetch. */
  private String name;

  public GetDatabaseTableRequest setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetDatabaseTableRequest that = (GetDatabaseTableRequest) o;
    return Objects.equals(name, that.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(GetDatabaseTableRequest.class).add("name", name).toString();
  }

  /** Converts this model into its wire-format ("Pb") counterpart. */
  GetDatabaseTableRequestPb toPb() {
    GetDatabaseTableRequestPb pb = new GetDatabaseTableRequestPb();
    pb.setName(name);

    return pb;
  }

  /** Builds the public model from its wire-format ("Pb") counterpart. */
  static GetDatabaseTableRequest fromPb(GetDatabaseTableRequestPb pb) {
    GetDatabaseTableRequest model = new GetDatabaseTableRequest();
    model.setName(pb.getName());

    return model;
  }

  /** Serializes the model by delegating to the Pb twin's Jackson annotations. */
  public static class GetDatabaseTableRequestSerializer
      extends JsonSerializer<GetDatabaseTableRequest> {
    @Override
    public void serialize(
        GetDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider)
        throws IOException {
      GetDatabaseTableRequestPb pb = value.toPb();
      provider.defaultSerializeValue(pb, gen);
    }
  }

  /** Deserializes JSON into the Pb twin, then converts to the public model. */
  public static class GetDatabaseTableRequestDeserializer
      extends JsonDeserializer<GetDatabaseTableRequest> {
    @Override
    public GetDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt)
        throws IOException {
      // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper.
      ObjectMapper mapper = (ObjectMapper) p.getCodec();
      GetDatabaseTableRequestPb pb = mapper.readValue(p, GetDatabaseTableRequestPb.class);
      return GetDatabaseTableRequest.fromPb(pb);
    }
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequestPb.java
new file mode 100755
index 000000000..c95e9d8ed
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequestPb.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Database Table */ +@Generated +class GetDatabaseTableRequestPb { + @JsonIgnore private String name; + + public GetDatabaseTableRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseTableRequestPb that = (GetDatabaseTableRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseTableRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java new file mode 100755 index 000000000..cabae0b8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Get a Synced Database Table */ +@Generated +@JsonSerialize(using = GetSyncedDatabaseTableRequest.GetSyncedDatabaseTableRequestSerializer.class) +@JsonDeserialize( + using = GetSyncedDatabaseTableRequest.GetSyncedDatabaseTableRequestDeserializer.class) +public class GetSyncedDatabaseTableRequest { + /** */ + private String name; + + public GetSyncedDatabaseTableRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSyncedDatabaseTableRequest that = (GetSyncedDatabaseTableRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetSyncedDatabaseTableRequest.class).add("name", name).toString(); + } + + GetSyncedDatabaseTableRequestPb toPb() { + GetSyncedDatabaseTableRequestPb pb = new GetSyncedDatabaseTableRequestPb(); + pb.setName(name); + + return pb; + } + + static GetSyncedDatabaseTableRequest fromPb(GetSyncedDatabaseTableRequestPb pb) { + GetSyncedDatabaseTableRequest model = new 
GetSyncedDatabaseTableRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetSyncedDatabaseTableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetSyncedDatabaseTableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSyncedDatabaseTableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSyncedDatabaseTableRequestDeserializer + extends JsonDeserializer { + @Override + public GetSyncedDatabaseTableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSyncedDatabaseTableRequestPb pb = + mapper.readValue(p, GetSyncedDatabaseTableRequestPb.class); + return GetSyncedDatabaseTableRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequestPb.java new file mode 100755 index 000000000..c29ca9e17 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Synced Database Table */ +@Generated +class GetSyncedDatabaseTableRequestPb { + @JsonIgnore private String name; + + public GetSyncedDatabaseTableRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSyncedDatabaseTableRequestPb that = (GetSyncedDatabaseTableRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetSyncedDatabaseTableRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java new file mode 100755 index 000000000..c068af4c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java @@ -0,0 +1,110 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** List Database Instances */ +@Generated +@JsonSerialize(using = ListDatabaseInstancesRequest.ListDatabaseInstancesRequestSerializer.class) +@JsonDeserialize( + using = ListDatabaseInstancesRequest.ListDatabaseInstancesRequestDeserializer.class) +public class ListDatabaseInstancesRequest { + /** Upper bound for items returned. */ + private Long pageSize; + + /** + * Pagination token to go to the next page of Database Instances. Requests first page if absent. 
+ */ + private String pageToken; + + public ListDatabaseInstancesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDatabaseInstancesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseInstancesRequest that = (ListDatabaseInstancesRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseInstancesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } + + ListDatabaseInstancesRequestPb toPb() { + ListDatabaseInstancesRequestPb pb = new ListDatabaseInstancesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListDatabaseInstancesRequest fromPb(ListDatabaseInstancesRequestPb pb) { + ListDatabaseInstancesRequest model = new ListDatabaseInstancesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListDatabaseInstancesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDatabaseInstancesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDatabaseInstancesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDatabaseInstancesRequestDeserializer + extends JsonDeserializer { + @Override + public ListDatabaseInstancesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDatabaseInstancesRequestPb pb = mapper.readValue(p, ListDatabaseInstancesRequestPb.class); + return ListDatabaseInstancesRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequestPb.java similarity index 69% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequestPb.java index dbea4c7b3..b63deda72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; @@ -10,20 +10,16 @@ /** List Database Instances */ @Generated -public class ListDatabaseInstancesRequest { - /** Upper bound for items returned. */ +class ListDatabaseInstancesRequestPb { @JsonIgnore @QueryParam("page_size") private Long pageSize; - /** - * Pagination token to go to the next page of Database Instances. Requests first page if absent. 
- */ @JsonIgnore @QueryParam("page_token") private String pageToken; - public ListDatabaseInstancesRequest setPageSize(Long pageSize) { + public ListDatabaseInstancesRequestPb setPageSize(Long pageSize) { this.pageSize = pageSize; return this; } @@ -32,7 +28,7 @@ public Long getPageSize() { return pageSize; } - public ListDatabaseInstancesRequest setPageToken(String pageToken) { + public ListDatabaseInstancesRequestPb setPageToken(String pageToken) { this.pageToken = pageToken; return this; } @@ -45,7 +41,7 @@ public String getPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListDatabaseInstancesRequest that = (ListDatabaseInstancesRequest) o; + ListDatabaseInstancesRequestPb that = (ListDatabaseInstancesRequestPb) o; return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); } @@ -56,7 +52,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ListDatabaseInstancesRequest.class) + return new ToStringer(ListDatabaseInstancesRequestPb.class) .add("pageSize", pageSize) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java new file mode 100755 index 000000000..4476a7edd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java @@ -0,0 +1,111 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +@Generated +@JsonSerialize(using = ListDatabaseInstancesResponse.ListDatabaseInstancesResponseSerializer.class) +@JsonDeserialize( + using = ListDatabaseInstancesResponse.ListDatabaseInstancesResponseDeserializer.class) +public class ListDatabaseInstancesResponse { + /** List of instances. */ + private Collection databaseInstances; + + /** Pagination token to request the next page of instances. 
*/ + private String nextPageToken; + + public ListDatabaseInstancesResponse setDatabaseInstances( + Collection databaseInstances) { + this.databaseInstances = databaseInstances; + return this; + } + + public Collection getDatabaseInstances() { + return databaseInstances; + } + + public ListDatabaseInstancesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseInstancesResponse that = (ListDatabaseInstancesResponse) o; + return Objects.equals(databaseInstances, that.databaseInstances) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstances, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseInstancesResponse.class) + .add("databaseInstances", databaseInstances) + .add("nextPageToken", nextPageToken) + .toString(); + } + + ListDatabaseInstancesResponsePb toPb() { + ListDatabaseInstancesResponsePb pb = new ListDatabaseInstancesResponsePb(); + pb.setDatabaseInstances(databaseInstances); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListDatabaseInstancesResponse fromPb(ListDatabaseInstancesResponsePb pb) { + ListDatabaseInstancesResponse model = new ListDatabaseInstancesResponse(); + model.setDatabaseInstances(pb.getDatabaseInstances()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListDatabaseInstancesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDatabaseInstancesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDatabaseInstancesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class ListDatabaseInstancesResponseDeserializer + extends JsonDeserializer { + @Override + public ListDatabaseInstancesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDatabaseInstancesResponsePb pb = + mapper.readValue(p, ListDatabaseInstancesResponsePb.class); + return ListDatabaseInstancesResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponsePb.java similarity index 74% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponsePb.java index e797c3e3a..976c61cb6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponsePb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,16 +9,14 @@ import java.util.Objects; @Generated -public class ListDatabaseInstancesResponse { - /** List of instances. */ +class ListDatabaseInstancesResponsePb { @JsonProperty("database_instances") private Collection databaseInstances; - /** Pagination token to request the next page of instances. 
*/ @JsonProperty("next_page_token") private String nextPageToken; - public ListDatabaseInstancesResponse setDatabaseInstances( + public ListDatabaseInstancesResponsePb setDatabaseInstances( Collection databaseInstances) { this.databaseInstances = databaseInstances; return this; @@ -28,7 +26,7 @@ public Collection getDatabaseInstances() { return databaseInstances; } - public ListDatabaseInstancesResponse setNextPageToken(String nextPageToken) { + public ListDatabaseInstancesResponsePb setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -41,7 +39,7 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListDatabaseInstancesResponse that = (ListDatabaseInstancesResponse) o; + ListDatabaseInstancesResponsePb that = (ListDatabaseInstancesResponsePb) o; return Objects.equals(databaseInstances, that.databaseInstances) && Objects.equals(nextPageToken, that.nextPageToken); } @@ -53,7 +51,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ListDatabaseInstancesResponse.class) + return new ToStringer(ListDatabaseInstancesResponsePb.class) .add("databaseInstances", databaseInstances) .add("nextPageToken", nextPageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java new file mode 100755 index 000000000..acbd48f04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** + * Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other + * fields of pipeline are still inferred by table def internally + */ +@Generated +@JsonSerialize(using = NewPipelineSpec.NewPipelineSpecSerializer.class) +@JsonDeserialize(using = NewPipelineSpec.NewPipelineSpecDeserializer.class) +public class NewPipelineSpec { + /** + * UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This + * needs to be a standard catalog where the user has permissions to create Delta tables. + */ + private String storageCatalog; + + /** + * UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This + * needs to be in the standard catalog where the user has permissions to create Delta tables. 
+ */ + private String storageSchema; + + public NewPipelineSpec setStorageCatalog(String storageCatalog) { + this.storageCatalog = storageCatalog; + return this; + } + + public String getStorageCatalog() { + return storageCatalog; + } + + public NewPipelineSpec setStorageSchema(String storageSchema) { + this.storageSchema = storageSchema; + return this; + } + + public String getStorageSchema() { + return storageSchema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NewPipelineSpec that = (NewPipelineSpec) o; + return Objects.equals(storageCatalog, that.storageCatalog) + && Objects.equals(storageSchema, that.storageSchema); + } + + @Override + public int hashCode() { + return Objects.hash(storageCatalog, storageSchema); + } + + @Override + public String toString() { + return new ToStringer(NewPipelineSpec.class) + .add("storageCatalog", storageCatalog) + .add("storageSchema", storageSchema) + .toString(); + } + + NewPipelineSpecPb toPb() { + NewPipelineSpecPb pb = new NewPipelineSpecPb(); + pb.setStorageCatalog(storageCatalog); + pb.setStorageSchema(storageSchema); + + return pb; + } + + static NewPipelineSpec fromPb(NewPipelineSpecPb pb) { + NewPipelineSpec model = new NewPipelineSpec(); + model.setStorageCatalog(pb.getStorageCatalog()); + model.setStorageSchema(pb.getStorageSchema()); + + return model; + } + + public static class NewPipelineSpecSerializer extends JsonSerializer { + @Override + public void serialize(NewPipelineSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NewPipelineSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NewPipelineSpecDeserializer extends JsonDeserializer { + @Override + public NewPipelineSpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NewPipelineSpecPb pb = mapper.readValue(p, NewPipelineSpecPb.class); + return NewPipelineSpec.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpecPb.java similarity index 66% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpecPb.java index 2cf1e9752..6aa2d7e9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpecPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -12,22 +12,14 @@ * fields of pipeline are still inferred by table def internally */ @Generated -public class NewPipelineSpec { - /** - * UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This - * needs to be a standard catalog where the user has permissions to create Delta tables. - */ +class NewPipelineSpecPb { @JsonProperty("storage_catalog") private String storageCatalog; - /** - * UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This - * needs to be in the standard catalog where the user has permissions to create Delta tables. 
- */ @JsonProperty("storage_schema") private String storageSchema; - public NewPipelineSpec setStorageCatalog(String storageCatalog) { + public NewPipelineSpecPb setStorageCatalog(String storageCatalog) { this.storageCatalog = storageCatalog; return this; } @@ -36,7 +28,7 @@ public String getStorageCatalog() { return storageCatalog; } - public NewPipelineSpec setStorageSchema(String storageSchema) { + public NewPipelineSpecPb setStorageSchema(String storageSchema) { this.storageSchema = storageSchema; return this; } @@ -49,7 +41,7 @@ public String getStorageSchema() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - NewPipelineSpec that = (NewPipelineSpec) o; + NewPipelineSpecPb that = (NewPipelineSpecPb) o; return Objects.equals(storageCatalog, that.storageCatalog) && Objects.equals(storageSchema, that.storageSchema); } @@ -61,7 +53,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(NewPipelineSpec.class) + return new ToStringer(NewPipelineSpecPb.class) .add("storageCatalog", storageCatalog) .add("storageSchema", storageSchema) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java new file mode 100755 index 000000000..88dcf59fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ProvisioningInfoState { + ACTIVE, + DEGRADED, + DELETING, + FAILED, + PROVISIONING, + UPDATING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java similarity index 64% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java index dc513fb25..a1aca8b8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java @@ -1,18 +1,28 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; -/** Next field marker: 10 */ +/** Next field marker: 12 */ @Generated +@JsonSerialize(using = SyncedDatabaseTable.SyncedDatabaseTableSerializer.class) +@JsonDeserialize(using = 
SyncedDatabaseTable.SyncedDatabaseTableDeserializer.class) public class SyncedDatabaseTable { /** Synced Table data synchronization status */ - @JsonProperty("data_synchronization_status") - private OnlineTableStatus dataSynchronizationStatus; + private SyncedTableStatus dataSynchronizationStatus; /** * Name of the target database instance. This is required when creating synced database tables in @@ -21,7 +31,6 @@ public class SyncedDatabaseTable { * catalogs, the database instance name MUST match that of the registered catalog (or the request * will be rejected). */ - @JsonProperty("database_instance_name") private String databaseInstanceName; /** @@ -37,19 +46,15 @@ public class SyncedDatabaseTable { * be that of the standard catalog. In this scenario, specifying this field will allow targeting * an arbitrary postgres database. */ - @JsonProperty("logical_database_name") private String logicalDatabaseName; /** Full three-part (catalog, schema, table) name of the table. */ - @JsonProperty("name") private String name; /** Specification of a synced database table. */ - @JsonProperty("spec") private SyncedTableSpec spec; /** Data serving REST API URL for this table */ - @JsonProperty("table_serving_url") private String tableServingUrl; /** @@ -57,16 +62,15 @@ public class SyncedDatabaseTable { * state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline * may be in "PROVISIONING" as it runs asynchronously). 
*/ - @JsonProperty("unity_catalog_provisioning_state") private ProvisioningInfoState unityCatalogProvisioningState; public SyncedDatabaseTable setDataSynchronizationStatus( - OnlineTableStatus dataSynchronizationStatus) { + SyncedTableStatus dataSynchronizationStatus) { this.dataSynchronizationStatus = dataSynchronizationStatus; return this; } - public OnlineTableStatus getDataSynchronizationStatus() { + public SyncedTableStatus getDataSynchronizationStatus() { return dataSynchronizationStatus; } @@ -163,4 +167,51 @@ public String toString() { .add("unityCatalogProvisioningState", unityCatalogProvisioningState) .toString(); } + + SyncedDatabaseTablePb toPb() { + SyncedDatabaseTablePb pb = new SyncedDatabaseTablePb(); + pb.setDataSynchronizationStatus(dataSynchronizationStatus); + pb.setDatabaseInstanceName(databaseInstanceName); + pb.setLogicalDatabaseName(logicalDatabaseName); + pb.setName(name); + pb.setSpec(spec); + pb.setTableServingUrl(tableServingUrl); + pb.setUnityCatalogProvisioningState(unityCatalogProvisioningState); + + return pb; + } + + static SyncedDatabaseTable fromPb(SyncedDatabaseTablePb pb) { + SyncedDatabaseTable model = new SyncedDatabaseTable(); + model.setDataSynchronizationStatus(pb.getDataSynchronizationStatus()); + model.setDatabaseInstanceName(pb.getDatabaseInstanceName()); + model.setLogicalDatabaseName(pb.getLogicalDatabaseName()); + model.setName(pb.getName()); + model.setSpec(pb.getSpec()); + model.setTableServingUrl(pb.getTableServingUrl()); + model.setUnityCatalogProvisioningState(pb.getUnityCatalogProvisioningState()); + + return model; + } + + public static class SyncedDatabaseTableSerializer extends JsonSerializer { + @Override + public void serialize(SyncedDatabaseTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedDatabaseTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedDatabaseTableDeserializer + extends JsonDeserializer { + 
@Override + public SyncedDatabaseTable deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedDatabaseTablePb pb = mapper.readValue(p, SyncedDatabaseTablePb.class); + return SyncedDatabaseTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTablePb.java new file mode 100755 index 000000000..606cc01b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTablePb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field marker: 12 */ +@Generated +class SyncedDatabaseTablePb { + @JsonProperty("data_synchronization_status") + private SyncedTableStatus dataSynchronizationStatus; + + @JsonProperty("database_instance_name") + private String databaseInstanceName; + + @JsonProperty("logical_database_name") + private String logicalDatabaseName; + + @JsonProperty("name") + private String name; + + @JsonProperty("spec") + private SyncedTableSpec spec; + + @JsonProperty("table_serving_url") + private String tableServingUrl; + + @JsonProperty("unity_catalog_provisioning_state") + private ProvisioningInfoState unityCatalogProvisioningState; + + public SyncedDatabaseTablePb setDataSynchronizationStatus( + SyncedTableStatus dataSynchronizationStatus) { + this.dataSynchronizationStatus = dataSynchronizationStatus; + return this; + } + + public SyncedTableStatus getDataSynchronizationStatus() { + return 
dataSynchronizationStatus; + } + + public SyncedDatabaseTablePb setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public SyncedDatabaseTablePb setLogicalDatabaseName(String logicalDatabaseName) { + this.logicalDatabaseName = logicalDatabaseName; + return this; + } + + public String getLogicalDatabaseName() { + return logicalDatabaseName; + } + + public SyncedDatabaseTablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SyncedDatabaseTablePb setSpec(SyncedTableSpec spec) { + this.spec = spec; + return this; + } + + public SyncedTableSpec getSpec() { + return spec; + } + + public SyncedDatabaseTablePb setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + + public SyncedDatabaseTablePb setUnityCatalogProvisioningState( + ProvisioningInfoState unityCatalogProvisioningState) { + this.unityCatalogProvisioningState = unityCatalogProvisioningState; + return this; + } + + public ProvisioningInfoState getUnityCatalogProvisioningState() { + return unityCatalogProvisioningState; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedDatabaseTablePb that = (SyncedDatabaseTablePb) o; + return Objects.equals(dataSynchronizationStatus, that.dataSynchronizationStatus) + && Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) + && Objects.equals(name, that.name) + && Objects.equals(spec, that.spec) + && Objects.equals(tableServingUrl, that.tableServingUrl) + && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState); + } + + 
@Override + public int hashCode() { + return Objects.hash( + dataSynchronizationStatus, + databaseInstanceName, + logicalDatabaseName, + name, + spec, + tableServingUrl, + unityCatalogProvisioningState); + } + + @Override + public String toString() { + return new ToStringer(SyncedDatabaseTablePb.class) + .add("dataSynchronizationStatus", dataSynchronizationStatus) + .add("databaseInstanceName", databaseInstanceName) + .add("logicalDatabaseName", logicalDatabaseName) + .add("name", name) + .add("spec", spec) + .add("tableServingUrl", tableServingUrl) + .add("unityCatalogProvisioningState", unityCatalogProvisioningState) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java new file mode 100755 index 000000000..ac551e88a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java @@ -0,0 +1,138 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** + * Detailed status of a synced table. 
Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. + */ +@Generated +@JsonSerialize( + using = SyncedTableContinuousUpdateStatus.SyncedTableContinuousUpdateStatusSerializer.class) +@JsonDeserialize( + using = SyncedTableContinuousUpdateStatus.SyncedTableContinuousUpdateStatusDeserializer.class) +public class SyncedTableContinuousUpdateStatus { + /** Progress of the initial data synchronization. */ + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may not be completely synced to the synced table yet. + */ + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. + */ + private String timestamp; + + public SyncedTableContinuousUpdateStatus setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + public SyncedTableContinuousUpdateStatus setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableContinuousUpdateStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableContinuousUpdateStatus that = (SyncedTableContinuousUpdateStatus) o; + return Objects.equals(initialPipelineSyncProgress, 
that.initialPipelineSyncProgress) + && Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress, lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableContinuousUpdateStatus.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } + + SyncedTableContinuousUpdateStatusPb toPb() { + SyncedTableContinuousUpdateStatusPb pb = new SyncedTableContinuousUpdateStatusPb(); + pb.setInitialPipelineSyncProgress(initialPipelineSyncProgress); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + + return pb; + } + + static SyncedTableContinuousUpdateStatus fromPb(SyncedTableContinuousUpdateStatusPb pb) { + SyncedTableContinuousUpdateStatus model = new SyncedTableContinuousUpdateStatus(); + model.setInitialPipelineSyncProgress(pb.getInitialPipelineSyncProgress()); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class SyncedTableContinuousUpdateStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + SyncedTableContinuousUpdateStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableContinuousUpdateStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableContinuousUpdateStatusDeserializer + extends JsonDeserializer { + @Override + public SyncedTableContinuousUpdateStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableContinuousUpdateStatusPb pb = + mapper.readValue(p, SyncedTableContinuousUpdateStatusPb.class); + return SyncedTableContinuousUpdateStatus.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatusPb.java new file mode 100755 index 000000000..35b9dfc64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatusPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. 
+ */ +@Generated +class SyncedTableContinuousUpdateStatusPb { + @JsonProperty("initial_pipeline_sync_progress") + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + public SyncedTableContinuousUpdateStatusPb setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + public SyncedTableContinuousUpdateStatusPb setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableContinuousUpdateStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableContinuousUpdateStatusPb that = (SyncedTableContinuousUpdateStatusPb) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress) + && Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress, lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableContinuousUpdateStatusPb.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", 
timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java new file mode 100755 index 000000000..8daa5779d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java @@ -0,0 +1,118 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + * SYNCED_PIPELINE_FAILED state. + */ +@Generated +@JsonSerialize(using = SyncedTableFailedStatus.SyncedTableFailedStatusSerializer.class) +@JsonDeserialize(using = SyncedTableFailedStatus.SyncedTableFailedStatusDeserializer.class) +public class SyncedTableFailedStatus { + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may only be partially synced to the synced table. Only populated if the table is still + * synced and available for serving. 
+ */ + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. Only populated if the table is still synced and available for serving. + */ + private String timestamp; + + public SyncedTableFailedStatus setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableFailedStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableFailedStatus that = (SyncedTableFailedStatus) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableFailedStatus.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } + + SyncedTableFailedStatusPb toPb() { + SyncedTableFailedStatusPb pb = new SyncedTableFailedStatusPb(); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + + return pb; + } + + static SyncedTableFailedStatus fromPb(SyncedTableFailedStatusPb pb) { + SyncedTableFailedStatus model = new SyncedTableFailedStatus(); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class SyncedTableFailedStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + 
SyncedTableFailedStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableFailedStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableFailedStatusDeserializer + extends JsonDeserializer { + @Override + public SyncedTableFailedStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableFailedStatusPb pb = mapper.readValue(p, SyncedTableFailedStatusPb.class); + return SyncedTableFailedStatus.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatusPb.java new file mode 100755 index 000000000..133025dc2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatusPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + * SYNCED_PIPELINE_FAILED state. 
+ */ +@Generated +class SyncedTableFailedStatusPb { + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + public SyncedTableFailedStatusPb setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableFailedStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableFailedStatusPb that = (SyncedTableFailedStatusPb) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableFailedStatusPb.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java new file mode 100755 index 000000000..b648fb5d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java @@ -0,0 +1,168 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Progress information of the Synced Table data synchronization pipeline. */ +@Generated +@JsonSerialize(using = SyncedTablePipelineProgress.SyncedTablePipelineProgressSerializer.class) +@JsonDeserialize(using = SyncedTablePipelineProgress.SyncedTablePipelineProgressDeserializer.class) +public class SyncedTablePipelineProgress { + /** The estimated time remaining to complete this update in seconds. */ + private Double estimatedCompletionTimeSeconds; + + /** + * The source table Delta version that was last processed by the pipeline. The pipeline may not + * have completely processed this version yet. + */ + private Long latestVersionCurrentlyProcessing; + + /** The completion ratio of this update. This is a number between 0 and 1. */ + private Double syncProgressCompletion; + + /** The number of rows that have been synced in this update. */ + private Long syncedRowCount; + + /** + * The total number of rows that need to be synced in this update. This number may be an estimate. 
+ */ + private Long totalRowCount; + + public SyncedTablePipelineProgress setEstimatedCompletionTimeSeconds( + Double estimatedCompletionTimeSeconds) { + this.estimatedCompletionTimeSeconds = estimatedCompletionTimeSeconds; + return this; + } + + public Double getEstimatedCompletionTimeSeconds() { + return estimatedCompletionTimeSeconds; + } + + public SyncedTablePipelineProgress setLatestVersionCurrentlyProcessing( + Long latestVersionCurrentlyProcessing) { + this.latestVersionCurrentlyProcessing = latestVersionCurrentlyProcessing; + return this; + } + + public Long getLatestVersionCurrentlyProcessing() { + return latestVersionCurrentlyProcessing; + } + + public SyncedTablePipelineProgress setSyncProgressCompletion(Double syncProgressCompletion) { + this.syncProgressCompletion = syncProgressCompletion; + return this; + } + + public Double getSyncProgressCompletion() { + return syncProgressCompletion; + } + + public SyncedTablePipelineProgress setSyncedRowCount(Long syncedRowCount) { + this.syncedRowCount = syncedRowCount; + return this; + } + + public Long getSyncedRowCount() { + return syncedRowCount; + } + + public SyncedTablePipelineProgress setTotalRowCount(Long totalRowCount) { + this.totalRowCount = totalRowCount; + return this; + } + + public Long getTotalRowCount() { + return totalRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTablePipelineProgress that = (SyncedTablePipelineProgress) o; + return Objects.equals(estimatedCompletionTimeSeconds, that.estimatedCompletionTimeSeconds) + && Objects.equals(latestVersionCurrentlyProcessing, that.latestVersionCurrentlyProcessing) + && Objects.equals(syncProgressCompletion, that.syncProgressCompletion) + && Objects.equals(syncedRowCount, that.syncedRowCount) + && Objects.equals(totalRowCount, that.totalRowCount); + } + + @Override + public int hashCode() { + return Objects.hash( + 
estimatedCompletionTimeSeconds, + latestVersionCurrentlyProcessing, + syncProgressCompletion, + syncedRowCount, + totalRowCount); + } + + @Override + public String toString() { + return new ToStringer(SyncedTablePipelineProgress.class) + .add("estimatedCompletionTimeSeconds", estimatedCompletionTimeSeconds) + .add("latestVersionCurrentlyProcessing", latestVersionCurrentlyProcessing) + .add("syncProgressCompletion", syncProgressCompletion) + .add("syncedRowCount", syncedRowCount) + .add("totalRowCount", totalRowCount) + .toString(); + } + + SyncedTablePipelineProgressPb toPb() { + SyncedTablePipelineProgressPb pb = new SyncedTablePipelineProgressPb(); + pb.setEstimatedCompletionTimeSeconds(estimatedCompletionTimeSeconds); + pb.setLatestVersionCurrentlyProcessing(latestVersionCurrentlyProcessing); + pb.setSyncProgressCompletion(syncProgressCompletion); + pb.setSyncedRowCount(syncedRowCount); + pb.setTotalRowCount(totalRowCount); + + return pb; + } + + static SyncedTablePipelineProgress fromPb(SyncedTablePipelineProgressPb pb) { + SyncedTablePipelineProgress model = new SyncedTablePipelineProgress(); + model.setEstimatedCompletionTimeSeconds(pb.getEstimatedCompletionTimeSeconds()); + model.setLatestVersionCurrentlyProcessing(pb.getLatestVersionCurrentlyProcessing()); + model.setSyncProgressCompletion(pb.getSyncProgressCompletion()); + model.setSyncedRowCount(pb.getSyncedRowCount()); + model.setTotalRowCount(pb.getTotalRowCount()); + + return model; + } + + public static class SyncedTablePipelineProgressSerializer + extends JsonSerializer { + @Override + public void serialize( + SyncedTablePipelineProgress value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTablePipelineProgressPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTablePipelineProgressDeserializer + extends JsonDeserializer { + @Override + public SyncedTablePipelineProgress deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTablePipelineProgressPb pb = mapper.readValue(p, SyncedTablePipelineProgressPb.class); + return SyncedTablePipelineProgress.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgressPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgressPb.java new file mode 100755 index 000000000..24add6e48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgressPb.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Progress information of the Synced Table data synchronization pipeline. 
*/ +@Generated +class SyncedTablePipelineProgressPb { + @JsonProperty("estimated_completion_time_seconds") + private Double estimatedCompletionTimeSeconds; + + @JsonProperty("latest_version_currently_processing") + private Long latestVersionCurrentlyProcessing; + + @JsonProperty("sync_progress_completion") + private Double syncProgressCompletion; + + @JsonProperty("synced_row_count") + private Long syncedRowCount; + + @JsonProperty("total_row_count") + private Long totalRowCount; + + public SyncedTablePipelineProgressPb setEstimatedCompletionTimeSeconds( + Double estimatedCompletionTimeSeconds) { + this.estimatedCompletionTimeSeconds = estimatedCompletionTimeSeconds; + return this; + } + + public Double getEstimatedCompletionTimeSeconds() { + return estimatedCompletionTimeSeconds; + } + + public SyncedTablePipelineProgressPb setLatestVersionCurrentlyProcessing( + Long latestVersionCurrentlyProcessing) { + this.latestVersionCurrentlyProcessing = latestVersionCurrentlyProcessing; + return this; + } + + public Long getLatestVersionCurrentlyProcessing() { + return latestVersionCurrentlyProcessing; + } + + public SyncedTablePipelineProgressPb setSyncProgressCompletion(Double syncProgressCompletion) { + this.syncProgressCompletion = syncProgressCompletion; + return this; + } + + public Double getSyncProgressCompletion() { + return syncProgressCompletion; + } + + public SyncedTablePipelineProgressPb setSyncedRowCount(Long syncedRowCount) { + this.syncedRowCount = syncedRowCount; + return this; + } + + public Long getSyncedRowCount() { + return syncedRowCount; + } + + public SyncedTablePipelineProgressPb setTotalRowCount(Long totalRowCount) { + this.totalRowCount = totalRowCount; + return this; + } + + public Long getTotalRowCount() { + return totalRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTablePipelineProgressPb that = (SyncedTablePipelineProgressPb) o; 
+ return Objects.equals(estimatedCompletionTimeSeconds, that.estimatedCompletionTimeSeconds) + && Objects.equals(latestVersionCurrentlyProcessing, that.latestVersionCurrentlyProcessing) + && Objects.equals(syncProgressCompletion, that.syncProgressCompletion) + && Objects.equals(syncedRowCount, that.syncedRowCount) + && Objects.equals(totalRowCount, that.totalRowCount); + } + + @Override + public int hashCode() { + return Objects.hash( + estimatedCompletionTimeSeconds, + latestVersionCurrentlyProcessing, + syncProgressCompletion, + syncedRowCount, + totalRowCount); + } + + @Override + public String toString() { + return new ToStringer(SyncedTablePipelineProgressPb.class) + .add("estimatedCompletionTimeSeconds", estimatedCompletionTimeSeconds) + .add("latestVersionCurrentlyProcessing", latestVersionCurrentlyProcessing) + .add("syncProgressCompletion", syncProgressCompletion) + .add("syncedRowCount", syncedRowCount) + .add("totalRowCount", totalRowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java new file mode 100755 index 000000000..87f13d0d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. + */ +@Generated +@JsonSerialize(using = SyncedTableProvisioningStatus.SyncedTableProvisioningStatusSerializer.class) +@JsonDeserialize( + using = SyncedTableProvisioningStatus.SyncedTableProvisioningStatusDeserializer.class) +public class SyncedTableProvisioningStatus { + /** + * Details about initial data synchronization. Only populated when in the + * PROVISIONING_INITIAL_SNAPSHOT state. 
+ */ + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + public SyncedTableProvisioningStatus setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableProvisioningStatus that = (SyncedTableProvisioningStatus) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableProvisioningStatus.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .toString(); + } + + SyncedTableProvisioningStatusPb toPb() { + SyncedTableProvisioningStatusPb pb = new SyncedTableProvisioningStatusPb(); + pb.setInitialPipelineSyncProgress(initialPipelineSyncProgress); + + return pb; + } + + static SyncedTableProvisioningStatus fromPb(SyncedTableProvisioningStatusPb pb) { + SyncedTableProvisioningStatus model = new SyncedTableProvisioningStatus(); + model.setInitialPipelineSyncProgress(pb.getInitialPipelineSyncProgress()); + + return model; + } + + public static class SyncedTableProvisioningStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + SyncedTableProvisioningStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableProvisioningStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableProvisioningStatusDeserializer + extends JsonDeserializer { + @Override + public SyncedTableProvisioningStatus deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableProvisioningStatusPb pb = + mapper.readValue(p, SyncedTableProvisioningStatusPb.class); + return SyncedTableProvisioningStatus.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatusPb.java new file mode 100755 index 000000000..85f7116bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatusPb.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. 
+ */ +@Generated +class SyncedTableProvisioningStatusPb { + @JsonProperty("initial_pipeline_sync_progress") + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + public SyncedTableProvisioningStatusPb setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableProvisioningStatusPb that = (SyncedTableProvisioningStatusPb) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableProvisioningStatusPb.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java index 0f0fd271d..6dbba1818 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java new file mode 100755 index 000000000..edf7c78f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java @@ -0,0 +1,196 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +/** Specification of a synced database table. */ +@Generated +@JsonSerialize(using = SyncedTableSpec.SyncedTableSpecSerializer.class) +@JsonDeserialize(using = SyncedTableSpec.SyncedTableSpecDeserializer.class) +public class SyncedTableSpec { + /** + * If true, the synced table's logical database and schema resources in PG will be created if they + * do not already exist. + */ + private Boolean createDatabaseObjectsIfMissing; + + /** Spec of new pipeline. Should be empty if pipeline_id is set */ + private NewPipelineSpec newPipelineSpec; + + /** ID of the associated pipeline. 
Should be empty if new_pipeline_spec is set */ + private String pipelineId; + + /** Primary Key columns to be used for data insert/update in the destination. */ + private Collection primaryKeyColumns; + + /** Scheduling policy of the underlying pipeline. */ + private SyncedTableSchedulingPolicy schedulingPolicy; + + /** Three-part (catalog, schema, table) name of the source Delta table. */ + private String sourceTableFullName; + + /** Time series key to deduplicate (tie-break) rows with the same primary key. */ + private String timeseriesKey; + + public SyncedTableSpec setCreateDatabaseObjectsIfMissing(Boolean createDatabaseObjectsIfMissing) { + this.createDatabaseObjectsIfMissing = createDatabaseObjectsIfMissing; + return this; + } + + public Boolean getCreateDatabaseObjectsIfMissing() { + return createDatabaseObjectsIfMissing; + } + + public SyncedTableSpec setNewPipelineSpec(NewPipelineSpec newPipelineSpec) { + this.newPipelineSpec = newPipelineSpec; + return this; + } + + public NewPipelineSpec getNewPipelineSpec() { + return newPipelineSpec; + } + + public SyncedTableSpec setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public SyncedTableSpec setPrimaryKeyColumns(Collection primaryKeyColumns) { + this.primaryKeyColumns = primaryKeyColumns; + return this; + } + + public Collection getPrimaryKeyColumns() { + return primaryKeyColumns; + } + + public SyncedTableSpec setSchedulingPolicy(SyncedTableSchedulingPolicy schedulingPolicy) { + this.schedulingPolicy = schedulingPolicy; + return this; + } + + public SyncedTableSchedulingPolicy getSchedulingPolicy() { + return schedulingPolicy; + } + + public SyncedTableSpec setSourceTableFullName(String sourceTableFullName) { + this.sourceTableFullName = sourceTableFullName; + return this; + } + + public String getSourceTableFullName() { + return sourceTableFullName; + } + + public SyncedTableSpec setTimeseriesKey(String 
timeseriesKey) { + this.timeseriesKey = timeseriesKey; + return this; + } + + public String getTimeseriesKey() { + return timeseriesKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableSpec that = (SyncedTableSpec) o; + return Objects.equals(createDatabaseObjectsIfMissing, that.createDatabaseObjectsIfMissing) + && Objects.equals(newPipelineSpec, that.newPipelineSpec) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(primaryKeyColumns, that.primaryKeyColumns) + && Objects.equals(schedulingPolicy, that.schedulingPolicy) + && Objects.equals(sourceTableFullName, that.sourceTableFullName) + && Objects.equals(timeseriesKey, that.timeseriesKey); + } + + @Override + public int hashCode() { + return Objects.hash( + createDatabaseObjectsIfMissing, + newPipelineSpec, + pipelineId, + primaryKeyColumns, + schedulingPolicy, + sourceTableFullName, + timeseriesKey); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableSpec.class) + .add("createDatabaseObjectsIfMissing", createDatabaseObjectsIfMissing) + .add("newPipelineSpec", newPipelineSpec) + .add("pipelineId", pipelineId) + .add("primaryKeyColumns", primaryKeyColumns) + .add("schedulingPolicy", schedulingPolicy) + .add("sourceTableFullName", sourceTableFullName) + .add("timeseriesKey", timeseriesKey) + .toString(); + } + + SyncedTableSpecPb toPb() { + SyncedTableSpecPb pb = new SyncedTableSpecPb(); + pb.setCreateDatabaseObjectsIfMissing(createDatabaseObjectsIfMissing); + pb.setNewPipelineSpec(newPipelineSpec); + pb.setPipelineId(pipelineId); + pb.setPrimaryKeyColumns(primaryKeyColumns); + pb.setSchedulingPolicy(schedulingPolicy); + pb.setSourceTableFullName(sourceTableFullName); + pb.setTimeseriesKey(timeseriesKey); + + return pb; + } + + static SyncedTableSpec fromPb(SyncedTableSpecPb pb) { + SyncedTableSpec model = new SyncedTableSpec(); + 
model.setCreateDatabaseObjectsIfMissing(pb.getCreateDatabaseObjectsIfMissing()); + model.setNewPipelineSpec(pb.getNewPipelineSpec()); + model.setPipelineId(pb.getPipelineId()); + model.setPrimaryKeyColumns(pb.getPrimaryKeyColumns()); + model.setSchedulingPolicy(pb.getSchedulingPolicy()); + model.setSourceTableFullName(pb.getSourceTableFullName()); + model.setTimeseriesKey(pb.getTimeseriesKey()); + + return model; + } + + public static class SyncedTableSpecSerializer extends JsonSerializer { + @Override + public void serialize(SyncedTableSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableSpecDeserializer extends JsonDeserializer { + @Override + public SyncedTableSpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableSpecPb pb = mapper.readValue(p, SyncedTableSpecPb.class); + return SyncedTableSpec.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpecPb.java similarity index 71% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpecPb.java index 0f7ae97ef..a55c8aa2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpecPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -10,39 +10,30 @@ /** Specification of a synced database table. */ @Generated -public class SyncedTableSpec { - /** - * If true, the synced table's logical database and schema resources in PG will be created if they - * do not already exist. - */ +class SyncedTableSpecPb { @JsonProperty("create_database_objects_if_missing") private Boolean createDatabaseObjectsIfMissing; - /** Spec of new pipeline. Should be empty if pipeline_id is set */ @JsonProperty("new_pipeline_spec") private NewPipelineSpec newPipelineSpec; - /** ID of the associated pipeline. Should be empty if new_pipeline_spec is set */ @JsonProperty("pipeline_id") private String pipelineId; - /** Primary Key columns to be used for data insert/update in the destination. */ @JsonProperty("primary_key_columns") private Collection primaryKeyColumns; - /** Scheduling policy of the underlying pipeline. */ @JsonProperty("scheduling_policy") private SyncedTableSchedulingPolicy schedulingPolicy; - /** Three-part (catalog, schema, table) name of the source Delta table. */ @JsonProperty("source_table_full_name") private String sourceTableFullName; - /** Time series key to deduplicate (tie-break) rows with the same primary key. 
*/ @JsonProperty("timeseries_key") private String timeseriesKey; - public SyncedTableSpec setCreateDatabaseObjectsIfMissing(Boolean createDatabaseObjectsIfMissing) { + public SyncedTableSpecPb setCreateDatabaseObjectsIfMissing( + Boolean createDatabaseObjectsIfMissing) { this.createDatabaseObjectsIfMissing = createDatabaseObjectsIfMissing; return this; } @@ -51,7 +42,7 @@ public Boolean getCreateDatabaseObjectsIfMissing() { return createDatabaseObjectsIfMissing; } - public SyncedTableSpec setNewPipelineSpec(NewPipelineSpec newPipelineSpec) { + public SyncedTableSpecPb setNewPipelineSpec(NewPipelineSpec newPipelineSpec) { this.newPipelineSpec = newPipelineSpec; return this; } @@ -60,7 +51,7 @@ public NewPipelineSpec getNewPipelineSpec() { return newPipelineSpec; } - public SyncedTableSpec setPipelineId(String pipelineId) { + public SyncedTableSpecPb setPipelineId(String pipelineId) { this.pipelineId = pipelineId; return this; } @@ -69,7 +60,7 @@ public String getPipelineId() { return pipelineId; } - public SyncedTableSpec setPrimaryKeyColumns(Collection primaryKeyColumns) { + public SyncedTableSpecPb setPrimaryKeyColumns(Collection primaryKeyColumns) { this.primaryKeyColumns = primaryKeyColumns; return this; } @@ -78,7 +69,7 @@ public Collection getPrimaryKeyColumns() { return primaryKeyColumns; } - public SyncedTableSpec setSchedulingPolicy(SyncedTableSchedulingPolicy schedulingPolicy) { + public SyncedTableSpecPb setSchedulingPolicy(SyncedTableSchedulingPolicy schedulingPolicy) { this.schedulingPolicy = schedulingPolicy; return this; } @@ -87,7 +78,7 @@ public SyncedTableSchedulingPolicy getSchedulingPolicy() { return schedulingPolicy; } - public SyncedTableSpec setSourceTableFullName(String sourceTableFullName) { + public SyncedTableSpecPb setSourceTableFullName(String sourceTableFullName) { this.sourceTableFullName = sourceTableFullName; return this; } @@ -96,7 +87,7 @@ public String getSourceTableFullName() { return sourceTableFullName; } - public 
SyncedTableSpec setTimeseriesKey(String timeseriesKey) { + public SyncedTableSpecPb setTimeseriesKey(String timeseriesKey) { this.timeseriesKey = timeseriesKey; return this; } @@ -109,7 +100,7 @@ public String getTimeseriesKey() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - SyncedTableSpec that = (SyncedTableSpec) o; + SyncedTableSpecPb that = (SyncedTableSpecPb) o; return Objects.equals(createDatabaseObjectsIfMissing, that.createDatabaseObjectsIfMissing) && Objects.equals(newPipelineSpec, that.newPipelineSpec) && Objects.equals(pipelineId, that.pipelineId) @@ -133,7 +124,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(SyncedTableSpec.class) + return new ToStringer(SyncedTableSpecPb.class) .add("createDatabaseObjectsIfMissing", createDatabaseObjectsIfMissing) .add("newPipelineSpec", newPipelineSpec) .add("pipelineId", pipelineId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java new file mode 100755 index 000000000..f0012f316 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java @@ -0,0 +1,21 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** The state of a synced table. 
*/ +@Generated +public enum SyncedTableState { + SYNCED_TABLED_OFFLINE, + SYNCED_TABLE_OFFLINE_FAILED, + SYNCED_TABLE_ONLINE, + SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE, + SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE, + SYNCED_TABLE_ONLINE_PIPELINE_FAILED, + SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE, + SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES, + SYNCED_TABLE_PROVISIONING, + SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT, + SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java new file mode 100755 index 000000000..f3ca2ff2a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java @@ -0,0 +1,189 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Status of a synced table. */ +@Generated +@JsonSerialize(using = SyncedTableStatus.SyncedTableStatusSerializer.class) +@JsonDeserialize(using = SyncedTableStatus.SyncedTableStatusDeserializer.class) +public class SyncedTableStatus { + /** + * Detailed status of a synced table. 
Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. + */ + private SyncedTableContinuousUpdateStatus continuousUpdateStatus; + + /** The state of the synced table. */ + private SyncedTableState detailedState; + + /** + * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + * SYNCED_PIPELINE_FAILED state. + */ + private SyncedTableFailedStatus failedStatus; + + /** A text description of the current state of the synced table. */ + private String message; + + /** + * Detailed status of a synced table. Shown if the synced table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. + */ + private SyncedTableProvisioningStatus provisioningStatus; + + /** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE + * or the SYNCED_NO_PENDING_UPDATE state. + */ + private SyncedTableTriggeredUpdateStatus triggeredUpdateStatus; + + public SyncedTableStatus setContinuousUpdateStatus( + SyncedTableContinuousUpdateStatus continuousUpdateStatus) { + this.continuousUpdateStatus = continuousUpdateStatus; + return this; + } + + public SyncedTableContinuousUpdateStatus getContinuousUpdateStatus() { + return continuousUpdateStatus; + } + + public SyncedTableStatus setDetailedState(SyncedTableState detailedState) { + this.detailedState = detailedState; + return this; + } + + public SyncedTableState getDetailedState() { + return detailedState; + } + + public SyncedTableStatus setFailedStatus(SyncedTableFailedStatus failedStatus) { + this.failedStatus = failedStatus; + return this; + } + + public SyncedTableFailedStatus getFailedStatus() { + return failedStatus; + } + + public SyncedTableStatus setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public SyncedTableStatus setProvisioningStatus(SyncedTableProvisioningStatus 
provisioningStatus) { + this.provisioningStatus = provisioningStatus; + return this; + } + + public SyncedTableProvisioningStatus getProvisioningStatus() { + return provisioningStatus; + } + + public SyncedTableStatus setTriggeredUpdateStatus( + SyncedTableTriggeredUpdateStatus triggeredUpdateStatus) { + this.triggeredUpdateStatus = triggeredUpdateStatus; + return this; + } + + public SyncedTableTriggeredUpdateStatus getTriggeredUpdateStatus() { + return triggeredUpdateStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableStatus that = (SyncedTableStatus) o; + return Objects.equals(continuousUpdateStatus, that.continuousUpdateStatus) + && Objects.equals(detailedState, that.detailedState) + && Objects.equals(failedStatus, that.failedStatus) + && Objects.equals(message, that.message) + && Objects.equals(provisioningStatus, that.provisioningStatus) + && Objects.equals(triggeredUpdateStatus, that.triggeredUpdateStatus); + } + + @Override + public int hashCode() { + return Objects.hash( + continuousUpdateStatus, + detailedState, + failedStatus, + message, + provisioningStatus, + triggeredUpdateStatus); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableStatus.class) + .add("continuousUpdateStatus", continuousUpdateStatus) + .add("detailedState", detailedState) + .add("failedStatus", failedStatus) + .add("message", message) + .add("provisioningStatus", provisioningStatus) + .add("triggeredUpdateStatus", triggeredUpdateStatus) + .toString(); + } + + SyncedTableStatusPb toPb() { + SyncedTableStatusPb pb = new SyncedTableStatusPb(); + pb.setContinuousUpdateStatus(continuousUpdateStatus); + pb.setDetailedState(detailedState); + pb.setFailedStatus(failedStatus); + pb.setMessage(message); + pb.setProvisioningStatus(provisioningStatus); + pb.setTriggeredUpdateStatus(triggeredUpdateStatus); + + return pb; + } + + static SyncedTableStatus 
fromPb(SyncedTableStatusPb pb) { + SyncedTableStatus model = new SyncedTableStatus(); + model.setContinuousUpdateStatus(pb.getContinuousUpdateStatus()); + model.setDetailedState(pb.getDetailedState()); + model.setFailedStatus(pb.getFailedStatus()); + model.setMessage(pb.getMessage()); + model.setProvisioningStatus(pb.getProvisioningStatus()); + model.setTriggeredUpdateStatus(pb.getTriggeredUpdateStatus()); + + return model; + } + + public static class SyncedTableStatusSerializer extends JsonSerializer { + @Override + public void serialize(SyncedTableStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableStatusDeserializer extends JsonDeserializer { + @Override + public SyncedTableStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableStatusPb pb = mapper.readValue(p, SyncedTableStatusPb.class); + return SyncedTableStatus.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatusPb.java new file mode 100755 index 000000000..5be85bb32 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatusPb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status of a synced table. 
*/ +@Generated +class SyncedTableStatusPb { + @JsonProperty("continuous_update_status") + private SyncedTableContinuousUpdateStatus continuousUpdateStatus; + + @JsonProperty("detailed_state") + private SyncedTableState detailedState; + + @JsonProperty("failed_status") + private SyncedTableFailedStatus failedStatus; + + @JsonProperty("message") + private String message; + + @JsonProperty("provisioning_status") + private SyncedTableProvisioningStatus provisioningStatus; + + @JsonProperty("triggered_update_status") + private SyncedTableTriggeredUpdateStatus triggeredUpdateStatus; + + public SyncedTableStatusPb setContinuousUpdateStatus( + SyncedTableContinuousUpdateStatus continuousUpdateStatus) { + this.continuousUpdateStatus = continuousUpdateStatus; + return this; + } + + public SyncedTableContinuousUpdateStatus getContinuousUpdateStatus() { + return continuousUpdateStatus; + } + + public SyncedTableStatusPb setDetailedState(SyncedTableState detailedState) { + this.detailedState = detailedState; + return this; + } + + public SyncedTableState getDetailedState() { + return detailedState; + } + + public SyncedTableStatusPb setFailedStatus(SyncedTableFailedStatus failedStatus) { + this.failedStatus = failedStatus; + return this; + } + + public SyncedTableFailedStatus getFailedStatus() { + return failedStatus; + } + + public SyncedTableStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public SyncedTableStatusPb setProvisioningStatus( + SyncedTableProvisioningStatus provisioningStatus) { + this.provisioningStatus = provisioningStatus; + return this; + } + + public SyncedTableProvisioningStatus getProvisioningStatus() { + return provisioningStatus; + } + + public SyncedTableStatusPb setTriggeredUpdateStatus( + SyncedTableTriggeredUpdateStatus triggeredUpdateStatus) { + this.triggeredUpdateStatus = triggeredUpdateStatus; + return this; + } + + public SyncedTableTriggeredUpdateStatus 
getTriggeredUpdateStatus() { + return triggeredUpdateStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableStatusPb that = (SyncedTableStatusPb) o; + return Objects.equals(continuousUpdateStatus, that.continuousUpdateStatus) + && Objects.equals(detailedState, that.detailedState) + && Objects.equals(failedStatus, that.failedStatus) + && Objects.equals(message, that.message) + && Objects.equals(provisioningStatus, that.provisioningStatus) + && Objects.equals(triggeredUpdateStatus, that.triggeredUpdateStatus); + } + + @Override + public int hashCode() { + return Objects.hash( + continuousUpdateStatus, + detailedState, + failedStatus, + message, + provisioningStatus, + triggeredUpdateStatus); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableStatusPb.class) + .add("continuousUpdateStatus", continuousUpdateStatus) + .add("detailedState", detailedState) + .add("failedStatus", failedStatus) + .add("message", message) + .add("provisioningStatus", provisioningStatus) + .add("triggeredUpdateStatus", triggeredUpdateStatus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java new file mode 100755 index 000000000..df4636b56 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java @@ -0,0 +1,138 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE or + * the SYNCED_NO_PENDING_UPDATE state. + */ +@Generated +@JsonSerialize( + using = SyncedTableTriggeredUpdateStatus.SyncedTableTriggeredUpdateStatusSerializer.class) +@JsonDeserialize( + using = SyncedTableTriggeredUpdateStatus.SyncedTableTriggeredUpdateStatusDeserializer.class) +public class SyncedTableTriggeredUpdateStatus { + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may not be completely synced to the synced table yet. + */ + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. + */ + private String timestamp; + + /** Progress of the active data synchronization pipeline. 
*/ + private SyncedTablePipelineProgress triggeredUpdateProgress; + + public SyncedTableTriggeredUpdateStatus setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableTriggeredUpdateStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public SyncedTableTriggeredUpdateStatus setTriggeredUpdateProgress( + SyncedTablePipelineProgress triggeredUpdateProgress) { + this.triggeredUpdateProgress = triggeredUpdateProgress; + return this; + } + + public SyncedTablePipelineProgress getTriggeredUpdateProgress() { + return triggeredUpdateProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableTriggeredUpdateStatus that = (SyncedTableTriggeredUpdateStatus) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(triggeredUpdateProgress, that.triggeredUpdateProgress); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp, triggeredUpdateProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableTriggeredUpdateStatus.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .add("triggeredUpdateProgress", triggeredUpdateProgress) + .toString(); + } + + SyncedTableTriggeredUpdateStatusPb toPb() { + SyncedTableTriggeredUpdateStatusPb pb = new SyncedTableTriggeredUpdateStatusPb(); + pb.setLastProcessedCommitVersion(lastProcessedCommitVersion); + pb.setTimestamp(timestamp); + pb.setTriggeredUpdateProgress(triggeredUpdateProgress); + + return pb; + } 
+ + static SyncedTableTriggeredUpdateStatus fromPb(SyncedTableTriggeredUpdateStatusPb pb) { + SyncedTableTriggeredUpdateStatus model = new SyncedTableTriggeredUpdateStatus(); + model.setLastProcessedCommitVersion(pb.getLastProcessedCommitVersion()); + model.setTimestamp(pb.getTimestamp()); + model.setTriggeredUpdateProgress(pb.getTriggeredUpdateProgress()); + + return model; + } + + public static class SyncedTableTriggeredUpdateStatusSerializer + extends JsonSerializer { + @Override + public void serialize( + SyncedTableTriggeredUpdateStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncedTableTriggeredUpdateStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncedTableTriggeredUpdateStatusDeserializer + extends JsonDeserializer { + @Override + public SyncedTableTriggeredUpdateStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncedTableTriggeredUpdateStatusPb pb = + mapper.readValue(p, SyncedTableTriggeredUpdateStatusPb.class); + return SyncedTableTriggeredUpdateStatus.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatusPb.java new file mode 100755 index 000000000..144f8a4a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatusPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE or + * the SYNCED_NO_PENDING_UPDATE state. + */ +@Generated +class SyncedTableTriggeredUpdateStatusPb { + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + @JsonProperty("timestamp") + private String timestamp; + + @JsonProperty("triggered_update_progress") + private SyncedTablePipelineProgress triggeredUpdateProgress; + + public SyncedTableTriggeredUpdateStatusPb setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableTriggeredUpdateStatusPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public SyncedTableTriggeredUpdateStatusPb setTriggeredUpdateProgress( + SyncedTablePipelineProgress triggeredUpdateProgress) { + this.triggeredUpdateProgress = triggeredUpdateProgress; + return this; + } + + public SyncedTablePipelineProgress getTriggeredUpdateProgress() { + return triggeredUpdateProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableTriggeredUpdateStatusPb that = (SyncedTableTriggeredUpdateStatusPb) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(triggeredUpdateProgress, that.triggeredUpdateProgress); + } + + @Override + public int hashCode() { + return 
Objects.hash(lastProcessedCommitVersion, timestamp, triggeredUpdateProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableTriggeredUpdateStatusPb.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .add("triggeredUpdateProgress", triggeredUpdateProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java new file mode 100755 index 000000000..7dd109d33 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Update a Database Instance */ +@Generated +@JsonSerialize(using = UpdateDatabaseInstanceRequest.UpdateDatabaseInstanceRequestSerializer.class) +@JsonDeserialize( + using = UpdateDatabaseInstanceRequest.UpdateDatabaseInstanceRequestDeserializer.class) +public class UpdateDatabaseInstanceRequest { + /** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and + * storage. 
+ */ + private DatabaseInstance databaseInstance; + + /** The name of the instance. This is the unique identifier for the instance. */ + private String name; + + /** The list of fields to update. */ + private String updateMask; + + public UpdateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + this.databaseInstance = databaseInstance; + return this; + } + + public DatabaseInstance getDatabaseInstance() { + return databaseInstance; + } + + public UpdateDatabaseInstanceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateDatabaseInstanceRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseInstanceRequest that = (UpdateDatabaseInstanceRequest) o; + return Objects.equals(databaseInstance, that.databaseInstance) + && Objects.equals(name, that.name) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstance, name, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseInstanceRequest.class) + .add("databaseInstance", databaseInstance) + .add("name", name) + .add("updateMask", updateMask) + .toString(); + } + + UpdateDatabaseInstanceRequestPb toPb() { + UpdateDatabaseInstanceRequestPb pb = new UpdateDatabaseInstanceRequestPb(); + pb.setDatabaseInstance(databaseInstance); + pb.setName(name); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateDatabaseInstanceRequest fromPb(UpdateDatabaseInstanceRequestPb pb) { + UpdateDatabaseInstanceRequest model = new UpdateDatabaseInstanceRequest(); + model.setDatabaseInstance(pb.getDatabaseInstance()); + model.setName(pb.getName()); + 
model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateDatabaseInstanceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDatabaseInstanceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDatabaseInstanceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDatabaseInstanceRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDatabaseInstanceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDatabaseInstanceRequestPb pb = + mapper.readValue(p, UpdateDatabaseInstanceRequestPb.class); + return UpdateDatabaseInstanceRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequestPb.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequestPb.java index d40d63ba7..887036f88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequestPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; @@ -11,23 +11,17 @@ /** Update a Database Instance */ @Generated -public class UpdateDatabaseInstanceRequest { - /** - * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and - * storage. - */ +class UpdateDatabaseInstanceRequestPb { @JsonProperty("database_instance") private DatabaseInstance databaseInstance; - /** The name of the instance. This is the unique identifier for the instance. */ @JsonIgnore private String name; - /** The list of fields to update. */ @JsonIgnore @QueryParam("update_mask") private String updateMask; - public UpdateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + public UpdateDatabaseInstanceRequestPb setDatabaseInstance(DatabaseInstance databaseInstance) { this.databaseInstance = databaseInstance; return this; } @@ -36,7 +30,7 @@ public DatabaseInstance getDatabaseInstance() { return databaseInstance; } - public UpdateDatabaseInstanceRequest setName(String name) { + public UpdateDatabaseInstanceRequestPb setName(String name) { this.name = name; return this; } @@ -45,7 +39,7 @@ public String getName() { return name; } - public UpdateDatabaseInstanceRequest setUpdateMask(String updateMask) { + public UpdateDatabaseInstanceRequestPb setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @@ -58,7 +52,7 @@ public String getUpdateMask() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - UpdateDatabaseInstanceRequest that = (UpdateDatabaseInstanceRequest) o; + UpdateDatabaseInstanceRequestPb that = (UpdateDatabaseInstanceRequestPb) o; return Objects.equals(databaseInstance, that.databaseInstance) && Objects.equals(name, that.name) && Objects.equals(updateMask, that.updateMask); @@ -71,7 +65,7 @@ 
public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateDatabaseInstanceRequest.class) + return new ToStringer(UpdateDatabaseInstanceRequestPb.class) .add("databaseInstance", databaseInstance) .add("name", name) .add("updateMask", updateMask) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java index 00acf2b3a..5c44fc6d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddBlock.AddBlockSerializer.class) +@JsonDeserialize(using = AddBlock.AddBlockDeserializer.class) public class AddBlock { /** The base64-encoded data to append to the stream. This has a limit of 1 MB. */ - @JsonProperty("data") private String data; /** The handle on an open stream. 
*/ - @JsonProperty("handle") private Long handle; public AddBlock setData(String data) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(AddBlock.class).add("data", data).add("handle", handle).toString(); } + + AddBlockPb toPb() { + AddBlockPb pb = new AddBlockPb(); + pb.setData(data); + pb.setHandle(handle); + + return pb; + } + + static AddBlock fromPb(AddBlockPb pb) { + AddBlock model = new AddBlock(); + model.setData(pb.getData()); + model.setHandle(pb.getHandle()); + + return model; + } + + public static class AddBlockSerializer extends JsonSerializer { + @Override + public void serialize(AddBlock value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddBlockPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddBlockDeserializer extends JsonDeserializer { + @Override + public AddBlock deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddBlockPb pb = mapper.readValue(p, AddBlockPb.class); + return AddBlock.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockPb.java new file mode 100755 index 000000000..c30e6a80f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AddBlockPb { + @JsonProperty("data") + private String data; + + @JsonProperty("handle") + private Long handle; + + public AddBlockPb setData(String data) { + this.data = data; + return this; + } + + public String getData() { + return data; + } + + public AddBlockPb setHandle(Long handle) { + this.handle = handle; + return this; + } + + public Long getHandle() { + return handle; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddBlockPb that = (AddBlockPb) o; + return Objects.equals(data, that.data) && Objects.equals(handle, that.handle); + } + + @Override + public int hashCode() { + return Objects.hash(data, handle); + } + + @Override + public String toString() { + return new ToStringer(AddBlockPb.class).add("data", data).add("handle", handle).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java index 8d7475d9d..f8fa731ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddBlockResponse.AddBlockResponseSerializer.class) +@JsonDeserialize(using = AddBlockResponse.AddBlockResponseDeserializer.class) public class AddBlockResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(AddBlockResponse.class).toString(); } + + AddBlockResponsePb toPb() { + AddBlockResponsePb pb = new AddBlockResponsePb(); + + return pb; + } + + static AddBlockResponse fromPb(AddBlockResponsePb pb) { + AddBlockResponse model = new AddBlockResponse(); + + return model; + } + + public static class AddBlockResponseSerializer extends JsonSerializer { + @Override + public void serialize(AddBlockResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddBlockResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddBlockResponseDeserializer extends JsonDeserializer { + @Override + public AddBlockResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddBlockResponsePb pb = mapper.readValue(p, AddBlockResponsePb.class); + return AddBlockResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponsePb.java new file mode 100755 index 000000000..cdd884563 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class AddBlockResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AddBlockResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java index 91fb3213c..4ba295834 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Close.CloseSerializer.class) +@JsonDeserialize(using = Close.CloseDeserializer.class) public class Close { /** The handle on an open stream. 
*/ - @JsonProperty("handle") private Long handle; public Close setHandle(Long handle) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(Close.class).add("handle", handle).toString(); } + + ClosePb toPb() { + ClosePb pb = new ClosePb(); + pb.setHandle(handle); + + return pb; + } + + static Close fromPb(ClosePb pb) { + Close model = new Close(); + model.setHandle(pb.getHandle()); + + return model; + } + + public static class CloseSerializer extends JsonSerializer { + @Override + public void serialize(Close value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClosePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloseDeserializer extends JsonDeserializer { + @Override + public Close deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClosePb pb = mapper.readValue(p, ClosePb.class); + return Close.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ClosePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ClosePb.java new file mode 100755 index 000000000..3b647f441 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ClosePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClosePb { + @JsonProperty("handle") + private Long handle; + + public ClosePb setHandle(Long handle) { + this.handle = handle; + return this; + } + + public Long getHandle() { + return handle; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClosePb that = (ClosePb) o; + return Objects.equals(handle, that.handle); + } + + @Override + public int hashCode() { + return Objects.hash(handle); + } + + @Override + public String toString() { + return new ToStringer(ClosePb.class).add("handle", handle).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java index 8126adce4..3160aee58 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
CloseResponse.CloseResponseSerializer.class) +@JsonDeserialize(using = CloseResponse.CloseResponseDeserializer.class) public class CloseResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(CloseResponse.class).toString(); } + + CloseResponsePb toPb() { + CloseResponsePb pb = new CloseResponsePb(); + + return pb; + } + + static CloseResponse fromPb(CloseResponsePb pb) { + CloseResponse model = new CloseResponse(); + + return model; + } + + public static class CloseResponseSerializer extends JsonSerializer { + @Override + public void serialize(CloseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CloseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloseResponseDeserializer extends JsonDeserializer { + @Override + public CloseResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CloseResponsePb pb = mapper.readValue(p, CloseResponsePb.class); + return CloseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponsePb.java new file mode 100755 index 000000000..87c211b51 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CloseResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CloseResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Converters.java new file mode 100755 index 000000000..4bc65e6f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.files; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static 
Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java index d2129223c..ada968ac1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Create.CreateSerializer.class) +@JsonDeserialize(using = Create.CreateDeserializer.class) public class Create { /** The flag that specifies whether to overwrite existing file/files. */ - @JsonProperty("overwrite") private Boolean overwrite; /** The path of the new file. The path should be the absolute DBFS path. */ - @JsonProperty("path") private String path; public Create setOverwrite(Boolean overwrite) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(Create.class).add("overwrite", overwrite).add("path", path).toString(); } + + CreatePb toPb() { + CreatePb pb = new CreatePb(); + pb.setOverwrite(overwrite); + pb.setPath(path); + + return pb; + } + + static Create fromPb(CreatePb pb) { + Create model = new Create(); + model.setOverwrite(pb.getOverwrite()); + model.setPath(pb.getPath()); + + return model; + } + + public static class CreateSerializer extends JsonSerializer { + @Override + public void serialize(Create value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDeserializer extends JsonDeserializer { + @Override + public Create deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePb pb = mapper.readValue(p, CreatePb.class); + return Create.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java index 6b8708368..1e6b84279 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create a directory */ @Generated +@JsonSerialize(using = CreateDirectoryRequest.CreateDirectoryRequestSerializer.class) +@JsonDeserialize(using = CreateDirectoryRequest.CreateDirectoryRequestDeserializer.class) public class CreateDirectoryRequest { /** The absolute path of a directory. 
*/ - @JsonIgnore private String directoryPath; + private String directoryPath; public CreateDirectoryRequest setDirectoryPath(String directoryPath) { this.directoryPath = directoryPath; @@ -41,4 +52,41 @@ public String toString() { .add("directoryPath", directoryPath) .toString(); } + + CreateDirectoryRequestPb toPb() { + CreateDirectoryRequestPb pb = new CreateDirectoryRequestPb(); + pb.setDirectoryPath(directoryPath); + + return pb; + } + + static CreateDirectoryRequest fromPb(CreateDirectoryRequestPb pb) { + CreateDirectoryRequest model = new CreateDirectoryRequest(); + model.setDirectoryPath(pb.getDirectoryPath()); + + return model; + } + + public static class CreateDirectoryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDirectoryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDirectoryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDirectoryRequestDeserializer + extends JsonDeserializer { + @Override + public CreateDirectoryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDirectoryRequestPb pb = mapper.readValue(p, CreateDirectoryRequestPb.class); + return CreateDirectoryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequestPb.java new file mode 100755 index 000000000..20ea081a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Create a directory */ +@Generated +class CreateDirectoryRequestPb { + @JsonIgnore private String directoryPath; + + public CreateDirectoryRequestPb setDirectoryPath(String directoryPath) { + this.directoryPath = directoryPath; + return this; + } + + public String getDirectoryPath() { + return directoryPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDirectoryRequestPb that = (CreateDirectoryRequestPb) o; + return Objects.equals(directoryPath, that.directoryPath); + } + + @Override + public int hashCode() { + return Objects.hash(directoryPath); + } + + @Override + public String toString() { + return new ToStringer(CreateDirectoryRequestPb.class) + .add("directoryPath", directoryPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java index 63bfd628e..c5f98bfb4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateDirectoryResponse.CreateDirectoryResponseSerializer.class) +@JsonDeserialize(using = CreateDirectoryResponse.CreateDirectoryResponseDeserializer.class) public class CreateDirectoryResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(CreateDirectoryResponse.class).toString(); } + + CreateDirectoryResponsePb toPb() { + CreateDirectoryResponsePb pb = new CreateDirectoryResponsePb(); + + return pb; + } + + static CreateDirectoryResponse fromPb(CreateDirectoryResponsePb pb) { + CreateDirectoryResponse model = new CreateDirectoryResponse(); + + return model; + } + + public static class CreateDirectoryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateDirectoryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateDirectoryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateDirectoryResponseDeserializer + extends JsonDeserializer { + @Override + public CreateDirectoryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateDirectoryResponsePb pb = mapper.readValue(p, CreateDirectoryResponsePb.class); + return CreateDirectoryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponsePb.java new file mode 100755 index 000000000..3431e0e04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CreateDirectoryResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CreateDirectoryResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreatePb.java new file mode 100755 index 000000000..94444c33e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreatePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePb { + @JsonProperty("overwrite") + private Boolean overwrite; + + @JsonProperty("path") + private String path; + + public CreatePb setOverwrite(Boolean overwrite) { + this.overwrite = overwrite; + return this; + } + + public Boolean getOverwrite() { + return overwrite; + } + + public CreatePb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePb that = (CreatePb) o; + return Objects.equals(overwrite, that.overwrite) && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(overwrite, path); + } + + @Override + public String toString() { + return new ToStringer(CreatePb.class).add("overwrite", overwrite).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java index 1649e61ea..0445ebb17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateResponse.CreateResponseSerializer.class) +@JsonDeserialize(using = CreateResponse.CreateResponseDeserializer.class) public class CreateResponse { /** * Handle which should subsequently be passed into the AddBlock and Close calls when writing to a * file through a stream. */ - @JsonProperty("handle") private Long handle; public CreateResponse setHandle(Long handle) { @@ -42,4 +52,38 @@ public int hashCode() { public String toString() { return new ToStringer(CreateResponse.class).add("handle", handle).toString(); } + + CreateResponsePb toPb() { + CreateResponsePb pb = new CreateResponsePb(); + pb.setHandle(handle); + + return pb; + } + + static CreateResponse fromPb(CreateResponsePb pb) { + CreateResponse model = new CreateResponse(); + model.setHandle(pb.getHandle()); + + return model; + } + + public static class CreateResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateResponseDeserializer extends JsonDeserializer { + @Override + public CreateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateResponsePb pb = mapper.readValue(p, CreateResponsePb.class); + return CreateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponsePb.java new file mode 100755 index 000000000..6a00d0e4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateResponsePb { + @JsonProperty("handle") + private Long handle; + + public CreateResponsePb setHandle(Long handle) { + this.handle = handle; + return this; + } + + public Long getHandle() { + return handle; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateResponsePb that = (CreateResponsePb) o; + return Objects.equals(handle, that.handle); + } + + @Override + public int hashCode() { + return Objects.hash(handle); + } + + @Override + public String toString() { + return new ToStringer(CreateResponsePb.class).add("handle", handle).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java index 1887e9e8c..90bbd060e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java @@ -21,7 +21,7 @@ public void addBlock(AddBlock request) { String path = 
"/api/2.0/dbfs/add-block"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, AddBlockResponse.class); @@ -35,7 +35,7 @@ public void close(Close request) { String path = "/api/2.0/dbfs/close"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CloseResponse.class); @@ -49,7 +49,7 @@ public CreateResponse create(Create request) { String path = "/api/2.0/dbfs/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateResponse.class); @@ -63,7 +63,7 @@ public void delete(Delete request) { String path = "/api/2.0/dbfs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteResponse.class); @@ -77,7 +77,7 @@ public FileInfo getStatus(GetStatusRequest request) { String path = "/api/2.0/dbfs/get-status"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FileInfo.class); } catch (IOException e) { @@ -90,7 +90,7 @@ public ListStatusResponse list(ListDbfsRequest request) { String path = "/api/2.0/dbfs/list"; try { 
Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListStatusResponse.class); } catch (IOException e) { @@ -103,7 +103,7 @@ public void mkdirs(MkDirs request) { String path = "/api/2.0/dbfs/mkdirs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, MkDirsResponse.class); @@ -117,7 +117,7 @@ public void move(Move request) { String path = "/api/2.0/dbfs/move"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, MoveResponse.class); @@ -131,7 +131,7 @@ public void put(Put request) { String path = "/api/2.0/dbfs/put"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PutResponse.class); @@ -145,7 +145,7 @@ public ReadResponse read(ReadDbfsRequest request) { String path = "/api/2.0/dbfs/read"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ReadResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java index 7214e8693..94a7ef816 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Delete.DeleteSerializer.class) +@JsonDeserialize(using = Delete.DeleteDeserializer.class) public class Delete { /** The path of the file or directory to delete. The path should be the absolute DBFS path. */ - @JsonProperty("path") private String path; /** * Whether or not to recursively delete the directory's contents. Deleting empty directories can * be done without providing the recursive flag. 
*/ - @JsonProperty("recursive") private Boolean recursive; public Delete setPath(String path) { @@ -55,4 +64,39 @@ public int hashCode() { public String toString() { return new ToStringer(Delete.class).add("path", path).add("recursive", recursive).toString(); } + + DeletePb toPb() { + DeletePb pb = new DeletePb(); + pb.setPath(path); + pb.setRecursive(recursive); + + return pb; + } + + static Delete fromPb(DeletePb pb) { + Delete model = new Delete(); + model.setPath(pb.getPath()); + model.setRecursive(pb.getRecursive()); + + return model; + } + + public static class DeleteSerializer extends JsonSerializer { + @Override + public void serialize(Delete value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDeserializer extends JsonDeserializer { + @Override + public Delete deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePb pb = mapper.readValue(p, DeletePb.class); + return Delete.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java index 462b1a8a4..85546e002 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a directory */ @Generated +@JsonSerialize(using = DeleteDirectoryRequest.DeleteDirectoryRequestSerializer.class) +@JsonDeserialize(using = DeleteDirectoryRequest.DeleteDirectoryRequestDeserializer.class) public class DeleteDirectoryRequest { /** The absolute path of a directory. 
*/ - @JsonIgnore private String directoryPath; + private String directoryPath; public DeleteDirectoryRequest setDirectoryPath(String directoryPath) { this.directoryPath = directoryPath; @@ -41,4 +52,41 @@ public String toString() { .add("directoryPath", directoryPath) .toString(); } + + DeleteDirectoryRequestPb toPb() { + DeleteDirectoryRequestPb pb = new DeleteDirectoryRequestPb(); + pb.setDirectoryPath(directoryPath); + + return pb; + } + + static DeleteDirectoryRequest fromPb(DeleteDirectoryRequestPb pb) { + DeleteDirectoryRequest model = new DeleteDirectoryRequest(); + model.setDirectoryPath(pb.getDirectoryPath()); + + return model; + } + + public static class DeleteDirectoryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDirectoryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDirectoryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDirectoryRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDirectoryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDirectoryRequestPb pb = mapper.readValue(p, DeleteDirectoryRequestPb.class); + return DeleteDirectoryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequestPb.java new file mode 100755 index 000000000..75ac8a345 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a directory */ +@Generated +class DeleteDirectoryRequestPb { + @JsonIgnore private String directoryPath; + + public DeleteDirectoryRequestPb setDirectoryPath(String directoryPath) { + this.directoryPath = directoryPath; + return this; + } + + public String getDirectoryPath() { + return directoryPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDirectoryRequestPb that = (DeleteDirectoryRequestPb) o; + return Objects.equals(directoryPath, that.directoryPath); + } + + @Override + public int hashCode() { + return Objects.hash(directoryPath); + } + + @Override + public String toString() { + return new ToStringer(DeleteDirectoryRequestPb.class) + .add("directoryPath", directoryPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java index bd1a5f4b6..27938d21e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteDirectoryResponse.DeleteDirectoryResponseSerializer.class) +@JsonDeserialize(using = DeleteDirectoryResponse.DeleteDirectoryResponseDeserializer.class) public class DeleteDirectoryResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDirectoryResponse.class).toString(); } + + DeleteDirectoryResponsePb toPb() { + DeleteDirectoryResponsePb pb = new DeleteDirectoryResponsePb(); + + return pb; + } + + static DeleteDirectoryResponse fromPb(DeleteDirectoryResponsePb pb) { + DeleteDirectoryResponse model = new DeleteDirectoryResponse(); + + return model; + } + + public static class DeleteDirectoryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDirectoryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDirectoryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDirectoryResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDirectoryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDirectoryResponsePb pb = mapper.readValue(p, DeleteDirectoryResponsePb.class); + return DeleteDirectoryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponsePb.java new file mode 100755 index 000000000..19b7f203b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteDirectoryResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteDirectoryResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java index 14fb11c3e..271b02059 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a file */ @Generated +@JsonSerialize(using = DeleteFileRequest.DeleteFileRequestSerializer.class) +@JsonDeserialize(using = DeleteFileRequest.DeleteFileRequestDeserializer.class) public class DeleteFileRequest { /** The absolute path of the file. */ - @JsonIgnore private String filePath; + private String filePath; public DeleteFileRequest setFilePath(String filePath) { this.filePath = filePath; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteFileRequest.class).add("filePath", filePath).toString(); } + + DeleteFileRequestPb toPb() { + DeleteFileRequestPb pb = new DeleteFileRequestPb(); + pb.setFilePath(filePath); + + return pb; + } + + static DeleteFileRequest fromPb(DeleteFileRequestPb pb) { + DeleteFileRequest model = new DeleteFileRequest(); + model.setFilePath(pb.getFilePath()); + + return model; + } + + public static class DeleteFileRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteFileRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteFileRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteFileRequestDeserializer extends JsonDeserializer { + @Override + public DeleteFileRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteFileRequestPb pb = mapper.readValue(p, DeleteFileRequestPb.class); + return DeleteFileRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequestPb.java new file mode 100755 index 000000000..a29096c5f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a file */ +@Generated +class DeleteFileRequestPb { + @JsonIgnore private String filePath; + + public DeleteFileRequestPb setFilePath(String filePath) { + this.filePath = filePath; + return this; + } + + public String getFilePath() { + return filePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteFileRequestPb that = (DeleteFileRequestPb) o; + return Objects.equals(filePath, that.filePath); + } + + @Override + public int hashCode() { + return Objects.hash(filePath); + } + + @Override + public String toString() { + return new ToStringer(DeleteFileRequestPb.class).add("filePath", filePath).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeletePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeletePb.java new file mode 100755 index 000000000..6c3223677 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeletePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeletePb { + @JsonProperty("path") + private String path; + + @JsonProperty("recursive") + private Boolean recursive; + + public DeletePb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public DeletePb setRecursive(Boolean recursive) { + this.recursive = recursive; + return this; + } + + public Boolean getRecursive() { + return recursive; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePb that = (DeletePb) o; + return Objects.equals(path, that.path) && Objects.equals(recursive, that.recursive); + } + + @Override + public int hashCode() { + return Objects.hash(path, recursive); + } + + @Override + public String toString() { + return new ToStringer(DeletePb.class).add("path", path).add("recursive", recursive).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java index 6d741c8d3..b14c3bf7c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponsePb.java new file mode 100755 index 000000000..d2691750f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java index 53339e663..04ebb9baa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DirectoryEntry.DirectoryEntrySerializer.class) +@JsonDeserialize(using = DirectoryEntry.DirectoryEntryDeserializer.class) public class DirectoryEntry { /** The length of the file in bytes. This field is omitted for directories. */ - @JsonProperty("file_size") private Long fileSize; /** True if the path is a directory. */ - @JsonProperty("is_directory") private Boolean isDirectory; /** Last modification time of given file in milliseconds since unix epoch. */ - @JsonProperty("last_modified") private Long lastModified; /** The name of the file or directory. This is the last component of the path. */ - @JsonProperty("name") private String name; /** The absolute path of the file or directory. 
*/ - @JsonProperty("path") private String path; public DirectoryEntry setFileSize(Long fileSize) { @@ -101,4 +107,46 @@ public String toString() { .add("path", path) .toString(); } + + DirectoryEntryPb toPb() { + DirectoryEntryPb pb = new DirectoryEntryPb(); + pb.setFileSize(fileSize); + pb.setIsDirectory(isDirectory); + pb.setLastModified(lastModified); + pb.setName(name); + pb.setPath(path); + + return pb; + } + + static DirectoryEntry fromPb(DirectoryEntryPb pb) { + DirectoryEntry model = new DirectoryEntry(); + model.setFileSize(pb.getFileSize()); + model.setIsDirectory(pb.getIsDirectory()); + model.setLastModified(pb.getLastModified()); + model.setName(pb.getName()); + model.setPath(pb.getPath()); + + return model; + } + + public static class DirectoryEntrySerializer extends JsonSerializer { + @Override + public void serialize(DirectoryEntry value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DirectoryEntryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DirectoryEntryDeserializer extends JsonDeserializer { + @Override + public DirectoryEntry deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DirectoryEntryPb pb = mapper.readValue(p, DirectoryEntryPb.class); + return DirectoryEntry.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntryPb.java new file mode 100755 index 000000000..7c90f2810 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntryPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DirectoryEntryPb { + @JsonProperty("file_size") + private Long fileSize; + + @JsonProperty("is_directory") + private Boolean isDirectory; + + @JsonProperty("last_modified") + private Long lastModified; + + @JsonProperty("name") + private String name; + + @JsonProperty("path") + private String path; + + public DirectoryEntryPb setFileSize(Long fileSize) { + this.fileSize = fileSize; + return this; + } + + public Long getFileSize() { + return fileSize; + } + + public DirectoryEntryPb setIsDirectory(Boolean isDirectory) { + this.isDirectory = isDirectory; + return this; + } + + public Boolean getIsDirectory() { + return isDirectory; + } + + public DirectoryEntryPb setLastModified(Long lastModified) { + this.lastModified = lastModified; + return this; + } + + public Long getLastModified() { + return lastModified; + } + + public DirectoryEntryPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DirectoryEntryPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DirectoryEntryPb that = (DirectoryEntryPb) o; + return Objects.equals(fileSize, that.fileSize) + && Objects.equals(isDirectory, that.isDirectory) + && Objects.equals(lastModified, that.lastModified) + && Objects.equals(name, that.name) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(fileSize, isDirectory, lastModified, name, path); + } + + @Override + public String toString() { + return new ToStringer(DirectoryEntryPb.class) + .add("fileSize", 
fileSize) + .add("isDirectory", isDirectory) + .add("lastModified", lastModified) + .add("name", name) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java index 91e5cefa5..e7589b350 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Download a file */ @Generated +@JsonSerialize(using = DownloadRequest.DownloadRequestSerializer.class) +@JsonDeserialize(using = DownloadRequest.DownloadRequestDeserializer.class) public class DownloadRequest { /** The absolute path of the file. 
*/ - @JsonIgnore private String filePath; + private String filePath; public DownloadRequest setFilePath(String filePath) { this.filePath = filePath; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DownloadRequest.class).add("filePath", filePath).toString(); } + + DownloadRequestPb toPb() { + DownloadRequestPb pb = new DownloadRequestPb(); + pb.setFilePath(filePath); + + return pb; + } + + static DownloadRequest fromPb(DownloadRequestPb pb) { + DownloadRequest model = new DownloadRequest(); + model.setFilePath(pb.getFilePath()); + + return model; + } + + public static class DownloadRequestSerializer extends JsonSerializer { + @Override + public void serialize(DownloadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DownloadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DownloadRequestDeserializer extends JsonDeserializer { + @Override + public DownloadRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DownloadRequestPb pb = mapper.readValue(p, DownloadRequestPb.class); + return DownloadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequestPb.java new file mode 100755 index 000000000..eb637572c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Download a file */ +@Generated +class DownloadRequestPb { + @JsonIgnore private String filePath; + + public DownloadRequestPb setFilePath(String filePath) { + this.filePath = filePath; + return this; + } + + public String getFilePath() { + return filePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DownloadRequestPb that = (DownloadRequestPb) o; + return Objects.equals(filePath, that.filePath); + } + + @Override + public int hashCode() { + return Objects.hash(filePath); + } + + @Override + public String toString() { + return new ToStringer(DownloadRequestPb.class).add("filePath", filePath).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java index dc199e942..f7ad693bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java @@ -3,30 +3,34 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Header; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; @Generated +@JsonSerialize(using = DownloadResponse.DownloadResponseSerializer.class) +@JsonDeserialize(using = DownloadResponse.DownloadResponseDeserializer.class) public class DownloadResponse { /** The length of the HTTP response body in bytes. */ - @JsonIgnore - @Header("content-length") private Long contentLength; /** */ - @JsonIgnore - @Header("content-type") private String contentType; /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; /** The last modified time of the file in HTTP-date (RFC 7231) format. */ - @JsonIgnore - @Header("last-modified") private String lastModified; public DownloadResponse setContentLength(Long contentLength) { @@ -90,4 +94,44 @@ public String toString() { .add("lastModified", lastModified) .toString(); } + + DownloadResponsePb toPb() { + DownloadResponsePb pb = new DownloadResponsePb(); + pb.setContentLength(contentLength); + pb.setContentType(contentType); + pb.setContents(contents); + pb.setLastModified(lastModified); + + return pb; + } + + static DownloadResponse fromPb(DownloadResponsePb pb) { + DownloadResponse model = new DownloadResponse(); + model.setContentLength(pb.getContentLength()); + model.setContentType(pb.getContentType()); + model.setContents(pb.getContents()); + model.setLastModified(pb.getLastModified()); + + return model; + } + + public static class DownloadResponseSerializer extends JsonSerializer { + @Override + public void serialize(DownloadResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DownloadResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DownloadResponseDeserializer extends JsonDeserializer { + @Override + public DownloadResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DownloadResponsePb pb = mapper.readValue(p, DownloadResponsePb.class); + return DownloadResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponsePb.java new file mode 100755 index 000000000..7aab8847a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponsePb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Header; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +@Generated +class DownloadResponsePb { + @JsonIgnore + @Header("content-length") + private Long contentLength; + + @JsonIgnore + @Header("content-type") + private String contentType; + + @JsonIgnore private InputStream contents; + + @JsonIgnore + @Header("last-modified") + private String lastModified; + + public DownloadResponsePb setContentLength(Long contentLength) { + this.contentLength = contentLength; + return this; + } + + public Long getContentLength() { + return contentLength; + } + + public DownloadResponsePb setContentType(String contentType) { + this.contentType = contentType; + return this; + } + + public String getContentType() { + return contentType; + } + + public DownloadResponsePb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + public DownloadResponsePb setLastModified(String lastModified) { + this.lastModified = 
lastModified; + return this; + } + + public String getLastModified() { + return lastModified; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DownloadResponsePb that = (DownloadResponsePb) o; + return Objects.equals(contentLength, that.contentLength) + && Objects.equals(contentType, that.contentType) + && Objects.equals(contents, that.contents) + && Objects.equals(lastModified, that.lastModified); + } + + @Override + public int hashCode() { + return Objects.hash(contentLength, contentType, contents, lastModified); + } + + @Override + public String toString() { + return new ToStringer(DownloadResponsePb.class) + .add("contentLength", contentLength) + .add("contentType", contentType) + .add("contents", contents) + .add("lastModified", lastModified) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfo.java index 43e6185ee..fc9d1c8b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfo.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; 
@Generated +@JsonSerialize(using = FileInfo.FileInfoSerializer.class) +@JsonDeserialize(using = FileInfo.FileInfoDeserializer.class) public class FileInfo { /** The length of the file in bytes. This field is omitted for directories. */ - @JsonProperty("file_size") private Long fileSize; /** True if the path is a directory. */ - @JsonProperty("is_dir") private Boolean isDir; /** Last modification time of given file in milliseconds since epoch. */ - @JsonProperty("modification_time") private Long modificationTime; /** The absolute path of the file or directory. */ - @JsonProperty("path") private String path; public FileInfo setFileSize(Long fileSize) { @@ -86,4 +93,43 @@ public String toString() { .add("path", path) .toString(); } + + FileInfoPb toPb() { + FileInfoPb pb = new FileInfoPb(); + pb.setFileSize(fileSize); + pb.setIsDir(isDir); + pb.setModificationTime(modificationTime); + pb.setPath(path); + + return pb; + } + + static FileInfo fromPb(FileInfoPb pb) { + FileInfo model = new FileInfo(); + model.setFileSize(pb.getFileSize()); + model.setIsDir(pb.getIsDir()); + model.setModificationTime(pb.getModificationTime()); + model.setPath(pb.getPath()); + + return model; + } + + public static class FileInfoSerializer extends JsonSerializer { + @Override + public void serialize(FileInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileInfoDeserializer extends JsonDeserializer { + @Override + public FileInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileInfoPb pb = mapper.readValue(p, FileInfoPb.class); + return FileInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfoPb.java new file mode 100755 index 000000000..619b4e09c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FileInfoPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileInfoPb { + @JsonProperty("file_size") + private Long fileSize; + + @JsonProperty("is_dir") + private Boolean isDir; + + @JsonProperty("modification_time") + private Long modificationTime; + + @JsonProperty("path") + private String path; + + public FileInfoPb setFileSize(Long fileSize) { + this.fileSize = fileSize; + return this; + } + + public Long getFileSize() { + return fileSize; + } + + public FileInfoPb setIsDir(Boolean isDir) { + this.isDir = isDir; + return this; + } + + public Boolean getIsDir() { + return isDir; + } + + public FileInfoPb setModificationTime(Long modificationTime) { + this.modificationTime = modificationTime; + return this; + } + + public Long getModificationTime() { + return modificationTime; + } + + public FileInfoPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileInfoPb that = (FileInfoPb) o; + return Objects.equals(fileSize, that.fileSize) + && Objects.equals(isDir, that.isDir) + && Objects.equals(modificationTime, 
that.modificationTime) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(fileSize, isDir, modificationTime, path); + } + + @Override + public String toString() { + return new ToStringer(FileInfoPb.class) + .add("fileSize", fileSize) + .add("isDir", isDir) + .add("modificationTime", modificationTime) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java index 25f565e50..7bfecc4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java @@ -26,6 +26,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java index 508253f77..501ffb394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java @@ -25,7 +25,7 @@ public void createDirectory(CreateDirectoryRequest request) { Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); try { Request req = new Request("PUT", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, CreateDirectoryResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -39,7 +39,7 @@ public void delete(DeleteFileRequest request) { "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -54,7 +54,7 @@ public void deleteDirectory(DeleteDirectoryRequest request) { Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteDirectoryResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -68,7 +68,7 @@ public DownloadResponse download(DownloadRequest request) { "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, 
request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/octet-stream"); return apiClient.execute(req, DownloadResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public void getDirectoryMetadata(GetDirectoryMetadataRequest request) { Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); try { Request req = new Request("HEAD", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, GetDirectoryMetadataResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -98,7 +98,7 @@ public GetMetadataResponse getMetadata(GetMetadataRequest request) { "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); try { Request req = new Request("HEAD", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); return apiClient.execute(req, GetMetadataResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -113,7 +113,7 @@ public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListDirectoryResponse.class); } catch (IOException e) { @@ -128,7 +128,7 @@ public void upload(UploadRequest request) { "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); try { Request req = new Request("PUT", path, request.getContents()); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/octet-stream"); apiClient.execute(req, UploadResponse.class); } catch (IOException e) { diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java index b5103d010..791175943 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java @@ -21,6 +21,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java index 7225379c4..ef794f978 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get directory metadata */ @Generated +@JsonSerialize(using = GetDirectoryMetadataRequest.GetDirectoryMetadataRequestSerializer.class) +@JsonDeserialize(using = GetDirectoryMetadataRequest.GetDirectoryMetadataRequestDeserializer.class) public class GetDirectoryMetadataRequest { /** The absolute path of a directory. 
*/ - @JsonIgnore private String directoryPath; + private String directoryPath; public GetDirectoryMetadataRequest setDirectoryPath(String directoryPath) { this.directoryPath = directoryPath; @@ -41,4 +52,41 @@ public String toString() { .add("directoryPath", directoryPath) .toString(); } + + GetDirectoryMetadataRequestPb toPb() { + GetDirectoryMetadataRequestPb pb = new GetDirectoryMetadataRequestPb(); + pb.setDirectoryPath(directoryPath); + + return pb; + } + + static GetDirectoryMetadataRequest fromPb(GetDirectoryMetadataRequestPb pb) { + GetDirectoryMetadataRequest model = new GetDirectoryMetadataRequest(); + model.setDirectoryPath(pb.getDirectoryPath()); + + return model; + } + + public static class GetDirectoryMetadataRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDirectoryMetadataRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDirectoryMetadataRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDirectoryMetadataRequestDeserializer + extends JsonDeserializer { + @Override + public GetDirectoryMetadataRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDirectoryMetadataRequestPb pb = mapper.readValue(p, GetDirectoryMetadataRequestPb.class); + return GetDirectoryMetadataRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequestPb.java new file mode 100755 index 000000000..4e7ea5aaa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get directory metadata */ +@Generated +class GetDirectoryMetadataRequestPb { + @JsonIgnore private String directoryPath; + + public GetDirectoryMetadataRequestPb setDirectoryPath(String directoryPath) { + this.directoryPath = directoryPath; + return this; + } + + public String getDirectoryPath() { + return directoryPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDirectoryMetadataRequestPb that = (GetDirectoryMetadataRequestPb) o; + return Objects.equals(directoryPath, that.directoryPath); + } + + @Override + public int hashCode() { + return Objects.hash(directoryPath); + } + + @Override + public String toString() { + return new ToStringer(GetDirectoryMetadataRequestPb.class) + .add("directoryPath", directoryPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java index 
324875d3e..9283a0d37 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java @@ -4,9 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetDirectoryMetadataResponse.GetDirectoryMetadataResponseSerializer.class) +@JsonDeserialize( + using = GetDirectoryMetadataResponse.GetDirectoryMetadataResponseDeserializer.class) public class GetDirectoryMetadataResponse { @Override @@ -25,4 +38,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetDirectoryMetadataResponse.class).toString(); } + + GetDirectoryMetadataResponsePb toPb() { + GetDirectoryMetadataResponsePb pb = new GetDirectoryMetadataResponsePb(); + + return pb; + } + + static GetDirectoryMetadataResponse fromPb(GetDirectoryMetadataResponsePb pb) { + GetDirectoryMetadataResponse model = new GetDirectoryMetadataResponse(); + + return model; + } + + public static class GetDirectoryMetadataResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDirectoryMetadataResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDirectoryMetadataResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class GetDirectoryMetadataResponseDeserializer + extends JsonDeserializer { + @Override + public GetDirectoryMetadataResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDirectoryMetadataResponsePb pb = mapper.readValue(p, GetDirectoryMetadataResponsePb.class); + return GetDirectoryMetadataResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponsePb.java new file mode 100755 index 000000000..c3b7472e9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class GetDirectoryMetadataResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(GetDirectoryMetadataResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java index 05693c633..e0f50a6d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java @@ 
-4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get file metadata */ @Generated +@JsonSerialize(using = GetMetadataRequest.GetMetadataRequestSerializer.class) +@JsonDeserialize(using = GetMetadataRequest.GetMetadataRequestDeserializer.class) public class GetMetadataRequest { /** The absolute path of the file. */ - @JsonIgnore private String filePath; + private String filePath; public GetMetadataRequest setFilePath(String filePath) { this.filePath = filePath; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetMetadataRequest.class).add("filePath", filePath).toString(); } + + GetMetadataRequestPb toPb() { + GetMetadataRequestPb pb = new GetMetadataRequestPb(); + pb.setFilePath(filePath); + + return pb; + } + + static GetMetadataRequest fromPb(GetMetadataRequestPb pb) { + GetMetadataRequest model = new GetMetadataRequest(); + model.setFilePath(pb.getFilePath()); + + return model; + } + + public static class GetMetadataRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetMetadataRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetMetadataRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetMetadataRequestDeserializer extends 
JsonDeserializer { + @Override + public GetMetadataRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetMetadataRequestPb pb = mapper.readValue(p, GetMetadataRequestPb.class); + return GetMetadataRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequestPb.java new file mode 100755 index 000000000..e045fc68f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get file metadata */ +@Generated +class GetMetadataRequestPb { + @JsonIgnore private String filePath; + + public GetMetadataRequestPb setFilePath(String filePath) { + this.filePath = filePath; + return this; + } + + public String getFilePath() { + return filePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMetadataRequestPb that = (GetMetadataRequestPb) o; + return Objects.equals(filePath, that.filePath); + } + + @Override + public int hashCode() { + return Objects.hash(filePath); + } + + @Override + public String toString() { + return new ToStringer(GetMetadataRequestPb.class).add("filePath", filePath).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java index 5fda9475b..31897b3fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java @@ -3,26 +3,30 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Header; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetMetadataResponse.GetMetadataResponseSerializer.class) +@JsonDeserialize(using = GetMetadataResponse.GetMetadataResponseDeserializer.class) public class GetMetadataResponse { /** The length of the HTTP response body in bytes. */ - @JsonIgnore - @Header("content-length") private Long contentLength; /** */ - @JsonIgnore - @Header("content-type") private String contentType; /** The last modified time of the file in HTTP-date (RFC 7231) format. 
*/ - @JsonIgnore - @Header("last-modified") private String lastModified; public GetMetadataResponse setContentLength(Long contentLength) { @@ -75,4 +79,43 @@ public String toString() { .add("lastModified", lastModified) .toString(); } + + GetMetadataResponsePb toPb() { + GetMetadataResponsePb pb = new GetMetadataResponsePb(); + pb.setContentLength(contentLength); + pb.setContentType(contentType); + pb.setLastModified(lastModified); + + return pb; + } + + static GetMetadataResponse fromPb(GetMetadataResponsePb pb) { + GetMetadataResponse model = new GetMetadataResponse(); + model.setContentLength(pb.getContentLength()); + model.setContentType(pb.getContentType()); + model.setLastModified(pb.getLastModified()); + + return model; + } + + public static class GetMetadataResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetMetadataResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetMetadataResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetMetadataResponseDeserializer + extends JsonDeserializer { + @Override + public GetMetadataResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetMetadataResponsePb pb = mapper.readValue(p, GetMetadataResponsePb.class); + return GetMetadataResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponsePb.java new file mode 100755 index 000000000..f8c38e18e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponsePb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Header; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +class GetMetadataResponsePb { + @JsonIgnore + @Header("content-length") + private Long contentLength; + + @JsonIgnore + @Header("content-type") + private String contentType; + + @JsonIgnore + @Header("last-modified") + private String lastModified; + + public GetMetadataResponsePb setContentLength(Long contentLength) { + this.contentLength = contentLength; + return this; + } + + public Long getContentLength() { + return contentLength; + } + + public GetMetadataResponsePb setContentType(String contentType) { + this.contentType = contentType; + return this; + } + + public String getContentType() { + return contentType; + } + + public GetMetadataResponsePb setLastModified(String lastModified) { + this.lastModified = lastModified; + return this; + } + + public String getLastModified() { + return lastModified; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMetadataResponsePb that = (GetMetadataResponsePb) o; + return Objects.equals(contentLength, that.contentLength) + && Objects.equals(contentType, that.contentType) + && Objects.equals(lastModified, that.lastModified); + } + + @Override + public int hashCode() { + return Objects.hash(contentLength, contentType, lastModified); + } + + @Override + public String toString() { + return new ToStringer(GetMetadataResponsePb.class) + .add("contentLength", contentLength) + .add("contentType", contentType) + .add("lastModified", lastModified) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java 
index f5091a01c..ecfc199b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the information of a file or directory */ @Generated +@JsonSerialize(using = GetStatusRequest.GetStatusRequestSerializer.class) +@JsonDeserialize(using = GetStatusRequest.GetStatusRequestDeserializer.class) public class GetStatusRequest { /** The path of the file or directory. The path should be the absolute DBFS path. 
*/ - @JsonIgnore - @QueryParam("path") private String path; public GetStatusRequest setPath(String path) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetStatusRequest.class).add("path", path).toString(); } + + GetStatusRequestPb toPb() { + GetStatusRequestPb pb = new GetStatusRequestPb(); + pb.setPath(path); + + return pb; + } + + static GetStatusRequest fromPb(GetStatusRequestPb pb) { + GetStatusRequest model = new GetStatusRequest(); + model.setPath(pb.getPath()); + + return model; + } + + public static class GetStatusRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetStatusRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStatusRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStatusRequestDeserializer extends JsonDeserializer { + @Override + public GetStatusRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStatusRequestPb pb = mapper.readValue(p, GetStatusRequestPb.class); + return GetStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequestPb.java new file mode 100755 index 000000000..fe2141171 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the information of a file or directory */ +@Generated +class GetStatusRequestPb { + @JsonIgnore + @QueryParam("path") + private String path; + + public GetStatusRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStatusRequestPb that = (GetStatusRequestPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(GetStatusRequestPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java index 5b66be4cc..4dfa249db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List directory contents or file details */ @Generated +@JsonSerialize(using = ListDbfsRequest.ListDbfsRequestSerializer.class) +@JsonDeserialize(using = ListDbfsRequest.ListDbfsRequestDeserializer.class) public class ListDbfsRequest { /** The path of the file or directory. The path should be the absolute DBFS path. */ - @JsonIgnore - @QueryParam("path") private String path; public ListDbfsRequest setPath(String path) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListDbfsRequest.class).add("path", path).toString(); } + + ListDbfsRequestPb toPb() { + ListDbfsRequestPb pb = new ListDbfsRequestPb(); + pb.setPath(path); + + return pb; + } + + static ListDbfsRequest fromPb(ListDbfsRequestPb pb) { + ListDbfsRequest model = new ListDbfsRequest(); + model.setPath(pb.getPath()); + + return model; + } + + public static class ListDbfsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListDbfsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDbfsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDbfsRequestDeserializer extends JsonDeserializer { + @Override + public ListDbfsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDbfsRequestPb pb = mapper.readValue(p, ListDbfsRequestPb.class); + return ListDbfsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequestPb.java new file mode 100755 index 000000000..82a2e5fbd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List directory contents or file details */ +@Generated +class ListDbfsRequestPb { + @JsonIgnore + @QueryParam("path") + private String path; + + public ListDbfsRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDbfsRequestPb that = (ListDbfsRequestPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(ListDbfsRequestPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java index 684dde699..1c8ba0a28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java @@ -3,16 +3,27 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List directory contents */ @Generated +@JsonSerialize(using = ListDirectoryContentsRequest.ListDirectoryContentsRequestSerializer.class) +@JsonDeserialize( + using = ListDirectoryContentsRequest.ListDirectoryContentsRequestDeserializer.class) public class ListDirectoryContentsRequest { /** The absolute path of a directory. */ - @JsonIgnore private String directoryPath; + private String directoryPath; /** * The maximum number of directory entries to return. The response may contain fewer entries. If @@ -25,8 +36,6 @@ public class ListDirectoryContentsRequest { *

If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. * Values above 1000 will be coerced to 1000. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** @@ -37,8 +46,6 @@ public class ListDirectoryContentsRequest { * requesting pages of entries until the response contains no `next_page_token`. Note that the * number of entries returned must not be used to determine when the listing is complete. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListDirectoryContentsRequest setDirectoryPath(String directoryPath) { @@ -91,4 +98,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListDirectoryContentsRequestPb toPb() { + ListDirectoryContentsRequestPb pb = new ListDirectoryContentsRequestPb(); + pb.setDirectoryPath(directoryPath); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListDirectoryContentsRequest fromPb(ListDirectoryContentsRequestPb pb) { + ListDirectoryContentsRequest model = new ListDirectoryContentsRequest(); + model.setDirectoryPath(pb.getDirectoryPath()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListDirectoryContentsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDirectoryContentsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDirectoryContentsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDirectoryContentsRequestDeserializer + extends JsonDeserializer { + @Override + public ListDirectoryContentsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDirectoryContentsRequestPb pb = mapper.readValue(p, ListDirectoryContentsRequestPb.class); + return ListDirectoryContentsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequestPb.java new file mode 100755 index 000000000..bcc28ead8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List directory contents */ +@Generated +class ListDirectoryContentsRequestPb { + @JsonIgnore private String directoryPath; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListDirectoryContentsRequestPb setDirectoryPath(String directoryPath) { + this.directoryPath = directoryPath; + return this; + } + + public String getDirectoryPath() { + return directoryPath; + } + + public ListDirectoryContentsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDirectoryContentsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDirectoryContentsRequestPb that = 
(ListDirectoryContentsRequestPb) o; + return Objects.equals(directoryPath, that.directoryPath) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(directoryPath, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDirectoryContentsRequestPb.class) + .add("directoryPath", directoryPath) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java index 6a4f7f195..9d23bd277 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListDirectoryResponse.ListDirectoryResponseSerializer.class) +@JsonDeserialize(using = ListDirectoryResponse.ListDirectoryResponseDeserializer.class) public class ListDirectoryResponse { /** Array of DirectoryEntry. 
*/ - @JsonProperty("contents") private Collection contents; /** A token, which can be sent as `page_token` to retrieve the next page. */ - @JsonProperty("next_page_token") private String nextPageToken; public ListDirectoryResponse setContents(Collection contents) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListDirectoryResponsePb toPb() { + ListDirectoryResponsePb pb = new ListDirectoryResponsePb(); + pb.setContents(contents); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListDirectoryResponse fromPb(ListDirectoryResponsePb pb) { + ListDirectoryResponse model = new ListDirectoryResponse(); + model.setContents(pb.getContents()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListDirectoryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDirectoryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDirectoryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDirectoryResponseDeserializer + extends JsonDeserializer { + @Override + public ListDirectoryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDirectoryResponsePb pb = mapper.readValue(p, ListDirectoryResponsePb.class); + return ListDirectoryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponsePb.java new file mode 100755 index 000000000..58a9a8f77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListDirectoryResponsePb { + @JsonProperty("contents") + private Collection contents; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDirectoryResponsePb setContents(Collection contents) { + this.contents = contents; + return this; + } + + public Collection getContents() { + return contents; + } + + public ListDirectoryResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDirectoryResponsePb that = (ListDirectoryResponsePb) o; + return Objects.equals(contents, that.contents) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(contents, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDirectoryResponsePb.class) + .add("contents", contents) + 
.add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java index fd42cb970..dec7253ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListStatusResponse.ListStatusResponseSerializer.class) +@JsonDeserialize(using = ListStatusResponse.ListStatusResponseDeserializer.class) public class ListStatusResponse { /** A list of FileInfo's that describe contents of directory or file. See example above. 
*/ - @JsonProperty("files") private Collection files; public ListStatusResponse setFiles(Collection files) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListStatusResponse.class).add("files", files).toString(); } + + ListStatusResponsePb toPb() { + ListStatusResponsePb pb = new ListStatusResponsePb(); + pb.setFiles(files); + + return pb; + } + + static ListStatusResponse fromPb(ListStatusResponsePb pb) { + ListStatusResponse model = new ListStatusResponse(); + model.setFiles(pb.getFiles()); + + return model; + } + + public static class ListStatusResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListStatusResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListStatusResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListStatusResponseDeserializer extends JsonDeserializer { + @Override + public ListStatusResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListStatusResponsePb pb = mapper.readValue(p, ListStatusResponsePb.class); + return ListStatusResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponsePb.java new file mode 100755 index 000000000..901959b69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListStatusResponsePb { + @JsonProperty("files") + private Collection files; + + public ListStatusResponsePb setFiles(Collection files) { + this.files = files; + return this; + } + + public Collection getFiles() { + return files; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListStatusResponsePb that = (ListStatusResponsePb) o; + return Objects.equals(files, that.files); + } + + @Override + public int hashCode() { + return Objects.hash(files); + } + + @Override + public String toString() { + return new ToStringer(ListStatusResponsePb.class).add("files", files).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java index 2ffbf91d8..110b6ad53 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MkDirs.MkDirsSerializer.class) +@JsonDeserialize(using = MkDirs.MkDirsDeserializer.class) public class MkDirs { /** The path of the new directory. The path should be the absolute DBFS path. */ - @JsonProperty("path") private String path; public MkDirs setPath(String path) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(MkDirs.class).add("path", path).toString(); } + + MkDirsPb toPb() { + MkDirsPb pb = new MkDirsPb(); + pb.setPath(path); + + return pb; + } + + static MkDirs fromPb(MkDirsPb pb) { + MkDirs model = new MkDirs(); + model.setPath(pb.getPath()); + + return model; + } + + public static class MkDirsSerializer extends JsonSerializer { + @Override + public void serialize(MkDirs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MkDirsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MkDirsDeserializer extends JsonDeserializer { + @Override + public MkDirs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MkDirsPb pb = mapper.readValue(p, MkDirsPb.class); + return MkDirs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsPb.java new file mode 100755 index 000000000..e61e7eb3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MkDirsPb { + @JsonProperty("path") + private String path; + + public MkDirsPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MkDirsPb that = (MkDirsPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(MkDirsPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java index 4fdfa15f3..98c732c56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
MkDirsResponse.MkDirsResponseSerializer.class) +@JsonDeserialize(using = MkDirsResponse.MkDirsResponseDeserializer.class) public class MkDirsResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(MkDirsResponse.class).toString(); } + + MkDirsResponsePb toPb() { + MkDirsResponsePb pb = new MkDirsResponsePb(); + + return pb; + } + + static MkDirsResponse fromPb(MkDirsResponsePb pb) { + MkDirsResponse model = new MkDirsResponse(); + + return model; + } + + public static class MkDirsResponseSerializer extends JsonSerializer { + @Override + public void serialize(MkDirsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MkDirsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MkDirsResponseDeserializer extends JsonDeserializer { + @Override + public MkDirsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MkDirsResponsePb pb = mapper.readValue(p, MkDirsResponsePb.class); + return MkDirsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponsePb.java new file mode 100755 index 000000000..c60832583 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class MkDirsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(MkDirsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java index b2da36d79..debf3f7e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Move.MoveSerializer.class) +@JsonDeserialize(using = Move.MoveDeserializer.class) public class Move { /** The destination path of the file or directory. The path should be the absolute DBFS path. */ - @JsonProperty("destination_path") private String destinationPath; /** The source path of the file or directory. 
The path should be the absolute DBFS path. */ - @JsonProperty("source_path") private String sourcePath; public Move setDestinationPath(String destinationPath) { @@ -56,4 +65,39 @@ public String toString() { .add("sourcePath", sourcePath) .toString(); } + + MovePb toPb() { + MovePb pb = new MovePb(); + pb.setDestinationPath(destinationPath); + pb.setSourcePath(sourcePath); + + return pb; + } + + static Move fromPb(MovePb pb) { + Move model = new Move(); + model.setDestinationPath(pb.getDestinationPath()); + model.setSourcePath(pb.getSourcePath()); + + return model; + } + + public static class MoveSerializer extends JsonSerializer { + @Override + public void serialize(Move value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MovePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MoveDeserializer extends JsonDeserializer { + @Override + public Move deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MovePb pb = mapper.readValue(p, MovePb.class); + return Move.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MovePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MovePb.java new file mode 100755 index 000000000..171213b4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MovePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MovePb { + @JsonProperty("destination_path") + private String destinationPath; + + @JsonProperty("source_path") + private String sourcePath; + + public MovePb setDestinationPath(String destinationPath) { + this.destinationPath = destinationPath; + return this; + } + + public String getDestinationPath() { + return destinationPath; + } + + public MovePb setSourcePath(String sourcePath) { + this.sourcePath = sourcePath; + return this; + } + + public String getSourcePath() { + return sourcePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MovePb that = (MovePb) o; + return Objects.equals(destinationPath, that.destinationPath) + && Objects.equals(sourcePath, that.sourcePath); + } + + @Override + public int hashCode() { + return Objects.hash(destinationPath, sourcePath); + } + + @Override + public String toString() { + return new ToStringer(MovePb.class) + .add("destinationPath", destinationPath) + .add("sourcePath", sourcePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java index 47ccb461e..f0e26c50a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MoveResponse.MoveResponseSerializer.class) +@JsonDeserialize(using = MoveResponse.MoveResponseDeserializer.class) public class MoveResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(MoveResponse.class).toString(); } + + MoveResponsePb toPb() { + MoveResponsePb pb = new MoveResponsePb(); + + return pb; + } + + static MoveResponse fromPb(MoveResponsePb pb) { + MoveResponse model = new MoveResponse(); + + return model; + } + + public static class MoveResponseSerializer extends JsonSerializer { + @Override + public void serialize(MoveResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MoveResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MoveResponseDeserializer extends JsonDeserializer { + @Override + public MoveResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MoveResponsePb pb = mapper.readValue(p, MoveResponsePb.class); + return MoveResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponsePb.java new file mode 100755 index 000000000..5425df166 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class MoveResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(MoveResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java index 6c3a052c6..40d628139 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Put.PutSerializer.class) +@JsonDeserialize(using = Put.PutDeserializer.class) public class Put { /** This parameter might be absent, and instead a posted file will be used. */ - @JsonProperty("contents") private String contents; /** The flag that specifies whether to overwrite existing file/files. */ - @JsonProperty("overwrite") private Boolean overwrite; /** The path of the new file. The path should be the absolute DBFS path. */ - @JsonProperty("path") private String path; public Put setContents(String contents) { @@ -71,4 +79,41 @@ public String toString() { .add("path", path) .toString(); } + + PutPb toPb() { + PutPb pb = new PutPb(); + pb.setContents(contents); + pb.setOverwrite(overwrite); + pb.setPath(path); + + return pb; + } + + static Put fromPb(PutPb pb) { + Put model = new Put(); + model.setContents(pb.getContents()); + model.setOverwrite(pb.getOverwrite()); + model.setPath(pb.getPath()); + + return model; + } + + public static class PutSerializer extends JsonSerializer { + @Override + public void serialize(Put value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutDeserializer extends JsonDeserializer { + @Override + public Put deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutPb pb = mapper.readValue(p, PutPb.class); + return Put.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutPb.java new file mode 100755 index 000000000..fb546520d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PutPb { + @JsonProperty("contents") + private String contents; + + @JsonProperty("overwrite") + private Boolean overwrite; + + @JsonProperty("path") + private String path; + + public PutPb setContents(String contents) { + this.contents = contents; + return this; + } + + public String getContents() { + return contents; + } + + public PutPb setOverwrite(Boolean overwrite) { + this.overwrite = overwrite; + return this; + } + + public Boolean getOverwrite() { + return overwrite; + } + + public PutPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutPb that = (PutPb) o; + return Objects.equals(contents, that.contents) + && Objects.equals(overwrite, that.overwrite) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(contents, overwrite, path); + } + + @Override + public String toString() { + return new ToStringer(PutPb.class) + .add("contents", contents) + .add("overwrite", overwrite) + .add("path", path) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java index 8d41a1c8f..8540b9c67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PutResponse.PutResponseSerializer.class) +@JsonDeserialize(using = PutResponse.PutResponseDeserializer.class) public class PutResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(PutResponse.class).toString(); } + + PutResponsePb toPb() { + PutResponsePb pb = new PutResponsePb(); + + return pb; + } + + static PutResponse fromPb(PutResponsePb pb) { + PutResponse model = new PutResponse(); + + return model; + } + + public static class PutResponseSerializer extends JsonSerializer { + @Override + public void serialize(PutResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutResponseDeserializer extends JsonDeserializer { + @Override + public PutResponse deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutResponsePb pb = mapper.readValue(p, PutResponsePb.class); + return PutResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponsePb.java new file mode 100755 index 000000000..2c00dd766 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PutResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PutResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java index 945e0c78d..aeb12e47f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java @@ -3,30 +3,34 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the contents of a file */ @Generated +@JsonSerialize(using = ReadDbfsRequest.ReadDbfsRequestSerializer.class) +@JsonDeserialize(using = ReadDbfsRequest.ReadDbfsRequestDeserializer.class) public class ReadDbfsRequest { /** * The number of bytes to read starting from the offset. This has a limit of 1 MB, and a default * value of 0.5 MB. */ - @JsonIgnore - @QueryParam("length") private Long length; /** The offset to read from in bytes. */ - @JsonIgnore - @QueryParam("offset") private Long offset; /** The path of the file to read. The path should be the absolute DBFS path. 
*/ - @JsonIgnore - @QueryParam("path") private String path; public ReadDbfsRequest setLength(Long length) { @@ -79,4 +83,42 @@ public String toString() { .add("path", path) .toString(); } + + ReadDbfsRequestPb toPb() { + ReadDbfsRequestPb pb = new ReadDbfsRequestPb(); + pb.setLength(length); + pb.setOffset(offset); + pb.setPath(path); + + return pb; + } + + static ReadDbfsRequest fromPb(ReadDbfsRequestPb pb) { + ReadDbfsRequest model = new ReadDbfsRequest(); + model.setLength(pb.getLength()); + model.setOffset(pb.getOffset()); + model.setPath(pb.getPath()); + + return model; + } + + public static class ReadDbfsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ReadDbfsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReadDbfsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReadDbfsRequestDeserializer extends JsonDeserializer { + @Override + public ReadDbfsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReadDbfsRequestPb pb = mapper.readValue(p, ReadDbfsRequestPb.class); + return ReadDbfsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequestPb.java new file mode 100755 index 000000000..b1ba85671 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the contents of a file */ +@Generated +class ReadDbfsRequestPb { + @JsonIgnore + @QueryParam("length") + private Long length; + + @JsonIgnore + @QueryParam("offset") + private Long offset; + + @JsonIgnore + @QueryParam("path") + private String path; + + public ReadDbfsRequestPb setLength(Long length) { + this.length = length; + return this; + } + + public Long getLength() { + return length; + } + + public ReadDbfsRequestPb setOffset(Long offset) { + this.offset = offset; + return this; + } + + public Long getOffset() { + return offset; + } + + public ReadDbfsRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReadDbfsRequestPb that = (ReadDbfsRequestPb) o; + return Objects.equals(length, that.length) + && Objects.equals(offset, that.offset) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(length, offset, path); + } + + @Override + public String toString() { + return new ToStringer(ReadDbfsRequestPb.class) + .add("length", length) + .add("offset", offset) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java index 97a5b3dd0..fde3d6368 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java @@ -4,20 +4,29 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ReadResponse.ReadResponseSerializer.class) +@JsonDeserialize(using = ReadResponse.ReadResponseDeserializer.class) public class ReadResponse { /** * The number of bytes read (could be less than ``length`` if we hit end of file). This refers to * number of bytes read in unencoded version (response data is base64-encoded). */ - @JsonProperty("bytes_read") private Long bytesRead; /** The base64-encoded contents of the file read. 
*/ - @JsonProperty("data") private String data; public ReadResponse setBytesRead(Long bytesRead) { @@ -58,4 +67,39 @@ public String toString() { .add("data", data) .toString(); } + + ReadResponsePb toPb() { + ReadResponsePb pb = new ReadResponsePb(); + pb.setBytesRead(bytesRead); + pb.setData(data); + + return pb; + } + + static ReadResponse fromPb(ReadResponsePb pb) { + ReadResponse model = new ReadResponse(); + model.setBytesRead(pb.getBytesRead()); + model.setData(pb.getData()); + + return model; + } + + public static class ReadResponseSerializer extends JsonSerializer { + @Override + public void serialize(ReadResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReadResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReadResponseDeserializer extends JsonDeserializer { + @Override + public ReadResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReadResponsePb pb = mapper.readValue(p, ReadResponsePb.class); + return ReadResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponsePb.java new file mode 100755 index 000000000..1375aa9ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ReadResponsePb { + @JsonProperty("bytes_read") + private Long bytesRead; + + @JsonProperty("data") + private String data; + + public ReadResponsePb setBytesRead(Long bytesRead) { + this.bytesRead = bytesRead; + return this; + } + + public Long getBytesRead() { + return bytesRead; + } + + public ReadResponsePb setData(String data) { + this.data = data; + return this; + } + + public String getData() { + return data; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReadResponsePb that = (ReadResponsePb) o; + return Objects.equals(bytesRead, that.bytesRead) && Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(bytesRead, data); + } + + @Override + public String toString() { + return new ToStringer(ReadResponsePb.class) + .add("bytesRead", bytesRead) + .add("data", data) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java index a6c715147..21e5a0f21 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java @@ -3,27 +3,35 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; /** Upload a file */ @Generated +@JsonSerialize(using = UploadRequest.UploadRequestSerializer.class) +@JsonDeserialize(using = UploadRequest.UploadRequestDeserializer.class) public class UploadRequest { /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; /** The absolute path of the file. */ - @JsonIgnore private String filePath; + private String filePath; /** * If true or unspecified, an existing file will be overwritten. If false, an error will be * returned if the path points to an existing file. 
*/ - @JsonIgnore - @QueryParam("overwrite") private Boolean overwrite; public UploadRequest setContents(InputStream contents) { @@ -76,4 +84,41 @@ public String toString() { .add("overwrite", overwrite) .toString(); } + + UploadRequestPb toPb() { + UploadRequestPb pb = new UploadRequestPb(); + pb.setContents(contents); + pb.setFilePath(filePath); + pb.setOverwrite(overwrite); + + return pb; + } + + static UploadRequest fromPb(UploadRequestPb pb) { + UploadRequest model = new UploadRequest(); + model.setContents(pb.getContents()); + model.setFilePath(pb.getFilePath()); + model.setOverwrite(pb.getOverwrite()); + + return model; + } + + public static class UploadRequestSerializer extends JsonSerializer { + @Override + public void serialize(UploadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UploadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UploadRequestDeserializer extends JsonDeserializer { + @Override + public UploadRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UploadRequestPb pb = mapper.readValue(p, UploadRequestPb.class); + return UploadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequestPb.java new file mode 100755 index 000000000..e1a48bcc8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequestPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +/** Upload a file */ +@Generated +class UploadRequestPb { + @JsonIgnore private InputStream contents; + + @JsonIgnore private String filePath; + + @JsonIgnore + @QueryParam("overwrite") + private Boolean overwrite; + + public UploadRequestPb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + public UploadRequestPb setFilePath(String filePath) { + this.filePath = filePath; + return this; + } + + public String getFilePath() { + return filePath; + } + + public UploadRequestPb setOverwrite(Boolean overwrite) { + this.overwrite = overwrite; + return this; + } + + public Boolean getOverwrite() { + return overwrite; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UploadRequestPb that = (UploadRequestPb) o; + return Objects.equals(contents, that.contents) + && Objects.equals(filePath, that.filePath) + && Objects.equals(overwrite, that.overwrite); + } + + @Override + public int hashCode() { + return Objects.hash(contents, filePath, overwrite); + } + + @Override + public String toString() { + return new ToStringer(UploadRequestPb.class) + .add("contents", contents) + .add("filePath", filePath) + .add("overwrite", overwrite) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java index 68c79384d..4b3618948 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UploadResponse.UploadResponseSerializer.class) +@JsonDeserialize(using = UploadResponse.UploadResponseDeserializer.class) public class UploadResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UploadResponse.class).toString(); } + + UploadResponsePb toPb() { + UploadResponsePb pb = new UploadResponsePb(); + + return pb; + } + + static UploadResponse fromPb(UploadResponsePb pb) { + UploadResponse model = new UploadResponse(); + + return model; + } + + public static class UploadResponseSerializer extends JsonSerializer { + @Override + public void serialize(UploadResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UploadResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UploadResponseDeserializer extends JsonDeserializer { + @Override + public UploadResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UploadResponsePb pb = mapper.readValue(p, UploadResponsePb.class); + return UploadResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponsePb.java new file mode 100755 index 000000000..a317cd7ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.files; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UploadResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UploadResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java index a28f4bc83..1f0002dc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java @@ -21,7 +21,7 @@ public CheckPolicyResponse checkPolicy(CheckPolicyRequest request) { String path = "/api/2.0/access-control/check-policy-v2"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CheckPolicyResponse.class); } catch (IOException e) { diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java index 2e96227dc..02b003711 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccessControlRequest.AccessControlRequestSerializer.class) +@JsonDeserialize(using = AccessControlRequest.AccessControlRequestDeserializer.class) public class AccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public AccessControlRequest setGroupName(String groupName) { @@ -86,4 +93,46 @@ public String toString() { .add("userName", userName) .toString(); } + + AccessControlRequestPb toPb() { + AccessControlRequestPb pb = new AccessControlRequestPb(); + 
pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static AccessControlRequest fromPb(AccessControlRequestPb pb) { + AccessControlRequest model = new AccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class AccessControlRequestSerializer extends JsonSerializer { + @Override + public void serialize( + AccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public AccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccessControlRequestPb pb = mapper.readValue(p, AccessControlRequestPb.class); + return AccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequestPb.java new file mode 100755 index 000000000..17c9423b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public AccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public AccessControlRequestPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public AccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public AccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccessControlRequestPb that = (AccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new 
ToStringer(AccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlResponse.java index 23cb047f6..5397dc83f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AccessControlResponse.AccessControlResponseSerializer.class) +@JsonDeserialize(using = AccessControlResponse.AccessControlResponseDeserializer.class) public class AccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public AccessControlResponse setAllPermissions(Collection allPermissions) { @@ -102,4 +108,49 @@ public String toString() { .add("userName", userName) .toString(); } + + AccessControlResponsePb toPb() { + AccessControlResponsePb pb = new AccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static AccessControlResponse fromPb(AccessControlResponsePb pb) { + AccessControlResponse model = new AccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class AccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + AccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public AccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format twin of the {@code AccessControlResponse} model: same fields, each bound to its
 * snake_case JSON property name. Instances are produced/consumed only by the model's
 * {@code toPb}/{@code fromPb} converters.
 */
@Generated
class AccessControlResponsePb {
  /** All permissions. Typed as Collection<Permission> — the raw Collection lost the element type. */
  @JsonProperty("all_permissions")
  private Collection<Permission> allPermissions;

  /** Display name of the user or service principal. */
  @JsonProperty("display_name")
  private String displayName;

  /** Name of the group. */
  @JsonProperty("group_name")
  private String groupName;

  /** Name of the service principal. */
  @JsonProperty("service_principal_name")
  private String servicePrincipalName;

  /** Name of the user. */
  @JsonProperty("user_name")
  private String userName;

  public AccessControlResponsePb setAllPermissions(Collection<Permission> allPermissions) {
    this.allPermissions = allPermissions;
    return this;
  }

  public Collection<Permission> getAllPermissions() {
    return allPermissions;
  }

  public AccessControlResponsePb setDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  public String getDisplayName() {
    return displayName;
  }

  public AccessControlResponsePb setGroupName(String groupName) {
    this.groupName = groupName;
    return this;
  }

  public String getGroupName() {
    return groupName;
  }

  public AccessControlResponsePb setServicePrincipalName(String servicePrincipalName) {
    this.servicePrincipalName = servicePrincipalName;
    return this;
  }

  public String getServicePrincipalName() {
    return servicePrincipalName;
  }

  public AccessControlResponsePb setUserName(String userName) {
    this.userName = userName;
    return this;
  }

  public String getUserName() {
    return userName;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    AccessControlResponsePb that = (AccessControlResponsePb) o;
    return Objects.equals(allPermissions, that.allPermissions)
        && Objects.equals(displayName, that.displayName)
        && Objects.equals(groupName, that.groupName)
        && Objects.equals(servicePrincipalName, that.servicePrincipalName)
        && Objects.equals(userName, that.userName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName);
  }

  @Override
  public String toString() {
    return new ToStringer(AccessControlResponsePb.class)
        .add("allPermissions", allPermissions)
        .add("displayName", displayName)
        .add("groupName", groupName)
        .add("servicePrincipalName", servicePrincipalName)
        .add("userName", userName)
        .toString();
  }
}
GetAssignableRolesForResourceResponse.class); } catch (IOException e) { @@ -41,7 +41,7 @@ public RuleSetResponse getRuleSet(GetRuleSetRequest request) { apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RuleSetResponse.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) { apiClient.configuredAccountID()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RuleSetResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java index a0911c9c3..647c9e214 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java @@ -22,7 +22,7 @@ public GetAssignableRolesForResourceResponse getAssignableRolesForResource( String path = "/api/2.0/preview/accounts/access-control/assignable-roles"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetAssignableRolesForResourceResponse.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public RuleSetResponse getRuleSet(GetRuleSetRequest request) { String path = "/api/2.0/preview/accounts/access-control/rule-sets"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); 
+ ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RuleSetResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) { String path = "/api/2.0/preview/accounts/access-control/rule-sets"; try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RuleSetResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java index 634e2397a..7016a0673 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java @@ -75,7 +75,7 @@ public Group get(GetAccountGroupRequest request) { public Iterable list(ListAccountGroupsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java index ec429f93b..f648cdff4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java @@ -22,7 +22,7 @@ public Group create(Group request) { String.format("/api/2.0/accounts/%s/scim/v2/Groups", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Group.class); @@ -39,7 +39,7 @@ public void delete(DeleteAccountGroupRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -54,7 +54,7 @@ public Group get(GetAccountGroupRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Group.class); } catch (IOException e) { @@ -68,7 +68,7 @@ public ListGroupsResponse list(ListAccountGroupsRequest request) { String.format("/api/2.0/accounts/%s/scim/v2/Groups", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListGroupsResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public void patch(PartialUpdate request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -100,7 +100,7 @@ public void update(Group request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java index ff6280873..415577a5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java @@ -73,7 +73,7 @@ public ServicePrincipal get(GetAccountServicePrincipalRequest request) { public Iterable list(ListAccountServicePrincipalsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java index b576d22d2..5049c41b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java @@ -23,7 +23,7 @@ public ServicePrincipal create(ServicePrincipal request) { "/api/2.0/accounts/%s/scim/v2/ServicePrincipals", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServicePrincipal.class); @@ -40,7 +40,7 @@ public void delete(DeleteAccountServicePrincipalRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new 
Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -55,7 +55,7 @@ public ServicePrincipal get(GetAccountServicePrincipalRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ServicePrincipal.class); } catch (IOException e) { @@ -70,7 +70,7 @@ public ListServicePrincipalResponse list(ListAccountServicePrincipalsRequest req "/api/2.0/accounts/%s/scim/v2/ServicePrincipals", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListServicePrincipalResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public void patch(PartialUpdate request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -102,7 +102,7 @@ public void update(ServicePrincipal request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java index b378db43c..77249ac5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java @@ -79,7 +79,7 @@ public User get(GetAccountUserRequest request) { public Iterable list(ListAccountUsersRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java index 47fd2d266..245e65506 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java @@ -22,7 +22,7 @@ public User create(User request) { String.format("/api/2.0/accounts/%s/scim/v2/Users", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, User.class); @@ -39,7 +39,7 @@ public void delete(DeleteAccountUserRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -54,7 +54,7 @@ public User get(GetAccountUserRequest request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("GET", path); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, User.class); } catch (IOException e) { @@ -68,7 +68,7 @@ public ListUsersResponse list(ListAccountUsersRequest request) { String.format("/api/2.0/accounts/%s/scim/v2/Users", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListUsersResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public void patch(PartialUpdate request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -100,7 +100,7 @@ public void update(User request) { apiClient.configuredAccountID(), request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java index 72dbf807e..51e0178a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java @@ -3,9 +3,17 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,10 +21,10 @@ * principal of a permission set assignment but an actor is always a user or a service principal */ @Generated +@JsonSerialize(using = Actor.ActorSerializer.class) +@JsonDeserialize(using = Actor.ActorDeserializer.class) public class Actor { /** */ - @JsonProperty("actor_id") - @QueryParam("actor_id") private Long actorId; public Actor setActorId(Long actorId) { @@ -45,4 +53,37 @@ public int hashCode() { public String toString() { return new ToStringer(Actor.class).add("actorId", actorId).toString(); } + + ActorPb toPb() { + ActorPb pb = new ActorPb(); + pb.setActorId(actorId); + + return pb; + } + + static Actor fromPb(ActorPb pb) { + Actor model = new Actor(); + model.setActorId(pb.getActorId()); + + return model; + } + + public static class ActorSerializer extends JsonSerializer { + @Override + public void serialize(Actor value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ActorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ActorDeserializer extends JsonDeserializer { + @Override + public Actor deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ActorPb pb = mapper.readValue(p, ActorPb.class); + return Actor.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ActorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ActorPb.java new file mode 100755 index 000000000..150fd452e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ActorPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * represents an identity trying to access a resource - user or a service principal group can be a + * principal of a permission set assignment but an actor is always a user or a service principal + */ +@Generated +class ActorPb { + @JsonProperty("actor_id") + @QueryParam("actor_id") + private Long actorId; + + public ActorPb setActorId(Long actorId) { + this.actorId = actorId; + return this; + } + + public Long getActorId() { + return actorId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ActorPb that = (ActorPb) o; + return Objects.equals(actorId, that.actorId); + } + + @Override + public int hashCode() { + return Objects.hash(actorId); + } + + @Override + public String toString() { + return new ToStringer(ActorPb.class).add("actorId", actorId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java index fae98e5d2..de18fe8ba 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java @@ -3,32 +3,34 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Check access policy to a resource */ @Generated +@JsonSerialize(using = CheckPolicyRequest.CheckPolicyRequestSerializer.class) +@JsonDeserialize(using = CheckPolicyRequest.CheckPolicyRequestDeserializer.class) public class CheckPolicyRequest { /** */ - @JsonIgnore - @QueryParam("actor") private Actor actor; /** */ - @JsonIgnore - @QueryParam("authz_identity") private RequestAuthzIdentity authzIdentity; /** */ - @JsonIgnore - @QueryParam("consistency_token") private ConsistencyToken consistencyToken; /** */ - @JsonIgnore - @QueryParam("permission") private String permission; /** @@ -36,13 +38,9 @@ public class CheckPolicyRequest { * (servicePrincipal.ruleSet/update, * accounts//servicePrincipals//ruleSets/default) */ - @JsonIgnore - @QueryParam("resource") private String resource; /** */ - @JsonIgnore - @QueryParam("resource_info") private ResourceInfo resourceInfo; public CheckPolicyRequest setActor(Actor actor) { @@ -128,4 +126,48 @@ public String toString() { 
.add("resourceInfo", resourceInfo) .toString(); } + + CheckPolicyRequestPb toPb() { + CheckPolicyRequestPb pb = new CheckPolicyRequestPb(); + pb.setActor(actor); + pb.setAuthzIdentity(authzIdentity); + pb.setConsistencyToken(consistencyToken); + pb.setPermission(permission); + pb.setResource(resource); + pb.setResourceInfo(resourceInfo); + + return pb; + } + + static CheckPolicyRequest fromPb(CheckPolicyRequestPb pb) { + CheckPolicyRequest model = new CheckPolicyRequest(); + model.setActor(pb.getActor()); + model.setAuthzIdentity(pb.getAuthzIdentity()); + model.setConsistencyToken(pb.getConsistencyToken()); + model.setPermission(pb.getPermission()); + model.setResource(pb.getResource()); + model.setResourceInfo(pb.getResourceInfo()); + + return model; + } + + public static class CheckPolicyRequestSerializer extends JsonSerializer { + @Override + public void serialize(CheckPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CheckPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CheckPolicyRequestDeserializer extends JsonDeserializer { + @Override + public CheckPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CheckPolicyRequestPb pb = mapper.readValue(p, CheckPolicyRequestPb.class); + return CheckPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequestPb.java new file mode 100755 index 000000000..843db5a9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequestPb.java @@ -0,0 +1,121 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/**
 * Check access policy to a resource.
 *
 * <p>Wire-format twin of {@code CheckPolicyRequest}: every field is a query parameter
 * ({@code @JsonIgnore} keeps it out of any JSON body), bound to its snake_case name.
 */
@Generated
class CheckPolicyRequestPb {
  @JsonIgnore
  @QueryParam("actor")
  private Actor actor;

  @JsonIgnore
  @QueryParam("authz_identity")
  private RequestAuthzIdentity authzIdentity;

  @JsonIgnore
  @QueryParam("consistency_token")
  private ConsistencyToken consistencyToken;

  @JsonIgnore
  @QueryParam("permission")
  private String permission;

  @JsonIgnore
  @QueryParam("resource")
  private String resource;

  @JsonIgnore
  @QueryParam("resource_info")
  private ResourceInfo resourceInfo;

  // Fluent setters: each returns this so converters can chain assignments.
  public CheckPolicyRequestPb setActor(Actor actor) {
    this.actor = actor;
    return this;
  }

  public CheckPolicyRequestPb setAuthzIdentity(RequestAuthzIdentity authzIdentity) {
    this.authzIdentity = authzIdentity;
    return this;
  }

  public CheckPolicyRequestPb setConsistencyToken(ConsistencyToken consistencyToken) {
    this.consistencyToken = consistencyToken;
    return this;
  }

  public CheckPolicyRequestPb setPermission(String permission) {
    this.permission = permission;
    return this;
  }

  public CheckPolicyRequestPb setResource(String resource) {
    this.resource = resource;
    return this;
  }

  public CheckPolicyRequestPb setResourceInfo(ResourceInfo resourceInfo) {
    this.resourceInfo = resourceInfo;
    return this;
  }

  public Actor getActor() {
    return actor;
  }

  public RequestAuthzIdentity getAuthzIdentity() {
    return authzIdentity;
  }

  public ConsistencyToken getConsistencyToken() {
    return consistencyToken;
  }

  public String getPermission() {
    return permission;
  }

  public String getResource() {
    return resource;
  }

  public ResourceInfo getResourceInfo() {
    return resourceInfo;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CheckPolicyRequestPb other = (CheckPolicyRequestPb) o;
    return Objects.equals(actor, other.actor)
        && Objects.equals(authzIdentity, other.authzIdentity)
        && Objects.equals(consistencyToken, other.consistencyToken)
        && Objects.equals(permission, other.permission)
        && Objects.equals(resource, other.resource)
        && Objects.equals(resourceInfo, other.resourceInfo);
  }

  @Override
  public int hashCode() {
    return Objects.hash(actor, authzIdentity, consistencyToken, permission, resource, resourceInfo);
  }

  @Override
  public String toString() {
    return new ToStringer(CheckPolicyRequestPb.class)
        .add("actor", actor)
        .add("authzIdentity", authzIdentity)
        .add("consistencyToken", consistencyToken)
        .add("permission", permission)
        .add("resource", resource)
        .add("resourceInfo", resourceInfo)
        .toString();
  }
}
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CheckPolicyResponse.CheckPolicyResponseSerializer.class) +@JsonDeserialize(using = CheckPolicyResponse.CheckPolicyResponseDeserializer.class) public class CheckPolicyResponse { /** */ - @JsonProperty("consistency_token") private ConsistencyToken consistencyToken; /** */ - @JsonProperty("is_permitted") private Boolean isPermitted; public CheckPolicyResponse setConsistencyToken(ConsistencyToken consistencyToken) { @@ -56,4 +65,41 @@ public String toString() { .add("isPermitted", isPermitted) .toString(); } + + CheckPolicyResponsePb toPb() { + CheckPolicyResponsePb pb = new CheckPolicyResponsePb(); + pb.setConsistencyToken(consistencyToken); + pb.setIsPermitted(isPermitted); + + return pb; + } + + static CheckPolicyResponse fromPb(CheckPolicyResponsePb pb) { + CheckPolicyResponse model = new CheckPolicyResponse(); + model.setConsistencyToken(pb.getConsistencyToken()); + model.setIsPermitted(pb.getIsPermitted()); + + return model; + } + + public static class CheckPolicyResponseSerializer extends JsonSerializer { + @Override + public void serialize(CheckPolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CheckPolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CheckPolicyResponseDeserializer + extends JsonDeserializer { + @Override + public CheckPolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CheckPolicyResponsePb pb = mapper.readValue(p, CheckPolicyResponsePb.class); + return CheckPolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponsePb.java new file mode 100755 index 000000000..6deb12e27 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CheckPolicyResponsePb { + @JsonProperty("consistency_token") + private ConsistencyToken consistencyToken; + + @JsonProperty("is_permitted") + private Boolean isPermitted; + + public CheckPolicyResponsePb setConsistencyToken(ConsistencyToken consistencyToken) { + this.consistencyToken = consistencyToken; + return this; + } + + public ConsistencyToken getConsistencyToken() { + return consistencyToken; + } + + public CheckPolicyResponsePb setIsPermitted(Boolean isPermitted) { + this.isPermitted = isPermitted; + return this; + } + + public Boolean getIsPermitted() { + return isPermitted; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CheckPolicyResponsePb that = (CheckPolicyResponsePb) o; + return Objects.equals(consistencyToken, that.consistencyToken) + && Objects.equals(isPermitted, that.isPermitted); + } + + @Override + public int hashCode() { + return Objects.hash(consistencyToken, isPermitted); + } + + @Override + public String toString() { + return new ToStringer(CheckPolicyResponsePb.class) + 
.add("consistencyToken", consistencyToken) + .add("isPermitted", isPermitted) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValue.java index e05f45e7c..60a7f499b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValue.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ComplexValue.ComplexValueSerializer.class) +@JsonDeserialize(using = ComplexValue.ComplexValueDeserializer.class) public class ComplexValue { /** */ - @JsonProperty("display") private String display; /** */ - @JsonProperty("primary") private Boolean primary; /** */ - @JsonProperty("$ref") private String ref; /** */ - @JsonProperty("type") private String typeValue; /** */ - @JsonProperty("value") private String value; public ComplexValue setDisplay(String display) { @@ -101,4 +107,45 @@ public String toString() { .add("value", value) .toString(); } + + ComplexValuePb toPb() { + ComplexValuePb pb = new ComplexValuePb(); + pb.setDisplay(display); + pb.setPrimary(primary); + pb.setRef(ref); + pb.setType(typeValue); + 
pb.setValue(value); + + return pb; + } + + static ComplexValue fromPb(ComplexValuePb pb) { + ComplexValue model = new ComplexValue(); + model.setDisplay(pb.getDisplay()); + model.setPrimary(pb.getPrimary()); + model.setRef(pb.getRef()); + model.setType(pb.getType()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ComplexValueSerializer extends JsonSerializer { + @Override + public void serialize(ComplexValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComplexValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComplexValueDeserializer extends JsonDeserializer { + @Override + public ComplexValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComplexValuePb pb = mapper.readValue(p, ComplexValuePb.class); + return ComplexValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValuePb.java new file mode 100755 index 000000000..dbd17e116 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ComplexValuePb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ComplexValuePb { + @JsonProperty("display") + private String display; + + @JsonProperty("primary") + private Boolean primary; + + @JsonProperty("$ref") + private String ref; + + @JsonProperty("type") + private String typeValue; + + @JsonProperty("value") + private String value; + + public ComplexValuePb setDisplay(String display) { + this.display = display; + return this; + } + + public String getDisplay() { + return display; + } + + public ComplexValuePb setPrimary(Boolean primary) { + this.primary = primary; + return this; + } + + public Boolean getPrimary() { + return primary; + } + + public ComplexValuePb setRef(String ref) { + this.ref = ref; + return this; + } + + public String getRef() { + return ref; + } + + public ComplexValuePb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + public ComplexValuePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComplexValuePb that = (ComplexValuePb) o; + return Objects.equals(display, that.display) + && Objects.equals(primary, that.primary) + && Objects.equals(ref, that.ref) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(display, primary, ref, typeValue, value); + } + + @Override + public String toString() { + return new ToStringer(ComplexValuePb.class) + .add("display", display) + .add("primary", primary) + .add("ref", ref) + .add("typeValue", typeValue) + .add("value", value) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java index 020aaee20..7e18be0f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ConsistencyToken.ConsistencyTokenSerializer.class) +@JsonDeserialize(using = ConsistencyToken.ConsistencyTokenDeserializer.class) public class ConsistencyToken { /** */ - @JsonProperty("value") private String value; public ConsistencyToken setValue(String value) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(ConsistencyToken.class).add("value", value).toString(); } + + ConsistencyTokenPb toPb() { + ConsistencyTokenPb pb = new ConsistencyTokenPb(); + pb.setValue(value); + + return pb; + } + + static ConsistencyToken fromPb(ConsistencyTokenPb pb) { + ConsistencyToken model = new ConsistencyToken(); + model.setValue(pb.getValue()); + + return model; + } + + public static class ConsistencyTokenSerializer extends JsonSerializer { + @Override + 
public void serialize(ConsistencyToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ConsistencyTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ConsistencyTokenDeserializer extends JsonDeserializer { + @Override + public ConsistencyToken deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ConsistencyTokenPb pb = mapper.readValue(p, ConsistencyTokenPb.class); + return ConsistencyToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyTokenPb.java new file mode 100755 index 000000000..ff8874de4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyTokenPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ConsistencyTokenPb { + @JsonProperty("value") + private String value; + + public ConsistencyTokenPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConsistencyTokenPb that = (ConsistencyTokenPb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(ConsistencyTokenPb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Converters.java new file mode 100755 index 000000000..d05d1af58 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.iam; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that 
nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java index bc96255a3..1d08874e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a group. */ @Generated +@JsonSerialize(using = DeleteAccountGroupRequest.DeleteAccountGroupRequestSerializer.class) +@JsonDeserialize(using = DeleteAccountGroupRequest.DeleteAccountGroupRequestDeserializer.class) public class DeleteAccountGroupRequest { /** Unique ID for a group in the Databricks account. */ - @JsonIgnore private String id; + private String id; public DeleteAccountGroupRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAccountGroupRequest.class).add("id", id).toString(); } + + DeleteAccountGroupRequestPb toPb() { + DeleteAccountGroupRequestPb pb = new DeleteAccountGroupRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteAccountGroupRequest fromPb(DeleteAccountGroupRequestPb pb) { + DeleteAccountGroupRequest model = new DeleteAccountGroupRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteAccountGroupRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountGroupRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountGroupRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountGroupRequestDeserializer + extends JsonDeserializer { + @Override + public 
DeleteAccountGroupRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountGroupRequestPb pb = mapper.readValue(p, DeleteAccountGroupRequestPb.class); + return DeleteAccountGroupRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequestPb.java new file mode 100755 index 000000000..48a33be1f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a group. 
*/ +@Generated +class DeleteAccountGroupRequestPb { + @JsonIgnore private String id; + + public DeleteAccountGroupRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountGroupRequestPb that = (DeleteAccountGroupRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountGroupRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java index 8970debe2..abe74383a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a service principal. 
*/ @Generated +@JsonSerialize( + using = + DeleteAccountServicePrincipalRequest.DeleteAccountServicePrincipalRequestSerializer.class) +@JsonDeserialize( + using = + DeleteAccountServicePrincipalRequest.DeleteAccountServicePrincipalRequestDeserializer.class) public class DeleteAccountServicePrincipalRequest { /** Unique ID for a service principal in the Databricks account. */ - @JsonIgnore private String id; + private String id; public DeleteAccountServicePrincipalRequest setId(String id) { this.id = id; @@ -39,4 +54,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAccountServicePrincipalRequest.class).add("id", id).toString(); } + + DeleteAccountServicePrincipalRequestPb toPb() { + DeleteAccountServicePrincipalRequestPb pb = new DeleteAccountServicePrincipalRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteAccountServicePrincipalRequest fromPb(DeleteAccountServicePrincipalRequestPb pb) { + DeleteAccountServicePrincipalRequest model = new DeleteAccountServicePrincipalRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteAccountServicePrincipalRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountServicePrincipalRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountServicePrincipalRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountServicePrincipalRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountServicePrincipalRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountServicePrincipalRequestPb pb = + mapper.readValue(p, DeleteAccountServicePrincipalRequestPb.class); + return DeleteAccountServicePrincipalRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequestPb.java new file mode 100755 index 000000000..34994fe20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a service principal. */ +@Generated +class DeleteAccountServicePrincipalRequestPb { + @JsonIgnore private String id; + + public DeleteAccountServicePrincipalRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountServicePrincipalRequestPb that = (DeleteAccountServicePrincipalRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountServicePrincipalRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java index 3b41e0c7c..165c29f67 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a user. */ @Generated +@JsonSerialize(using = DeleteAccountUserRequest.DeleteAccountUserRequestSerializer.class) +@JsonDeserialize(using = DeleteAccountUserRequest.DeleteAccountUserRequestDeserializer.class) public class DeleteAccountUserRequest { /** Unique ID for a user in the Databricks account. 
*/ - @JsonIgnore private String id; + private String id; public DeleteAccountUserRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAccountUserRequest.class).add("id", id).toString(); } + + DeleteAccountUserRequestPb toPb() { + DeleteAccountUserRequestPb pb = new DeleteAccountUserRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteAccountUserRequest fromPb(DeleteAccountUserRequestPb pb) { + DeleteAccountUserRequest model = new DeleteAccountUserRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteAccountUserRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountUserRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountUserRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountUserRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountUserRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountUserRequestPb pb = mapper.readValue(p, DeleteAccountUserRequestPb.class); + return DeleteAccountUserRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequestPb.java new file mode 100755 index 000000000..b0db7f984 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a user. */ +@Generated +class DeleteAccountUserRequestPb { + @JsonIgnore private String id; + + public DeleteAccountUserRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountUserRequestPb that = (DeleteAccountUserRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountUserRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java index f7ca0d7ee..28d8415ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a group. */ @Generated +@JsonSerialize(using = DeleteGroupRequest.DeleteGroupRequestSerializer.class) +@JsonDeserialize(using = DeleteGroupRequest.DeleteGroupRequestDeserializer.class) public class DeleteGroupRequest { /** Unique ID for a group in the Databricks workspace. */ - @JsonIgnore private String id; + private String id; public DeleteGroupRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteGroupRequest.class).add("id", id).toString(); } + + DeleteGroupRequestPb toPb() { + DeleteGroupRequestPb pb = new DeleteGroupRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteGroupRequest fromPb(DeleteGroupRequestPb pb) { + DeleteGroupRequest model = new DeleteGroupRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteGroupRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteGroupRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteGroupRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteGroupRequestDeserializer extends JsonDeserializer { + @Override + public DeleteGroupRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteGroupRequestPb pb = mapper.readValue(p, DeleteGroupRequestPb.class); + return DeleteGroupRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequestPb.java new file mode 100755 index 000000000..728277dd9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a group. */ +@Generated +class DeleteGroupRequestPb { + @JsonIgnore private String id; + + public DeleteGroupRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteGroupRequestPb that = (DeleteGroupRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteGroupRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java index c0a1c8f66..f32a21b51 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponsePb.java new file mode 100755 index 000000000..b3d7c9aeb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java index e24962cbc..35cc80e30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a service principal. */ @Generated +@JsonSerialize(using = DeleteServicePrincipalRequest.DeleteServicePrincipalRequestSerializer.class) +@JsonDeserialize( + using = DeleteServicePrincipalRequest.DeleteServicePrincipalRequestDeserializer.class) public class DeleteServicePrincipalRequest { /** Unique ID for a service principal in the Databricks workspace. */ - @JsonIgnore private String id; + private String id; public DeleteServicePrincipalRequest setId(String id) { this.id = id; @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteServicePrincipalRequest.class).add("id", id).toString(); } + + DeleteServicePrincipalRequestPb toPb() { + DeleteServicePrincipalRequestPb pb = new DeleteServicePrincipalRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteServicePrincipalRequest fromPb(DeleteServicePrincipalRequestPb pb) { + DeleteServicePrincipalRequest model = new DeleteServicePrincipalRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteServicePrincipalRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteServicePrincipalRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteServicePrincipalRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteServicePrincipalRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteServicePrincipalRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it 
is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteServicePrincipalRequestPb pb = + mapper.readValue(p, DeleteServicePrincipalRequestPb.class); + return DeleteServicePrincipalRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequestPb.java new file mode 100755 index 000000000..8ac50dc82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a service principal. */ +@Generated +class DeleteServicePrincipalRequestPb { + @JsonIgnore private String id; + + public DeleteServicePrincipalRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServicePrincipalRequestPb that = (DeleteServicePrincipalRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteServicePrincipalRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java index 05f3dcad4..59982467e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a user. */ @Generated +@JsonSerialize(using = DeleteUserRequest.DeleteUserRequestSerializer.class) +@JsonDeserialize(using = DeleteUserRequest.DeleteUserRequestDeserializer.class) public class DeleteUserRequest { /** Unique ID for a user in the Databricks workspace. 
*/ - @JsonIgnore private String id; + private String id; public DeleteUserRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteUserRequest.class).add("id", id).toString(); } + + DeleteUserRequestPb toPb() { + DeleteUserRequestPb pb = new DeleteUserRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteUserRequest fromPb(DeleteUserRequestPb pb) { + DeleteUserRequest model = new DeleteUserRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteUserRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteUserRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteUserRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteUserRequestDeserializer extends JsonDeserializer { + @Override + public DeleteUserRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteUserRequestPb pb = mapper.readValue(p, DeleteUserRequestPb.class); + return DeleteUserRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequestPb.java new file mode 100755 index 000000000..52db576a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a user. 
*/ +@Generated +class DeleteUserRequestPb { + @JsonIgnore private String id; + + public DeleteUserRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteUserRequestPb that = (DeleteUserRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteUserRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java index 3aaf9b8bb..510936bbb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java @@ -4,17 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete permissions assignment */ @Generated +@JsonSerialize( + using = 
DeleteWorkspaceAssignmentRequest.DeleteWorkspaceAssignmentRequestSerializer.class) +@JsonDeserialize( + using = DeleteWorkspaceAssignmentRequest.DeleteWorkspaceAssignmentRequestDeserializer.class) public class DeleteWorkspaceAssignmentRequest { /** The ID of the user, service principal, or group. */ - @JsonIgnore private Long principalId; + private Long principalId; /** The workspace ID for the account. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public DeleteWorkspaceAssignmentRequest setPrincipalId(Long principalId) { this.principalId = principalId; @@ -55,4 +68,44 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + DeleteWorkspaceAssignmentRequestPb toPb() { + DeleteWorkspaceAssignmentRequestPb pb = new DeleteWorkspaceAssignmentRequestPb(); + pb.setPrincipalId(principalId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static DeleteWorkspaceAssignmentRequest fromPb(DeleteWorkspaceAssignmentRequestPb pb) { + DeleteWorkspaceAssignmentRequest model = new DeleteWorkspaceAssignmentRequest(); + model.setPrincipalId(pb.getPrincipalId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class DeleteWorkspaceAssignmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWorkspaceAssignmentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWorkspaceAssignmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWorkspaceAssignmentRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteWorkspaceAssignmentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWorkspaceAssignmentRequestPb pb = + mapper.readValue(p, DeleteWorkspaceAssignmentRequestPb.class); + return DeleteWorkspaceAssignmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequestPb.java new file mode 100755 index 000000000..199fa6a02 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete permissions assignment */ +@Generated +class DeleteWorkspaceAssignmentRequestPb { + @JsonIgnore private Long principalId; + + @JsonIgnore private Long workspaceId; + + public DeleteWorkspaceAssignmentRequestPb setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public DeleteWorkspaceAssignmentRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWorkspaceAssignmentRequestPb that = (DeleteWorkspaceAssignmentRequestPb) o; + return Objects.equals(principalId, that.principalId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(principalId, workspaceId); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteWorkspaceAssignmentRequestPb.class) + .add("principalId", principalId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java index 2b53b7330..9e5a9a7cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java @@ -4,9 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + DeleteWorkspacePermissionAssignmentResponse + .DeleteWorkspacePermissionAssignmentResponseSerializer.class) +@JsonDeserialize( + using = + DeleteWorkspacePermissionAssignmentResponse + .DeleteWorkspacePermissionAssignmentResponseDeserializer.class) public class DeleteWorkspacePermissionAssignmentResponse { @Override @@ -25,4 +43,45 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWorkspacePermissionAssignmentResponse.class).toString(); } + + DeleteWorkspacePermissionAssignmentResponsePb toPb() { + DeleteWorkspacePermissionAssignmentResponsePb pb = + new 
DeleteWorkspacePermissionAssignmentResponsePb(); + + return pb; + } + + static DeleteWorkspacePermissionAssignmentResponse fromPb( + DeleteWorkspacePermissionAssignmentResponsePb pb) { + DeleteWorkspacePermissionAssignmentResponse model = + new DeleteWorkspacePermissionAssignmentResponse(); + + return model; + } + + public static class DeleteWorkspacePermissionAssignmentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWorkspacePermissionAssignmentResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteWorkspacePermissionAssignmentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWorkspacePermissionAssignmentResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteWorkspacePermissionAssignmentResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWorkspacePermissionAssignmentResponsePb pb = + mapper.readValue(p, DeleteWorkspacePermissionAssignmentResponsePb.class); + return DeleteWorkspacePermissionAssignmentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponsePb.java new file mode 100755 index 000000000..9f1b3eec2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteWorkspacePermissionAssignmentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteWorkspacePermissionAssignmentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java index 03afecc6c..506628dd0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get group details. 
*/ @Generated +@JsonSerialize(using = GetAccountGroupRequest.GetAccountGroupRequestSerializer.class) +@JsonDeserialize(using = GetAccountGroupRequest.GetAccountGroupRequestDeserializer.class) public class GetAccountGroupRequest { /** Unique ID for a group in the Databricks account. */ - @JsonIgnore private String id; + private String id; public GetAccountGroupRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetAccountGroupRequest.class).add("id", id).toString(); } + + GetAccountGroupRequestPb toPb() { + GetAccountGroupRequestPb pb = new GetAccountGroupRequestPb(); + pb.setId(id); + + return pb; + } + + static GetAccountGroupRequest fromPb(GetAccountGroupRequestPb pb) { + GetAccountGroupRequest model = new GetAccountGroupRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetAccountGroupRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountGroupRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountGroupRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountGroupRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountGroupRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountGroupRequestPb pb = mapper.readValue(p, GetAccountGroupRequestPb.class); + return GetAccountGroupRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequestPb.java new file mode 100755 index 000000000..e67067014 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get group details. */ +@Generated +class GetAccountGroupRequestPb { + @JsonIgnore private String id; + + public GetAccountGroupRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountGroupRequestPb that = (GetAccountGroupRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetAccountGroupRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java index bc6682251..91df379f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get service principal details. */ @Generated +@JsonSerialize( + using = GetAccountServicePrincipalRequest.GetAccountServicePrincipalRequestSerializer.class) +@JsonDeserialize( + using = GetAccountServicePrincipalRequest.GetAccountServicePrincipalRequestDeserializer.class) public class GetAccountServicePrincipalRequest { /** Unique ID for a service principal in the Databricks account. 
*/ - @JsonIgnore private String id; + private String id; public GetAccountServicePrincipalRequest setId(String id) { this.id = id; @@ -39,4 +52,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetAccountServicePrincipalRequest.class).add("id", id).toString(); } + + GetAccountServicePrincipalRequestPb toPb() { + GetAccountServicePrincipalRequestPb pb = new GetAccountServicePrincipalRequestPb(); + pb.setId(id); + + return pb; + } + + static GetAccountServicePrincipalRequest fromPb(GetAccountServicePrincipalRequestPb pb) { + GetAccountServicePrincipalRequest model = new GetAccountServicePrincipalRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetAccountServicePrincipalRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountServicePrincipalRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountServicePrincipalRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountServicePrincipalRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountServicePrincipalRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountServicePrincipalRequestPb pb = + mapper.readValue(p, GetAccountServicePrincipalRequestPb.class); + return GetAccountServicePrincipalRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequestPb.java new file mode 100755 index 000000000..b36b9c9d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get service principal details. */ +@Generated +class GetAccountServicePrincipalRequestPb { + @JsonIgnore private String id; + + public GetAccountServicePrincipalRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountServicePrincipalRequestPb that = (GetAccountServicePrincipalRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetAccountServicePrincipalRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java index d36f2bc97..2dcd50018 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get user details. */ @Generated +@JsonSerialize(using = GetAccountUserRequest.GetAccountUserRequestSerializer.class) +@JsonDeserialize(using = GetAccountUserRequest.GetAccountUserRequestDeserializer.class) public class GetAccountUserRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. Default is 10000. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,29 +38,21 @@ public class GetAccountUserRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Unique ID for a user in the Databricks account. */ - @JsonIgnore private String id; + private String id; /** * Attribute to sort the results. Multi-part paths are supported. For example, `userName`, * `name.givenName`, and `emails`. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private GetSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public GetAccountUserRequest setAttributes(String attributes) { @@ -165,4 +161,55 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + GetAccountUserRequestPb toPb() { + GetAccountUserRequestPb pb = new GetAccountUserRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setId(id); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static GetAccountUserRequest fromPb(GetAccountUserRequestPb pb) { + GetAccountUserRequest model = new GetAccountUserRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setId(pb.getId()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class GetAccountUserRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountUserRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountUserRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GetAccountUserRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountUserRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountUserRequestPb pb = mapper.readValue(p, GetAccountUserRequestPb.class); + return GetAccountUserRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequestPb.java new file mode 100755 index 000000000..cce8dafaa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequestPb.java @@ -0,0 +1,150 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get user details. 
*/ +@Generated +class GetAccountUserRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore private String id; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private GetSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public GetAccountUserRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public GetAccountUserRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public GetAccountUserRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public GetAccountUserRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public GetAccountUserRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GetAccountUserRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public GetAccountUserRequestPb setSortOrder(GetSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public GetSortOrder getSortOrder() { + return sortOrder; + } + + public GetAccountUserRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountUserRequestPb that = (GetAccountUserRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(id, that.id) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, id, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(GetAccountUserRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("id", id) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java index 2b034b2d3..6a50e351a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java @@ -3,13 +3,27 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get assignable roles for a resource */ @Generated +@JsonSerialize( + using = + GetAssignableRolesForResourceRequest.GetAssignableRolesForResourceRequestSerializer.class) +@JsonDeserialize( + using = + GetAssignableRolesForResourceRequest.GetAssignableRolesForResourceRequestDeserializer.class) public class GetAssignableRolesForResourceRequest { /** * The resource name for which assignable roles will be listed. @@ -19,8 +33,6 @@ public class GetAssignableRolesForResourceRequest { * `resource=accounts//servicePrincipals/` | A resource name for the service * principal. */ - @JsonIgnore - @QueryParam("resource") private String resource; public GetAssignableRolesForResourceRequest setResource(String resource) { @@ -51,4 +63,42 @@ public String toString() { .add("resource", resource) .toString(); } + + GetAssignableRolesForResourceRequestPb toPb() { + GetAssignableRolesForResourceRequestPb pb = new GetAssignableRolesForResourceRequestPb(); + pb.setResource(resource); + + return pb; + } + + static GetAssignableRolesForResourceRequest fromPb(GetAssignableRolesForResourceRequestPb pb) { + GetAssignableRolesForResourceRequest model = new GetAssignableRolesForResourceRequest(); + model.setResource(pb.getResource()); + + return model; + } + + public static class GetAssignableRolesForResourceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAssignableRolesForResourceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAssignableRolesForResourceRequestPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAssignableRolesForResourceRequestDeserializer + extends JsonDeserializer { + @Override + public GetAssignableRolesForResourceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAssignableRolesForResourceRequestPb pb = + mapper.readValue(p, GetAssignableRolesForResourceRequestPb.class); + return GetAssignableRolesForResourceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequestPb.java new file mode 100755 index 000000000..c94b15c16 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get assignable roles for a resource */ +@Generated +class GetAssignableRolesForResourceRequestPb { + @JsonIgnore + @QueryParam("resource") + private String resource; + + public GetAssignableRolesForResourceRequestPb setResource(String resource) { + this.resource = resource; + return this; + } + + public String getResource() { + return resource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAssignableRolesForResourceRequestPb that = (GetAssignableRolesForResourceRequestPb) o; + return Objects.equals(resource, that.resource); + } + + @Override + public int hashCode() { + return Objects.hash(resource); + } + + @Override + public String toString() { + return new ToStringer(GetAssignableRolesForResourceRequestPb.class) + .add("resource", resource) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponse.java index dedce06fb..cfa3b258b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponse.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetAssignableRolesForResourceResponse.GetAssignableRolesForResourceResponseSerializer.class) +@JsonDeserialize( + using = + GetAssignableRolesForResourceResponse.GetAssignableRolesForResourceResponseDeserializer + .class) public class GetAssignableRolesForResourceResponse { /** */ - @JsonProperty("roles") private Collection roles; public GetAssignableRolesForResourceResponse setRoles(Collection roles) { @@ -42,4 +57,42 @@ public String toString() { .add("roles", roles) .toString(); } + + GetAssignableRolesForResourceResponsePb toPb() { + GetAssignableRolesForResourceResponsePb pb = new GetAssignableRolesForResourceResponsePb(); + pb.setRoles(roles); + + return pb; + } + + static GetAssignableRolesForResourceResponse fromPb(GetAssignableRolesForResourceResponsePb pb) { + GetAssignableRolesForResourceResponse model = new GetAssignableRolesForResourceResponse(); + model.setRoles(pb.getRoles()); + + return model; + } + + public static class GetAssignableRolesForResourceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAssignableRolesForResourceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAssignableRolesForResourceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAssignableRolesForResourceResponseDeserializer + extends JsonDeserializer { + @Override + public GetAssignableRolesForResourceResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAssignableRolesForResourceResponsePb pb = + mapper.readValue(p, GetAssignableRolesForResourceResponsePb.class); + return GetAssignableRolesForResourceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponsePb.java new file mode 100755 index 000000000..c7d67cc92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetAssignableRolesForResourceResponsePb { + @JsonProperty("roles") + private Collection roles; + + public GetAssignableRolesForResourceResponsePb setRoles(Collection roles) { + this.roles = roles; + return this; + } + + public Collection getRoles() { + return roles; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAssignableRolesForResourceResponsePb that = (GetAssignableRolesForResourceResponsePb) o; + return Objects.equals(roles, that.roles); + } + + @Override + public int hashCode() { + return Objects.hash(roles); + } + + @Override + public String toString() { + return new ToStringer(GetAssignableRolesForResourceResponsePb.class) + .add("roles", roles) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java index 45820a35f..18c98f26f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get group details. */ @Generated +@JsonSerialize(using = GetGroupRequest.GetGroupRequestSerializer.class) +@JsonDeserialize(using = GetGroupRequest.GetGroupRequestDeserializer.class) public class GetGroupRequest { /** Unique ID for a group in the Databricks workspace. 
*/ - @JsonIgnore private String id; + private String id; public GetGroupRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetGroupRequest.class).add("id", id).toString(); } + + GetGroupRequestPb toPb() { + GetGroupRequestPb pb = new GetGroupRequestPb(); + pb.setId(id); + + return pb; + } + + static GetGroupRequest fromPb(GetGroupRequestPb pb) { + GetGroupRequest model = new GetGroupRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetGroupRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetGroupRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetGroupRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetGroupRequestDeserializer extends JsonDeserializer { + @Override + public GetGroupRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetGroupRequestPb pb = mapper.readValue(p, GetGroupRequestPb.class); + return GetGroupRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequestPb.java new file mode 100755 index 000000000..ecd8d33eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get group details. 
*/ +@Generated +class GetGroupRequestPb { + @JsonIgnore private String id; + + public GetGroupRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetGroupRequestPb that = (GetGroupRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetGroupRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponse.java index 050fdf672..bcc0932af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponse.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = 
GetPasswordPermissionLevelsResponse.GetPasswordPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetPasswordPermissionLevelsResponse.GetPasswordPermissionLevelsResponseDeserializer.class) public class GetPasswordPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetPasswordPermissionLevelsResponse setPermissionLevels( @@ -43,4 +56,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetPasswordPermissionLevelsResponsePb toPb() { + GetPasswordPermissionLevelsResponsePb pb = new GetPasswordPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetPasswordPermissionLevelsResponse fromPb(GetPasswordPermissionLevelsResponsePb pb) { + GetPasswordPermissionLevelsResponse model = new GetPasswordPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetPasswordPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPasswordPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPasswordPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPasswordPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetPasswordPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPasswordPermissionLevelsResponsePb pb = + mapper.readValue(p, GetPasswordPermissionLevelsResponsePb.class); + return GetPasswordPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponsePb.java new file mode 100755 index 000000000..679eceb8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPasswordPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPasswordPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetPasswordPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPasswordPermissionLevelsResponsePb that = (GetPasswordPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetPasswordPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java index d48921d05..9e1b25078 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get object permission levels */ @Generated +@JsonSerialize(using = GetPermissionLevelsRequest.GetPermissionLevelsRequestSerializer.class) +@JsonDeserialize(using = GetPermissionLevelsRequest.GetPermissionLevelsRequestDeserializer.class) public class GetPermissionLevelsRequest { /** */ - @JsonIgnore private String requestObjectId; + private String requestObjectId; /** * The type of the request object. Can be one of the following: alerts, authorization, clusters, @@ -19,7 +30,7 @@ public class GetPermissionLevelsRequest { * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. 
*/ - @JsonIgnore private String requestObjectType; + private String requestObjectType; public GetPermissionLevelsRequest setRequestObjectId(String requestObjectId) { this.requestObjectId = requestObjectId; @@ -60,4 +71,43 @@ public String toString() { .add("requestObjectType", requestObjectType) .toString(); } + + GetPermissionLevelsRequestPb toPb() { + GetPermissionLevelsRequestPb pb = new GetPermissionLevelsRequestPb(); + pb.setRequestObjectId(requestObjectId); + pb.setRequestObjectType(requestObjectType); + + return pb; + } + + static GetPermissionLevelsRequest fromPb(GetPermissionLevelsRequestPb pb) { + GetPermissionLevelsRequest model = new GetPermissionLevelsRequest(); + model.setRequestObjectId(pb.getRequestObjectId()); + model.setRequestObjectType(pb.getRequestObjectType()); + + return model; + } + + public static class GetPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetPermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPermissionLevelsRequestPb pb = mapper.readValue(p, GetPermissionLevelsRequestPb.class); + return GetPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequestPb.java new file mode 100755 index 000000000..17daef03c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get object permission levels */ +@Generated +class GetPermissionLevelsRequestPb { + @JsonIgnore private String requestObjectId; + + @JsonIgnore private String requestObjectType; + + public GetPermissionLevelsRequestPb setRequestObjectId(String requestObjectId) { + this.requestObjectId = requestObjectId; + return this; + } + + public String getRequestObjectId() { + return requestObjectId; + } + + public GetPermissionLevelsRequestPb setRequestObjectType(String requestObjectType) { + this.requestObjectType = requestObjectType; + return this; + } + + public String getRequestObjectType() { + return requestObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPermissionLevelsRequestPb that = (GetPermissionLevelsRequestPb) o; + return Objects.equals(requestObjectId, that.requestObjectId) + && Objects.equals(requestObjectType, that.requestObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(requestObjectId, requestObjectType); + } + + @Override + public 
String toString() { + return new ToStringer(GetPermissionLevelsRequestPb.class) + .add("requestObjectId", requestObjectId) + .add("requestObjectType", requestObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponse.java index e055b5fe0..307f3f633 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetPermissionLevelsResponse.GetPermissionLevelsResponseSerializer.class) +@JsonDeserialize(using = GetPermissionLevelsResponse.GetPermissionLevelsResponseDeserializer.class) public class GetPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetPermissionLevelsResponse setPermissionLevels( @@ -43,4 +53,41 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetPermissionLevelsResponsePb toPb() { + 
GetPermissionLevelsResponsePb pb = new GetPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetPermissionLevelsResponse fromPb(GetPermissionLevelsResponsePb pb) { + GetPermissionLevelsResponse model = new GetPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPermissionLevelsResponsePb pb = mapper.readValue(p, GetPermissionLevelsResponsePb.class); + return GetPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponsePb.java new file mode 100755 index 000000000..cf4146447 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPermissionLevelsResponsePb that = (GetPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java index a53dc9247..544609658 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get object permissions */ @Generated +@JsonSerialize(using = GetPermissionRequest.GetPermissionRequestSerializer.class) +@JsonDeserialize(using = GetPermissionRequest.GetPermissionRequestDeserializer.class) public class GetPermissionRequest { /** The id of the request object. */ - @JsonIgnore private String requestObjectId; + private String requestObjectId; /** * The type of the request object. Can be one of the following: alerts, authorization, clusters, @@ -19,7 +30,7 @@ public class GetPermissionRequest { * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ - @JsonIgnore private String requestObjectType; + private String requestObjectType; public GetPermissionRequest setRequestObjectId(String requestObjectId) { this.requestObjectId = requestObjectId; @@ -60,4 +71,42 @@ public String toString() { .add("requestObjectType", requestObjectType) .toString(); } + + GetPermissionRequestPb toPb() { + GetPermissionRequestPb pb = new GetPermissionRequestPb(); + pb.setRequestObjectId(requestObjectId); + pb.setRequestObjectType(requestObjectType); + + return pb; + } + + static GetPermissionRequest fromPb(GetPermissionRequestPb pb) { + GetPermissionRequest model = new GetPermissionRequest(); + model.setRequestObjectId(pb.getRequestObjectId()); + model.setRequestObjectType(pb.getRequestObjectType()); + + return model; + } + + public static class GetPermissionRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GetPermissionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPermissionRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPermissionRequestDeserializer + extends JsonDeserializer { + @Override + public GetPermissionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPermissionRequestPb pb = mapper.readValue(p, GetPermissionRequestPb.class); + return GetPermissionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequestPb.java new file mode 100755 index 000000000..71e2cd319 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get object permissions */ +@Generated +class GetPermissionRequestPb { + @JsonIgnore private String requestObjectId; + + @JsonIgnore private String requestObjectType; + + public GetPermissionRequestPb setRequestObjectId(String requestObjectId) { + this.requestObjectId = requestObjectId; + return this; + } + + public String getRequestObjectId() { + return requestObjectId; + } + + public GetPermissionRequestPb setRequestObjectType(String requestObjectType) { + this.requestObjectType = requestObjectType; + return this; + } + + public String getRequestObjectType() { + return requestObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPermissionRequestPb that = 
(GetPermissionRequestPb) o; + return Objects.equals(requestObjectId, that.requestObjectId) + && Objects.equals(requestObjectType, that.requestObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(requestObjectId, requestObjectType); + } + + @Override + public String toString() { + return new ToStringer(GetPermissionRequestPb.class) + .add("requestObjectId", requestObjectId) + .add("requestObjectType", requestObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java index 611758c5e..d436698b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a rule set */ @Generated +@JsonSerialize(using = GetRuleSetRequest.GetRuleSetRequestSerializer.class) +@JsonDeserialize(using = GetRuleSetRequest.GetRuleSetRequestDeserializer.class) public class GetRuleSetRequest { /** * Etag used for versioning. 
The response is at least as fresh as the eTag provided. Etag is used @@ -23,8 +33,6 @@ public class GetRuleSetRequest { * no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` * | An etag encoded a specific version of the rule set to get or to be updated. */ - @JsonIgnore - @QueryParam("etag") private String etag; /** @@ -36,8 +44,6 @@ public class GetRuleSetRequest { * `name=accounts//servicePrincipals//ruleSets/default` * | A name for a rule set on the service principal. */ - @JsonIgnore - @QueryParam("name") private String name; public GetRuleSetRequest setEtag(String etag) { @@ -75,4 +81,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetRuleSetRequest.class).add("etag", etag).add("name", name).toString(); } + + GetRuleSetRequestPb toPb() { + GetRuleSetRequestPb pb = new GetRuleSetRequestPb(); + pb.setEtag(etag); + pb.setName(name); + + return pb; + } + + static GetRuleSetRequest fromPb(GetRuleSetRequestPb pb) { + GetRuleSetRequest model = new GetRuleSetRequest(); + model.setEtag(pb.getEtag()); + model.setName(pb.getName()); + + return model; + } + + public static class GetRuleSetRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetRuleSetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRuleSetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRuleSetRequestDeserializer extends JsonDeserializer { + @Override + public GetRuleSetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRuleSetRequestPb pb = mapper.readValue(p, GetRuleSetRequestPb.class); + return GetRuleSetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequestPb.java new file mode 100755 index 000000000..4529a5796 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a rule set */ +@Generated +class GetRuleSetRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + @JsonIgnore + @QueryParam("name") + private String name; + + public GetRuleSetRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public GetRuleSetRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRuleSetRequestPb that = (GetRuleSetRequestPb) o; + return Objects.equals(etag, that.etag) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(etag, name); + } + + @Override + public String toString() { + return new ToStringer(GetRuleSetRequestPb.class).add("etag", etag).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java index b6f108016..a3b7e8c16 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get service principal details. */ @Generated +@JsonSerialize(using = GetServicePrincipalRequest.GetServicePrincipalRequestSerializer.class) +@JsonDeserialize(using = GetServicePrincipalRequest.GetServicePrincipalRequestDeserializer.class) public class GetServicePrincipalRequest { /** Unique ID for a service principal in the Databricks workspace. 
*/ - @JsonIgnore private String id; + private String id; public GetServicePrincipalRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetServicePrincipalRequest.class).add("id", id).toString(); } + + GetServicePrincipalRequestPb toPb() { + GetServicePrincipalRequestPb pb = new GetServicePrincipalRequestPb(); + pb.setId(id); + + return pb; + } + + static GetServicePrincipalRequest fromPb(GetServicePrincipalRequestPb pb) { + GetServicePrincipalRequest model = new GetServicePrincipalRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetServicePrincipalRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServicePrincipalRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetServicePrincipalRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServicePrincipalRequestDeserializer + extends JsonDeserializer { + @Override + public GetServicePrincipalRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServicePrincipalRequestPb pb = mapper.readValue(p, GetServicePrincipalRequestPb.class); + return GetServicePrincipalRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequestPb.java new file mode 100755 index 000000000..16a2d8a6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get service principal details. */ +@Generated +class GetServicePrincipalRequestPb { + @JsonIgnore private String id; + + public GetServicePrincipalRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServicePrincipalRequestPb that = (GetServicePrincipalRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetServicePrincipalRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java index 641367c1f..9eac09a12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get user details. */ @Generated +@JsonSerialize(using = GetUserRequest.GetUserRequestSerializer.class) +@JsonDeserialize(using = GetUserRequest.GetUserRequestDeserializer.class) public class GetUserRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,29 +38,21 @@ public class GetUserRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Unique ID for a user in the Databricks workspace. */ - @JsonIgnore private String id; + private String id; /** * Attribute to sort the results. Multi-part paths are supported. For example, `userName`, * `name.givenName`, and `emails`. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private GetSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public GetUserRequest setAttributes(String attributes) { @@ -165,4 +161,52 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + GetUserRequestPb toPb() { + GetUserRequestPb pb = new GetUserRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setId(id); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static GetUserRequest fromPb(GetUserRequestPb pb) { + GetUserRequest model = new GetUserRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setId(pb.getId()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class GetUserRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetUserRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetUserRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetUserRequestDeserializer extends JsonDeserializer { + @Override + public 
GetUserRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetUserRequestPb pb = mapper.readValue(p, GetUserRequestPb.class); + return GetUserRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequestPb.java new file mode 100755 index 000000000..3aba3ce90 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequestPb.java @@ -0,0 +1,150 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get user details. 
*/ +@Generated +class GetUserRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore private String id; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private GetSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public GetUserRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public GetUserRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public GetUserRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public GetUserRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public GetUserRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GetUserRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public GetUserRequestPb setSortOrder(GetSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public GetSortOrder getSortOrder() { + return sortOrder; + } + + public GetUserRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + GetUserRequestPb that = (GetUserRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(id, that.id) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, id, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(GetUserRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("id", id) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequest.java index 0131e1657..9d0063ec1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List workspace permissions */ @Generated +@JsonSerialize(using = GetWorkspaceAssignmentRequest.GetWorkspaceAssignmentRequestSerializer.class) +@JsonDeserialize( + using = GetWorkspaceAssignmentRequest.GetWorkspaceAssignmentRequestDeserializer.class) public class GetWorkspaceAssignmentRequest { /** The workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public GetWorkspaceAssignmentRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -41,4 +53,42 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + GetWorkspaceAssignmentRequestPb toPb() { + GetWorkspaceAssignmentRequestPb pb = new GetWorkspaceAssignmentRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static GetWorkspaceAssignmentRequest fromPb(GetWorkspaceAssignmentRequestPb pb) { + GetWorkspaceAssignmentRequest model = new GetWorkspaceAssignmentRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class GetWorkspaceAssignmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceAssignmentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceAssignmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceAssignmentRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceAssignmentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceAssignmentRequestPb pb = + mapper.readValue(p, GetWorkspaceAssignmentRequestPb.class); + return GetWorkspaceAssignmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequestPb.java new file mode 100755 index 000000000..7cb1f9987 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetWorkspaceAssignmentRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List workspace permissions */ +@Generated +class GetWorkspaceAssignmentRequestPb { + @JsonIgnore private Long workspaceId; + + public GetWorkspaceAssignmentRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceAssignmentRequestPb that = (GetWorkspaceAssignmentRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceAssignmentRequestPb.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java index fffa26c0c..ebac193a9 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GrantRule.GrantRuleSerializer.class) +@JsonDeserialize(using = GrantRule.GrantRuleDeserializer.class) public class GrantRule { /** * Principals this grant rule applies to. A principal can be a user (for end users), a service @@ -16,11 +27,9 @@ public class GrantRule { * own identifier format: * users/ * groups/ * * servicePrincipals/ */ - @JsonProperty("principals") private Collection principals; /** Role that is assigned to the list of principals. 
*/ - @JsonProperty("role") private String role; public GrantRule setPrincipals(Collection principals) { @@ -61,4 +70,39 @@ public String toString() { .add("role", role) .toString(); } + + GrantRulePb toPb() { + GrantRulePb pb = new GrantRulePb(); + pb.setPrincipals(principals); + pb.setRole(role); + + return pb; + } + + static GrantRule fromPb(GrantRulePb pb) { + GrantRule model = new GrantRule(); + model.setPrincipals(pb.getPrincipals()); + model.setRole(pb.getRole()); + + return model; + } + + public static class GrantRuleSerializer extends JsonSerializer { + @Override + public void serialize(GrantRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GrantRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GrantRuleDeserializer extends JsonDeserializer { + @Override + public GrantRule deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GrantRulePb pb = mapper.readValue(p, GrantRulePb.class); + return GrantRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRulePb.java new file mode 100755 index 000000000..c4a6fef87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRulePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GrantRulePb { + @JsonProperty("principals") + private Collection principals; + + @JsonProperty("role") + private String role; + + public GrantRulePb setPrincipals(Collection principals) { + this.principals = principals; + return this; + } + + public Collection getPrincipals() { + return principals; + } + + public GrantRulePb setRole(String role) { + this.role = role; + return this; + } + + public String getRole() { + return role; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GrantRulePb that = (GrantRulePb) o; + return Objects.equals(principals, that.principals) && Objects.equals(role, that.role); + } + + @Override + public int hashCode() { + return Objects.hash(principals, role); + } + + @Override + public String toString() { + return new ToStringer(GrantRulePb.class) + .add("principals", principals) + .add("role", role) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Group.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Group.java index a57cb180c..4792fbaf6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Group.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Group.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Group.GroupSerializer.class) +@JsonDeserialize(using = Group.GroupDeserializer.class) public class Group { /** String that represents a human-readable group name */ - @JsonProperty("displayName") private String displayName; /** @@ -21,35 +31,27 @@ public class Group { *

[assigning entitlements]: * https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements */ - @JsonProperty("entitlements") private Collection entitlements; /** */ - @JsonProperty("externalId") private String externalId; /** */ - @JsonProperty("groups") private Collection groups; /** Databricks group ID */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("members") private Collection members; /** Container for the group identifier. Workspace local versus account. */ - @JsonProperty("meta") private ResourceMeta meta; /** Corresponds to AWS instance profile/arn role. */ - @JsonProperty("roles") private Collection roles; /** The schema of the group. */ - @JsonProperty("schemas") private Collection schemas; public Group setDisplayName(String displayName) { @@ -169,4 +171,53 @@ public String toString() { .add("schemas", schemas) .toString(); } + + GroupPb toPb() { + GroupPb pb = new GroupPb(); + pb.setDisplayName(displayName); + pb.setEntitlements(entitlements); + pb.setExternalId(externalId); + pb.setGroups(groups); + pb.setId(id); + pb.setMembers(members); + pb.setMeta(meta); + pb.setRoles(roles); + pb.setSchemas(schemas); + + return pb; + } + + static Group fromPb(GroupPb pb) { + Group model = new Group(); + model.setDisplayName(pb.getDisplayName()); + model.setEntitlements(pb.getEntitlements()); + model.setExternalId(pb.getExternalId()); + model.setGroups(pb.getGroups()); + model.setId(pb.getId()); + model.setMembers(pb.getMembers()); + model.setMeta(pb.getMeta()); + model.setRoles(pb.getRoles()); + model.setSchemas(pb.getSchemas()); + + return model; + } + + public static class GroupSerializer extends JsonSerializer { + @Override + public void serialize(Group value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GroupPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GroupDeserializer extends JsonDeserializer { + @Override + public Group 
deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GroupPb pb = mapper.readValue(p, GroupPb.class); + return Group.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupPb.java new file mode 100755 index 000000000..e56bbf213 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupPb.java @@ -0,0 +1,157 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GroupPb { + @JsonProperty("displayName") + private String displayName; + + @JsonProperty("entitlements") + private Collection entitlements; + + @JsonProperty("externalId") + private String externalId; + + @JsonProperty("groups") + private Collection groups; + + @JsonProperty("id") + private String id; + + @JsonProperty("members") + private Collection members; + + @JsonProperty("meta") + private ResourceMeta meta; + + @JsonProperty("roles") + private Collection roles; + + @JsonProperty("schemas") + private Collection schemas; + + public GroupPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public GroupPb setEntitlements(Collection entitlements) { + this.entitlements = entitlements; + return this; + } + + public Collection getEntitlements() { + return entitlements; + } + + public GroupPb setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String 
getExternalId() { + return externalId; + } + + public GroupPb setGroups(Collection groups) { + this.groups = groups; + return this; + } + + public Collection getGroups() { + return groups; + } + + public GroupPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GroupPb setMembers(Collection members) { + this.members = members; + return this; + } + + public Collection getMembers() { + return members; + } + + public GroupPb setMeta(ResourceMeta meta) { + this.meta = meta; + return this; + } + + public ResourceMeta getMeta() { + return meta; + } + + public GroupPb setRoles(Collection roles) { + this.roles = roles; + return this; + } + + public Collection getRoles() { + return roles; + } + + public GroupPb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GroupPb that = (GroupPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(entitlements, that.entitlements) + && Objects.equals(externalId, that.externalId) + && Objects.equals(groups, that.groups) + && Objects.equals(id, that.id) + && Objects.equals(members, that.members) + && Objects.equals(meta, that.meta) + && Objects.equals(roles, that.roles) + && Objects.equals(schemas, that.schemas); + } + + @Override + public int hashCode() { + return Objects.hash( + displayName, entitlements, externalId, groups, id, members, meta, roles, schemas); + } + + @Override + public String toString() { + return new ToStringer(GroupPb.class) + .add("displayName", displayName) + .add("entitlements", entitlements) + .add("externalId", externalId) + .add("groups", groups) + .add("id", id) + .add("members", members) + .add("meta", meta) + .add("roles", roles) + .add("schemas", schemas) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java index 01a61454b..a6b7414d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java @@ -75,7 +75,7 @@ public Group get(GetGroupRequest request) { public Iterable list(ListGroupsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java index 282689a35..f840d4060 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java @@ -21,7 +21,7 @@ public Group create(Group request) { String path = "/api/2.0/preview/scim/v2/Groups"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Group.class); @@ -35,7 +35,7 @@ public void delete(DeleteGroupRequest request) { String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public Group get(GetGroupRequest request) { String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId()); try { Request req 
= new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Group.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListGroupsResponse list(ListGroupsRequest request) { String path = "/api/2.0/preview/scim/v2/Groups"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListGroupsResponse.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public void patch(PartialUpdate request) { String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public void update(Group request) { String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java index 0561e6669..994668142 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List group details. */ @Generated +@JsonSerialize(using = ListAccountGroupsRequest.ListAccountGroupsRequestSerializer.class) +@JsonDeserialize(using = ListAccountGroupsRequest.ListAccountGroupsRequestDeserializer.class) public class ListAccountGroupsRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. Default is 10000. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,23 +38,15 @@ public class ListAccountGroupsRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Attribute to sort the results. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListAccountGroupsRequest setAttributes(String attributes) { @@ -148,4 +144,53 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListAccountGroupsRequestPb toPb() { + ListAccountGroupsRequestPb pb = new ListAccountGroupsRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListAccountGroupsRequest fromPb(ListAccountGroupsRequestPb pb) { + ListAccountGroupsRequest model = new ListAccountGroupsRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListAccountGroupsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountGroupsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountGroupsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountGroupsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAccountGroupsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by 
us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountGroupsRequestPb pb = mapper.readValue(p, ListAccountGroupsRequestPb.class); + return ListAccountGroupsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequestPb.java new file mode 100755 index 000000000..75b977ed0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List group details. 
*/ +@Generated +class ListAccountGroupsRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListAccountGroupsRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListAccountGroupsRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListAccountGroupsRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public ListAccountGroupsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListAccountGroupsRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListAccountGroupsRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListAccountGroupsRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountGroupsRequestPb that = 
(ListAccountGroupsRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListAccountGroupsRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java index 23f3255c5..17c507778 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java @@ -3,27 +3,34 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List service principals. */ @Generated +@JsonSerialize( + using = ListAccountServicePrincipalsRequest.ListAccountServicePrincipalsRequestSerializer.class) +@JsonDeserialize( + using = + ListAccountServicePrincipalsRequest.ListAccountServicePrincipalsRequestDeserializer.class) public class ListAccountServicePrincipalsRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. Default is 10000. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,23 +41,15 @@ public class ListAccountServicePrincipalsRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Attribute to sort the results. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListAccountServicePrincipalsRequest setAttributes(String attributes) { @@ -148,4 +147,54 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListAccountServicePrincipalsRequestPb toPb() { + ListAccountServicePrincipalsRequestPb pb = new ListAccountServicePrincipalsRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListAccountServicePrincipalsRequest fromPb(ListAccountServicePrincipalsRequestPb pb) { + ListAccountServicePrincipalsRequest model = new ListAccountServicePrincipalsRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListAccountServicePrincipalsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountServicePrincipalsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountServicePrincipalsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountServicePrincipalsRequestDeserializer + extends JsonDeserializer { + @Override + 
public ListAccountServicePrincipalsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountServicePrincipalsRequestPb pb = + mapper.readValue(p, ListAccountServicePrincipalsRequestPb.class); + return ListAccountServicePrincipalsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequestPb.java new file mode 100755 index 000000000..3eacefbdf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List service principals. 
*/ +@Generated +class ListAccountServicePrincipalsRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListAccountServicePrincipalsRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListAccountServicePrincipalsRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListAccountServicePrincipalsRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public ListAccountServicePrincipalsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListAccountServicePrincipalsRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListAccountServicePrincipalsRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListAccountServicePrincipalsRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null 
|| getClass() != o.getClass()) return false; + ListAccountServicePrincipalsRequestPb that = (ListAccountServicePrincipalsRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListAccountServicePrincipalsRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java index 4615abfbd..927c3bc5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List users. */ @Generated +@JsonSerialize(using = ListAccountUsersRequest.ListAccountUsersRequestSerializer.class) +@JsonDeserialize(using = ListAccountUsersRequest.ListAccountUsersRequestDeserializer.class) public class ListAccountUsersRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. Default is 10000. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,26 +38,18 @@ public class ListAccountUsersRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** * Attribute to sort the results. Multi-part paths are supported. For example, `userName`, * `name.givenName`, and `emails`. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListAccountUsersRequest setAttributes(String attributes) { @@ -151,4 +147,53 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListAccountUsersRequestPb toPb() { + ListAccountUsersRequestPb pb = new ListAccountUsersRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListAccountUsersRequest fromPb(ListAccountUsersRequestPb pb) { + ListAccountUsersRequest model = new ListAccountUsersRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListAccountUsersRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountUsersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountUsersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountUsersRequestDeserializer + extends JsonDeserializer { + @Override + public ListAccountUsersRequest deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountUsersRequestPb pb = mapper.readValue(p, ListAccountUsersRequestPb.class); + return ListAccountUsersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequestPb.java new file mode 100755 index 000000000..fd2e3da9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List users. 
*/ +@Generated +class ListAccountUsersRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListAccountUsersRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListAccountUsersRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListAccountUsersRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public ListAccountUsersRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListAccountUsersRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListAccountUsersRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListAccountUsersRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountUsersRequestPb that = 
(ListAccountUsersRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListAccountUsersRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java index 85f9e831d..829407332 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List group details. */ @Generated +@JsonSerialize(using = ListGroupsRequest.ListGroupsRequestSerializer.class) +@JsonDeserialize(using = ListGroupsRequest.ListGroupsRequestDeserializer.class) public class ListGroupsRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,23 +38,15 @@ public class ListGroupsRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Attribute to sort the results. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListGroupsRequest setAttributes(String attributes) { @@ -148,4 +144,50 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListGroupsRequestPb toPb() { + ListGroupsRequestPb pb = new ListGroupsRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListGroupsRequest fromPb(ListGroupsRequestPb pb) { + ListGroupsRequest model = new ListGroupsRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListGroupsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListGroupsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListGroupsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListGroupsRequestDeserializer extends JsonDeserializer { + @Override + public ListGroupsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListGroupsRequestPb pb = mapper.readValue(p, ListGroupsRequestPb.class); + return ListGroupsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequestPb.java new file mode 100755 index 000000000..407128dcb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List group details. */ +@Generated +class ListGroupsRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListGroupsRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListGroupsRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListGroupsRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return 
excludedAttributes; + } + + public ListGroupsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListGroupsRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListGroupsRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListGroupsRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGroupsRequestPb that = (ListGroupsRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListGroupsRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponse.java index 41877be1d..3bdb5834a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListGroupsResponse.ListGroupsResponseSerializer.class) +@JsonDeserialize(using = ListGroupsResponse.ListGroupsResponseDeserializer.class) public class ListGroupsResponse { /** Total results returned in the response. */ - @JsonProperty("itemsPerPage") private Long itemsPerPage; /** User objects returned in the response. */ - @JsonProperty("Resources") private Collection resources; /** The schema of the service principal. */ - @JsonProperty("schemas") private Collection schemas; /** Starting index of all the results that matched the request filters. First item is number 1. */ - @JsonProperty("startIndex") private Long startIndex; /** Total results that match the request filters. 
*/ - @JsonProperty("totalResults") private Long totalResults; public ListGroupsResponse setItemsPerPage(Long itemsPerPage) { @@ -102,4 +108,46 @@ public String toString() { .add("totalResults", totalResults) .toString(); } + + ListGroupsResponsePb toPb() { + ListGroupsResponsePb pb = new ListGroupsResponsePb(); + pb.setItemsPerPage(itemsPerPage); + pb.setResources(resources); + pb.setSchemas(schemas); + pb.setStartIndex(startIndex); + pb.setTotalResults(totalResults); + + return pb; + } + + static ListGroupsResponse fromPb(ListGroupsResponsePb pb) { + ListGroupsResponse model = new ListGroupsResponse(); + model.setItemsPerPage(pb.getItemsPerPage()); + model.setResources(pb.getResources()); + model.setSchemas(pb.getSchemas()); + model.setStartIndex(pb.getStartIndex()); + model.setTotalResults(pb.getTotalResults()); + + return model; + } + + public static class ListGroupsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListGroupsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListGroupsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListGroupsResponseDeserializer extends JsonDeserializer { + @Override + public ListGroupsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListGroupsResponsePb pb = mapper.readValue(p, ListGroupsResponsePb.class); + return ListGroupsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponsePb.java new file mode 100755 index 000000000..f812d59d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListGroupsResponsePb { + @JsonProperty("itemsPerPage") + private Long itemsPerPage; + + @JsonProperty("Resources") + private Collection resources; + + @JsonProperty("schemas") + private Collection schemas; + + @JsonProperty("startIndex") + private Long startIndex; + + @JsonProperty("totalResults") + private Long totalResults; + + public ListGroupsResponsePb setItemsPerPage(Long itemsPerPage) { + this.itemsPerPage = itemsPerPage; + return this; + } + + public Long getItemsPerPage() { + return itemsPerPage; + } + + public ListGroupsResponsePb setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + public ListGroupsResponsePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + public ListGroupsResponsePb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + public ListGroupsResponsePb setTotalResults(Long 
totalResults) { + this.totalResults = totalResults; + return this; + } + + public Long getTotalResults() { + return totalResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGroupsResponsePb that = (ListGroupsResponsePb) o; + return Objects.equals(itemsPerPage, that.itemsPerPage) + && Objects.equals(resources, that.resources) + && Objects.equals(schemas, that.schemas) + && Objects.equals(startIndex, that.startIndex) + && Objects.equals(totalResults, that.totalResults); + } + + @Override + public int hashCode() { + return Objects.hash(itemsPerPage, resources, schemas, startIndex, totalResults); + } + + @Override + public String toString() { + return new ToStringer(ListGroupsResponsePb.class) + .add("itemsPerPage", itemsPerPage) + .add("resources", resources) + .add("schemas", schemas) + .add("startIndex", startIndex) + .add("totalResults", totalResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponse.java index 0d8821afd..37f178a71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponse.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListServicePrincipalResponse.ListServicePrincipalResponseSerializer.class) +@JsonDeserialize( + using = ListServicePrincipalResponse.ListServicePrincipalResponseDeserializer.class) public class ListServicePrincipalResponse { /** Total results returned in the response. */ - @JsonProperty("itemsPerPage") private Long itemsPerPage; /** User objects returned in the response. */ - @JsonProperty("Resources") private Collection resources; /** The schema of the List response. */ - @JsonProperty("schemas") private Collection schemas; /** Starting index of all the results that matched the request filters. First item is number 1. */ - @JsonProperty("startIndex") private Long startIndex; /** Total results that match the request filters. */ - @JsonProperty("totalResults") private Long totalResults; public ListServicePrincipalResponse setItemsPerPage(Long itemsPerPage) { @@ -102,4 +109,49 @@ public String toString() { .add("totalResults", totalResults) .toString(); } + + ListServicePrincipalResponsePb toPb() { + ListServicePrincipalResponsePb pb = new ListServicePrincipalResponsePb(); + pb.setItemsPerPage(itemsPerPage); + pb.setResources(resources); + pb.setSchemas(schemas); + pb.setStartIndex(startIndex); + pb.setTotalResults(totalResults); + + return pb; + } + + static ListServicePrincipalResponse fromPb(ListServicePrincipalResponsePb pb) { + ListServicePrincipalResponse model = new ListServicePrincipalResponse(); + model.setItemsPerPage(pb.getItemsPerPage()); + model.setResources(pb.getResources()); + model.setSchemas(pb.getSchemas()); + model.setStartIndex(pb.getStartIndex()); + model.setTotalResults(pb.getTotalResults()); + + return model; + } + + public static class ListServicePrincipalResponseSerializer + extends 
JsonSerializer { + @Override + public void serialize( + ListServicePrincipalResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListServicePrincipalResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListServicePrincipalResponseDeserializer + extends JsonDeserializer { + @Override + public ListServicePrincipalResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListServicePrincipalResponsePb pb = mapper.readValue(p, ListServicePrincipalResponsePb.class); + return ListServicePrincipalResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponsePb.java new file mode 100755 index 000000000..6c8a68970 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListServicePrincipalResponsePb { + @JsonProperty("itemsPerPage") + private Long itemsPerPage; + + @JsonProperty("Resources") + private Collection resources; + + @JsonProperty("schemas") + private Collection schemas; + + @JsonProperty("startIndex") + private Long startIndex; + + @JsonProperty("totalResults") + private Long totalResults; + + public ListServicePrincipalResponsePb setItemsPerPage(Long itemsPerPage) { + this.itemsPerPage = itemsPerPage; + return this; + } + + public Long getItemsPerPage() { + return itemsPerPage; + } + + public ListServicePrincipalResponsePb setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + public ListServicePrincipalResponsePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + public ListServicePrincipalResponsePb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + public ListServicePrincipalResponsePb setTotalResults(Long totalResults) { + this.totalResults = totalResults; + return this; + } + + public Long getTotalResults() { + return totalResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalResponsePb that = (ListServicePrincipalResponsePb) o; + return Objects.equals(itemsPerPage, that.itemsPerPage) + && Objects.equals(resources, that.resources) + && Objects.equals(schemas, that.schemas) + && Objects.equals(startIndex, that.startIndex) + && 
Objects.equals(totalResults, that.totalResults); + } + + @Override + public int hashCode() { + return Objects.hash(itemsPerPage, resources, schemas, startIndex, totalResults); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalResponsePb.class) + .add("itemsPerPage", itemsPerPage) + .add("resources", resources) + .add("schemas", schemas) + .add("startIndex", startIndex) + .add("totalResults", totalResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java index 35003ac3c..3fd35dc42 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java @@ -3,27 +3,32 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List service principals. 
*/ @Generated +@JsonSerialize(using = ListServicePrincipalsRequest.ListServicePrincipalsRequestSerializer.class) +@JsonDeserialize( + using = ListServicePrincipalsRequest.ListServicePrincipalsRequestDeserializer.class) public class ListServicePrincipalsRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,23 +39,15 @@ public class ListServicePrincipalsRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Attribute to sort the results. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListServicePrincipalsRequest setAttributes(String attributes) { @@ -148,4 +145,53 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListServicePrincipalsRequestPb toPb() { + ListServicePrincipalsRequestPb pb = new ListServicePrincipalsRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListServicePrincipalsRequest fromPb(ListServicePrincipalsRequestPb pb) { + ListServicePrincipalsRequest model = new ListServicePrincipalsRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListServicePrincipalsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListServicePrincipalsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListServicePrincipalsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListServicePrincipalsRequestDeserializer + extends JsonDeserializer { + @Override + public ListServicePrincipalsRequest deserialize(JsonParser p, DeserializationContext 
ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListServicePrincipalsRequestPb pb = mapper.readValue(p, ListServicePrincipalsRequestPb.class); + return ListServicePrincipalsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequestPb.java new file mode 100755 index 000000000..e0a510fbd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List service principals. 
*/ +@Generated +class ListServicePrincipalsRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListServicePrincipalsRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListServicePrincipalsRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListServicePrincipalsRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String getExcludedAttributes() { + return excludedAttributes; + } + + public ListServicePrincipalsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListServicePrincipalsRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListServicePrincipalsRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListServicePrincipalsRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ListServicePrincipalsRequestPb that = (ListServicePrincipalsRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalsRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java index 373ad0552..879aeb0d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List users. */ @Generated +@JsonSerialize(using = ListUsersRequest.ListUsersRequestSerializer.class) +@JsonDeserialize(using = ListUsersRequest.ListUsersRequestDeserializer.class) public class ListUsersRequest { /** Comma-separated list of attributes to return in response. */ - @JsonIgnore - @QueryParam("attributes") private String attributes; /** Desired number of results per page. */ - @JsonIgnore - @QueryParam("count") private Long count; /** Comma-separated list of attributes to exclude in response. */ - @JsonIgnore - @QueryParam("excludedAttributes") private String excludedAttributes; /** @@ -34,26 +38,18 @@ public class ListUsersRequest { * *

[SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 */ - @JsonIgnore - @QueryParam("filter") private String filter; /** * Attribute to sort the results. Multi-part paths are supported. For example, `userName`, * `name.givenName`, and `emails`. */ - @JsonIgnore - @QueryParam("sortBy") private String sortBy; /** The order to sort the results. */ - @JsonIgnore - @QueryParam("sortOrder") private ListSortOrder sortOrder; /** Specifies the index of the first result. First item is number 1. */ - @JsonIgnore - @QueryParam("startIndex") private Long startIndex; public ListUsersRequest setAttributes(String attributes) { @@ -151,4 +147,50 @@ public String toString() { .add("startIndex", startIndex) .toString(); } + + ListUsersRequestPb toPb() { + ListUsersRequestPb pb = new ListUsersRequestPb(); + pb.setAttributes(attributes); + pb.setCount(count); + pb.setExcludedAttributes(excludedAttributes); + pb.setFilter(filter); + pb.setSortBy(sortBy); + pb.setSortOrder(sortOrder); + pb.setStartIndex(startIndex); + + return pb; + } + + static ListUsersRequest fromPb(ListUsersRequestPb pb) { + ListUsersRequest model = new ListUsersRequest(); + model.setAttributes(pb.getAttributes()); + model.setCount(pb.getCount()); + model.setExcludedAttributes(pb.getExcludedAttributes()); + model.setFilter(pb.getFilter()); + model.setSortBy(pb.getSortBy()); + model.setSortOrder(pb.getSortOrder()); + model.setStartIndex(pb.getStartIndex()); + + return model; + } + + public static class ListUsersRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListUsersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListUsersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListUsersRequestDeserializer extends JsonDeserializer { + @Override + public ListUsersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListUsersRequestPb pb = mapper.readValue(p, ListUsersRequestPb.class); + return ListUsersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequestPb.java new file mode 100755 index 000000000..4073338ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequestPb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List users. */ +@Generated +class ListUsersRequestPb { + @JsonIgnore + @QueryParam("attributes") + private String attributes; + + @JsonIgnore + @QueryParam("count") + private Long count; + + @JsonIgnore + @QueryParam("excludedAttributes") + private String excludedAttributes; + + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("sortBy") + private String sortBy; + + @JsonIgnore + @QueryParam("sortOrder") + private ListSortOrder sortOrder; + + @JsonIgnore + @QueryParam("startIndex") + private Long startIndex; + + public ListUsersRequestPb setAttributes(String attributes) { + this.attributes = attributes; + return this; + } + + public String getAttributes() { + return attributes; + } + + public ListUsersRequestPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListUsersRequestPb setExcludedAttributes(String excludedAttributes) { + this.excludedAttributes = excludedAttributes; + return this; + } + + public String 
getExcludedAttributes() { + return excludedAttributes; + } + + public ListUsersRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListUsersRequestPb setSortBy(String sortBy) { + this.sortBy = sortBy; + return this; + } + + public String getSortBy() { + return sortBy; + } + + public ListUsersRequestPb setSortOrder(ListSortOrder sortOrder) { + this.sortOrder = sortOrder; + return this; + } + + public ListSortOrder getSortOrder() { + return sortOrder; + } + + public ListUsersRequestPb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsersRequestPb that = (ListUsersRequestPb) o; + return Objects.equals(attributes, that.attributes) + && Objects.equals(count, that.count) + && Objects.equals(excludedAttributes, that.excludedAttributes) + && Objects.equals(filter, that.filter) + && Objects.equals(sortBy, that.sortBy) + && Objects.equals(sortOrder, that.sortOrder) + && Objects.equals(startIndex, that.startIndex); + } + + @Override + public int hashCode() { + return Objects.hash( + attributes, count, excludedAttributes, filter, sortBy, sortOrder, startIndex); + } + + @Override + public String toString() { + return new ToStringer(ListUsersRequestPb.class) + .add("attributes", attributes) + .add("count", count) + .add("excludedAttributes", excludedAttributes) + .add("filter", filter) + .add("sortBy", sortBy) + .add("sortOrder", sortOrder) + .add("startIndex", startIndex) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponse.java index 9d1a07e9c..16b9f7970 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListUsersResponse.ListUsersResponseSerializer.class) +@JsonDeserialize(using = ListUsersResponse.ListUsersResponseDeserializer.class) public class ListUsersResponse { /** Total results returned in the response. */ - @JsonProperty("itemsPerPage") private Long itemsPerPage; /** User objects returned in the response. */ - @JsonProperty("Resources") private Collection resources; /** The schema of the List response. */ - @JsonProperty("schemas") private Collection schemas; /** Starting index of all the results that matched the request filters. First item is number 1. */ - @JsonProperty("startIndex") private Long startIndex; /** Total results that match the request filters. 
*/ - @JsonProperty("totalResults") private Long totalResults; public ListUsersResponse setItemsPerPage(Long itemsPerPage) { @@ -102,4 +108,46 @@ public String toString() { .add("totalResults", totalResults) .toString(); } + + ListUsersResponsePb toPb() { + ListUsersResponsePb pb = new ListUsersResponsePb(); + pb.setItemsPerPage(itemsPerPage); + pb.setResources(resources); + pb.setSchemas(schemas); + pb.setStartIndex(startIndex); + pb.setTotalResults(totalResults); + + return pb; + } + + static ListUsersResponse fromPb(ListUsersResponsePb pb) { + ListUsersResponse model = new ListUsersResponse(); + model.setItemsPerPage(pb.getItemsPerPage()); + model.setResources(pb.getResources()); + model.setSchemas(pb.getSchemas()); + model.setStartIndex(pb.getStartIndex()); + model.setTotalResults(pb.getTotalResults()); + + return model; + } + + public static class ListUsersResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListUsersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListUsersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListUsersResponseDeserializer extends JsonDeserializer { + @Override + public ListUsersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListUsersResponsePb pb = mapper.readValue(p, ListUsersResponsePb.class); + return ListUsersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponsePb.java new file mode 100755 index 000000000..2ec4b41d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListUsersResponsePb { + @JsonProperty("itemsPerPage") + private Long itemsPerPage; + + @JsonProperty("Resources") + private Collection resources; + + @JsonProperty("schemas") + private Collection schemas; + + @JsonProperty("startIndex") + private Long startIndex; + + @JsonProperty("totalResults") + private Long totalResults; + + public ListUsersResponsePb setItemsPerPage(Long itemsPerPage) { + this.itemsPerPage = itemsPerPage; + return this; + } + + public Long getItemsPerPage() { + return itemsPerPage; + } + + public ListUsersResponsePb setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + public ListUsersResponsePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + public ListUsersResponsePb setStartIndex(Long startIndex) { + this.startIndex = startIndex; + return this; + } + + public Long getStartIndex() { + return startIndex; + } + + public ListUsersResponsePb setTotalResults(Long totalResults) 
{ + this.totalResults = totalResults; + return this; + } + + public Long getTotalResults() { + return totalResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsersResponsePb that = (ListUsersResponsePb) o; + return Objects.equals(itemsPerPage, that.itemsPerPage) + && Objects.equals(resources, that.resources) + && Objects.equals(schemas, that.schemas) + && Objects.equals(startIndex, that.startIndex) + && Objects.equals(totalResults, that.totalResults); + } + + @Override + public int hashCode() { + return Objects.hash(itemsPerPage, resources, schemas, startIndex, totalResults); + } + + @Override + public String toString() { + return new ToStringer(ListUsersResponsePb.class) + .add("itemsPerPage", itemsPerPage) + .add("resources", resources) + .add("schemas", schemas) + .add("startIndex", startIndex) + .add("totalResults", totalResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequest.java index 5b6aaa9c9..2e571f3ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get permission assignments */ @Generated +@JsonSerialize( + using = ListWorkspaceAssignmentRequest.ListWorkspaceAssignmentRequestSerializer.class) +@JsonDeserialize( + using = ListWorkspaceAssignmentRequest.ListWorkspaceAssignmentRequestDeserializer.class) public class ListWorkspaceAssignmentRequest { /** The workspace ID for the account. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public ListWorkspaceAssignmentRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -41,4 +54,42 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + ListWorkspaceAssignmentRequestPb toPb() { + ListWorkspaceAssignmentRequestPb pb = new ListWorkspaceAssignmentRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static ListWorkspaceAssignmentRequest fromPb(ListWorkspaceAssignmentRequestPb pb) { + ListWorkspaceAssignmentRequest model = new ListWorkspaceAssignmentRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class ListWorkspaceAssignmentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListWorkspaceAssignmentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListWorkspaceAssignmentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListWorkspaceAssignmentRequestDeserializer + extends JsonDeserializer { + @Override + public ListWorkspaceAssignmentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListWorkspaceAssignmentRequestPb pb = + mapper.readValue(p, ListWorkspaceAssignmentRequestPb.class); + return ListWorkspaceAssignmentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequestPb.java new file mode 100755 index 000000000..47cc5184e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListWorkspaceAssignmentRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get permission assignments */ +@Generated +class ListWorkspaceAssignmentRequestPb { + @JsonIgnore private Long workspaceId; + + public ListWorkspaceAssignmentRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceAssignmentRequestPb that = (ListWorkspaceAssignmentRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceAssignmentRequestPb.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequest.java index 
847768c67..2a85ab0dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MigratePermissionsRequest.MigratePermissionsRequestSerializer.class) +@JsonDeserialize(using = MigratePermissionsRequest.MigratePermissionsRequestDeserializer.class) public class MigratePermissionsRequest { /** The name of the workspace group that permissions will be migrated from. */ - @JsonProperty("from_workspace_group_name") private String fromWorkspaceGroupName; /** The maximum number of permissions that will be migrated. */ - @JsonProperty("size") private Long size; /** The name of the account group that permissions will be migrated to. */ - @JsonProperty("to_account_group_name") private String toAccountGroupName; /** WorkspaceId of the associated workspace where the permission migration will occur. 
*/ - @JsonProperty("workspace_id") private Long workspaceId; public MigratePermissionsRequest setFromWorkspaceGroupName(String fromWorkspaceGroupName) { @@ -86,4 +93,47 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + MigratePermissionsRequestPb toPb() { + MigratePermissionsRequestPb pb = new MigratePermissionsRequestPb(); + pb.setFromWorkspaceGroupName(fromWorkspaceGroupName); + pb.setSize(size); + pb.setToAccountGroupName(toAccountGroupName); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static MigratePermissionsRequest fromPb(MigratePermissionsRequestPb pb) { + MigratePermissionsRequest model = new MigratePermissionsRequest(); + model.setFromWorkspaceGroupName(pb.getFromWorkspaceGroupName()); + model.setSize(pb.getSize()); + model.setToAccountGroupName(pb.getToAccountGroupName()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class MigratePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + MigratePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MigratePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MigratePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public MigratePermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MigratePermissionsRequestPb pb = mapper.readValue(p, MigratePermissionsRequestPb.class); + return MigratePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequestPb.java new file mode 100755 index 000000000..65bfe27ac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MigratePermissionsRequestPb { + @JsonProperty("from_workspace_group_name") + private String fromWorkspaceGroupName; + + @JsonProperty("size") + private Long size; + + @JsonProperty("to_account_group_name") + private String toAccountGroupName; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public MigratePermissionsRequestPb setFromWorkspaceGroupName(String fromWorkspaceGroupName) { + this.fromWorkspaceGroupName = fromWorkspaceGroupName; + return this; + } + + public String getFromWorkspaceGroupName() { + return fromWorkspaceGroupName; + } + + public MigratePermissionsRequestPb setSize(Long size) { + this.size = size; + return this; + } + + public Long getSize() { + return size; + } + + public MigratePermissionsRequestPb setToAccountGroupName(String toAccountGroupName) { + this.toAccountGroupName = toAccountGroupName; + return this; + } + + public String getToAccountGroupName() { + return toAccountGroupName; + } + + public MigratePermissionsRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public 
Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MigratePermissionsRequestPb that = (MigratePermissionsRequestPb) o; + return Objects.equals(fromWorkspaceGroupName, that.fromWorkspaceGroupName) + && Objects.equals(size, that.size) + && Objects.equals(toAccountGroupName, that.toAccountGroupName) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(fromWorkspaceGroupName, size, toAccountGroupName, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(MigratePermissionsRequestPb.class) + .add("fromWorkspaceGroupName", fromWorkspaceGroupName) + .add("size", size) + .add("toAccountGroupName", toAccountGroupName) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponse.java index 3dd131f9c..db6191436 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MigratePermissionsResponse.MigratePermissionsResponseSerializer.class) +@JsonDeserialize(using = MigratePermissionsResponse.MigratePermissionsResponseDeserializer.class) public class MigratePermissionsResponse { /** Number of permissions migrated. */ - @JsonProperty("permissions_migrated") private Long permissionsMigrated; public MigratePermissionsResponse setPermissionsMigrated(Long permissionsMigrated) { @@ -41,4 +51,41 @@ public String toString() { .add("permissionsMigrated", permissionsMigrated) .toString(); } + + MigratePermissionsResponsePb toPb() { + MigratePermissionsResponsePb pb = new MigratePermissionsResponsePb(); + pb.setPermissionsMigrated(permissionsMigrated); + + return pb; + } + + static MigratePermissionsResponse fromPb(MigratePermissionsResponsePb pb) { + MigratePermissionsResponse model = new MigratePermissionsResponse(); + model.setPermissionsMigrated(pb.getPermissionsMigrated()); + + return model; + } + + public static class MigratePermissionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + MigratePermissionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MigratePermissionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MigratePermissionsResponseDeserializer + extends JsonDeserializer { + @Override + public MigratePermissionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MigratePermissionsResponsePb pb = mapper.readValue(p, MigratePermissionsResponsePb.class); + return MigratePermissionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponsePb.java new file mode 100755 index 000000000..f7d895d97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/MigratePermissionsResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MigratePermissionsResponsePb { + @JsonProperty("permissions_migrated") + private Long permissionsMigrated; + + public MigratePermissionsResponsePb setPermissionsMigrated(Long permissionsMigrated) { + this.permissionsMigrated = permissionsMigrated; + return this; + } + + public Long getPermissionsMigrated() { + return permissionsMigrated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MigratePermissionsResponsePb that = (MigratePermissionsResponsePb) o; + return Objects.equals(permissionsMigrated, that.permissionsMigrated); + } + + @Override + public int hashCode() { + return Objects.hash(permissionsMigrated); + } + + @Override + public String toString() { + return new ToStringer(MigratePermissionsResponsePb.class) + .add("permissionsMigrated", permissionsMigrated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Name.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Name.java index 
b6162e05e..e43fcff15 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Name.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Name.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Name.NameSerializer.class) +@JsonDeserialize(using = Name.NameDeserializer.class) public class Name { /** Family name of the Databricks user. */ - @JsonProperty("familyName") private String familyName; /** Given name of the Databricks user. 
*/ - @JsonProperty("givenName") private String givenName; public Name setFamilyName(String familyName) { @@ -55,4 +64,39 @@ public String toString() { .add("givenName", givenName) .toString(); } + + NamePb toPb() { + NamePb pb = new NamePb(); + pb.setFamilyName(familyName); + pb.setGivenName(givenName); + + return pb; + } + + static Name fromPb(NamePb pb) { + Name model = new Name(); + model.setFamilyName(pb.getFamilyName()); + model.setGivenName(pb.getGivenName()); + + return model; + } + + public static class NameSerializer extends JsonSerializer { + @Override + public void serialize(Name value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NamePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NameDeserializer extends JsonDeserializer { + @Override + public Name deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NamePb pb = mapper.readValue(p, NamePb.class); + return Name.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/NamePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/NamePb.java new file mode 100755 index 000000000..86f3f2151 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/NamePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NamePb { + @JsonProperty("familyName") + private String familyName; + + @JsonProperty("givenName") + private String givenName; + + public NamePb setFamilyName(String familyName) { + this.familyName = familyName; + return this; + } + + public String getFamilyName() { + return familyName; + } + + public NamePb setGivenName(String givenName) { + this.givenName = givenName; + return this; + } + + public String getGivenName() { + return givenName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NamePb that = (NamePb) o; + return Objects.equals(familyName, that.familyName) && Objects.equals(givenName, that.givenName); + } + + @Override + public int hashCode() { + return Objects.hash(familyName, givenName); + } + + @Override + public String toString() { + return new ToStringer(NamePb.class) + .add("familyName", familyName) + .add("givenName", givenName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissions.java index 7b8952be7..9dd007b81 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ObjectPermissions.ObjectPermissionsSerializer.class) +@JsonDeserialize(using = ObjectPermissions.ObjectPermissionsDeserializer.class) public class ObjectPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public ObjectPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + ObjectPermissionsPb toPb() { + ObjectPermissionsPb pb = new ObjectPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static ObjectPermissions fromPb(ObjectPermissionsPb pb) { + ObjectPermissions model = new ObjectPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class ObjectPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(ObjectPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ObjectPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ObjectPermissionsDeserializer extends JsonDeserializer { + @Override + public ObjectPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us 
in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ObjectPermissionsPb pb = mapper.readValue(p, ObjectPermissionsPb.class); + return ObjectPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissionsPb.java new file mode 100755 index 000000000..6c767a612 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ObjectPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ObjectPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public ObjectPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ObjectPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ObjectPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ObjectPermissionsPb that = (ObjectPermissionsPb) o; + return Objects.equals(accessControlList, 
that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(ObjectPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java index 8d3ad7796..19a1ed2a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PartialUpdate.PartialUpdateSerializer.class) +@JsonDeserialize(using = PartialUpdate.PartialUpdateDeserializer.class) public class PartialUpdate { /** Unique ID in the Databricks workspace. 
*/ - @JsonIgnore private String id; + private String id; /** */ - @JsonProperty("Operations") private Collection operations; /** The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. */ - @JsonProperty("schemas") private Collection schemas; public PartialUpdate setId(String id) { @@ -72,4 +80,41 @@ public String toString() { .add("schemas", schemas) .toString(); } + + PartialUpdatePb toPb() { + PartialUpdatePb pb = new PartialUpdatePb(); + pb.setId(id); + pb.setOperations(operations); + pb.setSchemas(schemas); + + return pb; + } + + static PartialUpdate fromPb(PartialUpdatePb pb) { + PartialUpdate model = new PartialUpdate(); + model.setId(pb.getId()); + model.setOperations(pb.getOperations()); + model.setSchemas(pb.getSchemas()); + + return model; + } + + public static class PartialUpdateSerializer extends JsonSerializer { + @Override + public void serialize(PartialUpdate value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PartialUpdatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PartialUpdateDeserializer extends JsonDeserializer { + @Override + public PartialUpdate deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PartialUpdatePb pb = mapper.readValue(p, PartialUpdatePb.class); + return PartialUpdate.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdatePb.java new file mode 100755 index 000000000..606bf0578 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdatePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PartialUpdatePb { + @JsonIgnore private String id; + + @JsonProperty("Operations") + private Collection operations; + + @JsonProperty("schemas") + private Collection schemas; + + public PartialUpdatePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public PartialUpdatePb setOperations(Collection operations) { + this.operations = operations; + return this; + } + + public Collection getOperations() { + return operations; + } + + public PartialUpdatePb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PartialUpdatePb that = (PartialUpdatePb) o; + return Objects.equals(id, that.id) + && Objects.equals(operations, that.operations) + && Objects.equals(schemas, that.schemas); + } + + @Override + public int hashCode() { + return Objects.hash(id, operations, schemas); + } + + @Override + public String toString() { + return new ToStringer(PartialUpdatePb.class) + .add("id", id) + .add("operations", operations) + .add("schemas", schemas) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java index b9558a0c7..e60ec4eee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PasswordAccessControlRequest.PasswordAccessControlRequestSerializer.class) +@JsonDeserialize( + using = PasswordAccessControlRequest.PasswordAccessControlRequestDeserializer.class) public class PasswordAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private PasswordPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public PasswordAccessControlRequest setGroupName(String groupName) { @@ -86,4 +94,47 @@ public String toString() { .add("userName", userName) .toString(); } + + PasswordAccessControlRequestPb toPb() { + PasswordAccessControlRequestPb pb = new PasswordAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static PasswordAccessControlRequest 
fromPb(PasswordAccessControlRequestPb pb) { + PasswordAccessControlRequest model = new PasswordAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class PasswordAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + PasswordAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PasswordAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public PasswordAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordAccessControlRequestPb pb = mapper.readValue(p, PasswordAccessControlRequestPb.class); + return PasswordAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequestPb.java new file mode 100755 index 000000000..a3341e105 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PasswordAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private PasswordPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public PasswordAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public PasswordAccessControlRequestPb setPermissionLevel( + PasswordPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PasswordPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public PasswordAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public PasswordAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordAccessControlRequestPb that = (PasswordAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } 
+ + @Override + public String toString() { + return new ToStringer(PasswordAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponse.java index b660079af..f40200f74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponse.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PasswordAccessControlResponse.PasswordAccessControlResponseSerializer.class) +@JsonDeserialize( + using = PasswordAccessControlResponse.PasswordAccessControlResponseDeserializer.class) public class PasswordAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. 
*/ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public PasswordAccessControlResponse setAllPermissions( @@ -103,4 +110,50 @@ public String toString() { .add("userName", userName) .toString(); } + + PasswordAccessControlResponsePb toPb() { + PasswordAccessControlResponsePb pb = new PasswordAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static PasswordAccessControlResponse fromPb(PasswordAccessControlResponsePb pb) { + PasswordAccessControlResponse model = new PasswordAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class PasswordAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + PasswordAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PasswordAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public PasswordAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordAccessControlResponsePb pb = + mapper.readValue(p, PasswordAccessControlResponsePb.class); + return PasswordAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponsePb.java new file mode 100755 index 000000000..185bea182 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PasswordAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public PasswordAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public PasswordAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public PasswordAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public PasswordAccessControlResponsePb 
setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public PasswordAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordAccessControlResponsePb that = (PasswordAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(PasswordAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermission.java index dca28d798..8918063d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PasswordPermission.PasswordPermissionSerializer.class) +@JsonDeserialize(using = PasswordPermission.PasswordPermissionDeserializer.class) public class PasswordPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private PasswordPermissionLevel permissionLevel; public PasswordPermission setInherited(Boolean inherited) { @@ -72,4 +80,42 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + PasswordPermissionPb toPb() { + PasswordPermissionPb pb = new PasswordPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PasswordPermission fromPb(PasswordPermissionPb pb) { + PasswordPermission model = new PasswordPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PasswordPermissionSerializer extends JsonSerializer { + @Override + public void serialize(PasswordPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + 
} + + public static class PasswordPermissionDeserializer extends JsonDeserializer { + @Override + public PasswordPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordPermissionPb pb = mapper.readValue(p, PasswordPermissionPb.class); + return PasswordPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionPb.java new file mode 100755 index 000000000..b3dcb7e50 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PasswordPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private PasswordPermissionLevel permissionLevel; + + public PasswordPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public PasswordPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public PasswordPermissionPb setPermissionLevel(PasswordPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + 
return this; + } + + public PasswordPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordPermissionPb that = (PasswordPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PasswordPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissions.java index 47c509916..459547cff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PasswordPermissions.PasswordPermissionsSerializer.class) +@JsonDeserialize(using = PasswordPermissions.PasswordPermissionsDeserializer.class) public class PasswordPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public PasswordPermissions setAccessControlList( @@ -73,4 +81,43 @@ public String toString() { .add("objectType", objectType) .toString(); } + + PasswordPermissionsPb toPb() { + PasswordPermissionsPb pb = new PasswordPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static PasswordPermissions fromPb(PasswordPermissionsPb pb) { + PasswordPermissions model = new PasswordPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class PasswordPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(PasswordPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PasswordPermissionsDeserializer + extends JsonDeserializer { + @Override + public PasswordPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordPermissionsPb pb = mapper.readValue(p, PasswordPermissionsPb.class); + return PasswordPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescription.java index 166becc50..c2877b3a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescription.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = PasswordPermissionsDescription.PasswordPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = PasswordPermissionsDescription.PasswordPermissionsDescriptionDeserializer.class) public class PasswordPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private PasswordPermissionLevel permissionLevel; public PasswordPermissionsDescription setDescription(String description) { @@ -57,4 +68,44 @@ public String toString() { .add("permissionLevel", permissionLevel) 
.toString(); } + + PasswordPermissionsDescriptionPb toPb() { + PasswordPermissionsDescriptionPb pb = new PasswordPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PasswordPermissionsDescription fromPb(PasswordPermissionsDescriptionPb pb) { + PasswordPermissionsDescription model = new PasswordPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PasswordPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + PasswordPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PasswordPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public PasswordPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordPermissionsDescriptionPb pb = + mapper.readValue(p, PasswordPermissionsDescriptionPb.class); + return PasswordPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescriptionPb.java new file mode 100755 index 000000000..15a0d68a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PasswordPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private PasswordPermissionLevel permissionLevel; + + public PasswordPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PasswordPermissionsDescriptionPb setPermissionLevel( + PasswordPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PasswordPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordPermissionsDescriptionPb that = (PasswordPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PasswordPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsPb.java new file mode 100755 index 000000000..7dac958d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PasswordPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public PasswordPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public PasswordPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public PasswordPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordPermissionsPb that = (PasswordPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(PasswordPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java index 
050918b43..daa56d4b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PasswordPermissionsRequest.PasswordPermissionsRequestSerializer.class) +@JsonDeserialize(using = PasswordPermissionsRequest.PasswordPermissionsRequestDeserializer.class) public class PasswordPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; public PasswordPermissionsRequest setAccessControlList( @@ -43,4 +53,41 @@ public String toString() { .add("accessControlList", accessControlList) .toString(); } + + PasswordPermissionsRequestPb toPb() { + PasswordPermissionsRequestPb pb = new PasswordPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + + return pb; + } + + static PasswordPermissionsRequest fromPb(PasswordPermissionsRequestPb pb) { + PasswordPermissionsRequest model = new PasswordPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + + return model; + } + + public static class PasswordPermissionsRequestSerializer + extends JsonSerializer { + @Override 
+ public void serialize( + PasswordPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PasswordPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PasswordPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public PasswordPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PasswordPermissionsRequestPb pb = mapper.readValue(p, PasswordPermissionsRequestPb.class); + return PasswordPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequestPb.java new file mode 100755 index 000000000..723ce93c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PasswordPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + public PasswordPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PasswordPermissionsRequestPb that = (PasswordPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList); + } + + @Override + public String toString() { + return new ToStringer(PasswordPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java index 4c0600981..2a7fff436 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Patch.PatchSerializer.class) +@JsonDeserialize(using = Patch.PatchDeserializer.class) public class Patch { /** Type of patch operation. */ - @JsonProperty("op") private PatchOp op; /** Selection of patch operation */ - @JsonProperty("path") private String path; /** Value to modify */ - @JsonProperty("value") private Object value; public Patch setOp(PatchOp op) { @@ -71,4 +79,41 @@ public String toString() { .add("value", value) .toString(); } + + PatchPb toPb() { + PatchPb pb = new PatchPb(); + pb.setOp(op); + pb.setPath(path); + pb.setValue(value); + + return pb; + } + + static Patch fromPb(PatchPb pb) { + Patch model = new Patch(); + model.setOp(pb.getOp()); + model.setPath(pb.getPath()); + model.setValue(pb.getValue()); + + return model; + } + + public static class PatchSerializer extends JsonSerializer { + @Override + public void serialize(Patch value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchDeserializer extends JsonDeserializer { + @Override + public Patch deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchPb pb = mapper.readValue(p, PatchPb.class); + return Patch.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchPb.java new file mode 100755 index 000000000..f3524973b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PatchPb { + @JsonProperty("op") + private PatchOp op; + + @JsonProperty("path") + private String path; + + @JsonProperty("value") + private Object value; + + public PatchPb setOp(PatchOp op) { + this.op = op; + return this; + } + + public PatchOp getOp() { + return op; + } + + public PatchPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public PatchPb setValue(Object value) { + this.value = value; + return this; + } + + public Object getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PatchPb that = (PatchPb) o; + return Objects.equals(op, that.op) + && Objects.equals(path, that.path) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(op, path, value); + } + + @Override + public String toString() { + return new ToStringer(PatchPb.class) + .add("op", op) + .add("path", path) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java index c5d81f464..75464bb63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PatchResponse.PatchResponseSerializer.class) +@JsonDeserialize(using = PatchResponse.PatchResponseDeserializer.class) public class PatchResponse { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(PatchResponse.class).toString(); } + + PatchResponsePb toPb() { + PatchResponsePb pb = new PatchResponsePb(); + + return pb; + } + + static PatchResponse fromPb(PatchResponsePb pb) { + PatchResponse model = new PatchResponse(); + + return model; + } + + public static class PatchResponseSerializer extends JsonSerializer { + @Override + public void serialize(PatchResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchResponseDeserializer extends JsonDeserializer { + @Override + public PatchResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchResponsePb pb = mapper.readValue(p, PatchResponsePb.class); + return PatchResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponsePb.java new file mode 100755 index 000000000..8ed025cff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PatchResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PatchResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java index 71509c72f..8d06bc8a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Permission.PermissionSerializer.class) +@JsonDeserialize(using = Permission.PermissionDeserializer.class) public class Permission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; public Permission setInherited(Boolean inherited) { @@ -72,4 +80,41 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + PermissionPb toPb() { + PermissionPb pb = new PermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static Permission fromPb(PermissionPb pb) { + Permission model = new Permission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PermissionSerializer extends JsonSerializer { + @Override + public void serialize(Permission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionDeserializer extends JsonDeserializer { + @Override + public Permission deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionPb pb = mapper.readValue(p, PermissionPb.class); + return Permission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java index 5965a5829..873b910b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -13,17 +22,16 @@ * for user consumption. */ @Generated +@JsonSerialize(using = PermissionAssignment.PermissionAssignmentSerializer.class) +@JsonDeserialize(using = PermissionAssignment.PermissionAssignmentDeserializer.class) public class PermissionAssignment { /** Error response associated with a workspace permission assignment, if any. */ - @JsonProperty("error") private String error; /** The permissions level of the principal. */ - @JsonProperty("permissions") private Collection permissions; /** Information about the principal assigned to the workspace. 
*/ - @JsonProperty("principal") private PrincipalOutput principal; public PermissionAssignment setError(String error) { @@ -76,4 +84,44 @@ public String toString() { .add("principal", principal) .toString(); } + + PermissionAssignmentPb toPb() { + PermissionAssignmentPb pb = new PermissionAssignmentPb(); + pb.setError(error); + pb.setPermissions(permissions); + pb.setPrincipal(principal); + + return pb; + } + + static PermissionAssignment fromPb(PermissionAssignmentPb pb) { + PermissionAssignment model = new PermissionAssignment(); + model.setError(pb.getError()); + model.setPermissions(pb.getPermissions()); + model.setPrincipal(pb.getPrincipal()); + + return model; + } + + public static class PermissionAssignmentSerializer extends JsonSerializer { + @Override + public void serialize( + PermissionAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionAssignmentDeserializer + extends JsonDeserializer { + @Override + public PermissionAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionAssignmentPb pb = mapper.readValue(p, PermissionAssignmentPb.class); + return PermissionAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentPb.java new file mode 100755 index 000000000..f7d3dca03 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The output format for existing workspace PermissionAssignment records, which contains some info + * for user consumption. + */ +@Generated +class PermissionAssignmentPb { + @JsonProperty("error") + private String error; + + @JsonProperty("permissions") + private Collection permissions; + + @JsonProperty("principal") + private PrincipalOutput principal; + + public PermissionAssignmentPb setError(String error) { + this.error = error; + return this; + } + + public String getError() { + return error; + } + + public PermissionAssignmentPb setPermissions(Collection permissions) { + this.permissions = permissions; + return this; + } + + public Collection getPermissions() { + return permissions; + } + + public PermissionAssignmentPb setPrincipal(PrincipalOutput principal) { + this.principal = principal; + return this; + } + + public PrincipalOutput getPrincipal() { + return principal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionAssignmentPb that = (PermissionAssignmentPb) o; + return Objects.equals(error, that.error) + && Objects.equals(permissions, that.permissions) + && Objects.equals(principal, that.principal); + } + + @Override + public int hashCode() { + return Objects.hash(error, permissions, principal); + } + + @Override + public String toString() { + return new ToStringer(PermissionAssignmentPb.class) + .add("error", error) + .add("permissions", permissions) + .add("principal", principal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignments.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignments.java index 75970c4ec..234a0dfba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignments.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignments.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionAssignments.PermissionAssignmentsSerializer.class) +@JsonDeserialize(using = PermissionAssignments.PermissionAssignmentsDeserializer.class) public class PermissionAssignments { /** Array of permissions assignments defined for a workspace. 
*/ - @JsonProperty("permission_assignments") private Collection permissionAssignments; public PermissionAssignments setPermissionAssignments( @@ -43,4 +53,41 @@ public String toString() { .add("permissionAssignments", permissionAssignments) .toString(); } + + PermissionAssignmentsPb toPb() { + PermissionAssignmentsPb pb = new PermissionAssignmentsPb(); + pb.setPermissionAssignments(permissionAssignments); + + return pb; + } + + static PermissionAssignments fromPb(PermissionAssignmentsPb pb) { + PermissionAssignments model = new PermissionAssignments(); + model.setPermissionAssignments(pb.getPermissionAssignments()); + + return model; + } + + public static class PermissionAssignmentsSerializer + extends JsonSerializer { + @Override + public void serialize( + PermissionAssignments value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionAssignmentsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionAssignmentsDeserializer + extends JsonDeserializer { + @Override + public PermissionAssignments deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionAssignmentsPb pb = mapper.readValue(p, PermissionAssignmentsPb.class); + return PermissionAssignments.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentsPb.java new file mode 100755 index 000000000..9c0d065be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignmentsPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PermissionAssignmentsPb { + @JsonProperty("permission_assignments") + private Collection permissionAssignments; + + public PermissionAssignmentsPb setPermissionAssignments( + Collection permissionAssignments) { + this.permissionAssignments = permissionAssignments; + return this; + } + + public Collection getPermissionAssignments() { + return permissionAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionAssignmentsPb that = (PermissionAssignmentsPb) o; + return Objects.equals(permissionAssignments, that.permissionAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(permissionAssignments); + } + + @Override + public String toString() { + return new ToStringer(PermissionAssignmentsPb.class) + .add("permissionAssignments", permissionAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java index ef88fff0e..96c829663 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java @@ -21,7 +21,7 @@ public MigratePermissionsResponse migratePermissions(MigratePermissionsRequest r String path = "/api/2.0/permissionmigration"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, MigratePermissionsResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutput.java index 9da49c4ce..3ae20fd5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutput.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionOutput.PermissionOutputSerializer.class) +@JsonDeserialize(using = PermissionOutput.PermissionOutputDeserializer.class) public class PermissionOutput { /** The results of a permissions query. 
*/ - @JsonProperty("description") private String description; /** */ - @JsonProperty("permission_level") private WorkspacePermission permissionLevel; public PermissionOutput setDescription(String description) { @@ -56,4 +65,40 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + PermissionOutputPb toPb() { + PermissionOutputPb pb = new PermissionOutputPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PermissionOutput fromPb(PermissionOutputPb pb) { + PermissionOutput model = new PermissionOutput(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PermissionOutputSerializer extends JsonSerializer { + @Override + public void serialize(PermissionOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionOutputDeserializer extends JsonDeserializer { + @Override + public PermissionOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionOutputPb pb = mapper.readValue(p, PermissionOutputPb.class); + return PermissionOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutputPb.java new file mode 100755 index 000000000..13f78a38d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutputPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PermissionOutputPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private WorkspacePermission permissionLevel; + + public PermissionOutputPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PermissionOutputPb setPermissionLevel(WorkspacePermission permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WorkspacePermission getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionOutputPb that = (PermissionOutputPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PermissionOutputPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionPb.java new file mode 100755 index 000000000..ff545088e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + public PermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public PermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public PermissionPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionPb that = (PermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java index 87ef5a99a..632ffc7e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionsDescription.PermissionsDescriptionSerializer.class) +@JsonDeserialize(using = PermissionsDescription.PermissionsDescriptionDeserializer.class) public class PermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; public PermissionsDescription setDescription(String description) { @@ -56,4 +65,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + PermissionsDescriptionPb toPb() { + PermissionsDescriptionPb pb = new PermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PermissionsDescription fromPb(PermissionsDescriptionPb pb) { + PermissionsDescription model = new PermissionsDescription(); + 
model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + PermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public PermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionsDescriptionPb pb = mapper.readValue(p, PermissionsDescriptionPb.class); + return PermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescriptionPb.java new file mode 100755 index 000000000..2480fda54 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + public PermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PermissionsDescriptionPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionsDescriptionPb that = (PermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java index 379ed77ad..5bc99509b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java @@ -24,7 +24,7 @@ public ObjectPermissions get(GetPermissionRequest request) { request.getRequestObjectType(), request.getRequestObjectId()); 
try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ObjectPermissions.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques request.getRequestObjectType(), request.getRequestObjectId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPermissionLevelsResponse.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public ObjectPermissions set(SetObjectPermissions request) { request.getRequestObjectType(), request.getRequestObjectId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ObjectPermissions.class); @@ -73,7 +73,7 @@ public ObjectPermissions update(UpdateObjectPermissions request) { request.getRequestObjectType(), request.getRequestObjectId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ObjectPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java index 15cbe11ec..6fb262a6e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java @@ 
-4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Information about the principal assigned to the workspace. */ @Generated +@JsonSerialize(using = PrincipalOutput.PrincipalOutputSerializer.class) +@JsonDeserialize(using = PrincipalOutput.PrincipalOutputDeserializer.class) public class PrincipalOutput { /** The display name of the principal. */ - @JsonProperty("display_name") private String displayName; /** The group name of the group. Present only if the principal is a group. */ - @JsonProperty("group_name") private String groupName; /** The unique, opaque id of the principal. */ - @JsonProperty("principal_id") private Long principalId; /** The name of the service principal. Present only if the principal is a service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** The username of the user. Present only if the principal is a user. 
*/ - @JsonProperty("user_name") private String userName; public PrincipalOutput setDisplayName(String displayName) { @@ -102,4 +108,46 @@ public String toString() { .add("userName", userName) .toString(); } + + PrincipalOutputPb toPb() { + PrincipalOutputPb pb = new PrincipalOutputPb(); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setPrincipalId(principalId); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static PrincipalOutput fromPb(PrincipalOutputPb pb) { + PrincipalOutput model = new PrincipalOutput(); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setPrincipalId(pb.getPrincipalId()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class PrincipalOutputSerializer extends JsonSerializer { + @Override + public void serialize(PrincipalOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PrincipalOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PrincipalOutputDeserializer extends JsonDeserializer { + @Override + public PrincipalOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PrincipalOutputPb pb = mapper.readValue(p, PrincipalOutputPb.class); + return PrincipalOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutputPb.java new file mode 100755 index 000000000..86374f532 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutputPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Information about the principal assigned to the workspace. */ +@Generated +class PrincipalOutputPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("principal_id") + private Long principalId; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public PrincipalOutputPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public PrincipalOutputPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public PrincipalOutputPb setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public PrincipalOutputPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { 
+ return servicePrincipalName; + } + + public PrincipalOutputPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PrincipalOutputPb that = (PrincipalOutputPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(principalId, that.principalId) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, groupName, principalId, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(PrincipalOutputPb.class) + .add("displayName", displayName) + .add("groupName", groupName) + .add("principalId", principalId) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java index ea6f7556b..cc59036ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java @@ -3,26 +3,30 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ResourceInfo.ResourceInfoSerializer.class) +@JsonDeserialize(using = ResourceInfo.ResourceInfoDeserializer.class) public class ResourceInfo { /** Id of the current resource. */ - @JsonProperty("id") - @QueryParam("id") private String id; /** The legacy acl path of the current resource. */ - @JsonProperty("legacy_acl_path") - @QueryParam("legacy_acl_path") private String legacyAclPath; /** Parent resource info for the current resource. The parent may have another parent. */ - @JsonProperty("parent_resource_info") - @QueryParam("parent_resource_info") private ResourceInfo parentResourceInfo; public ResourceInfo setId(String id) { @@ -75,4 +79,41 @@ public String toString() { .add("parentResourceInfo", parentResourceInfo) .toString(); } + + ResourceInfoPb toPb() { + ResourceInfoPb pb = new ResourceInfoPb(); + pb.setId(id); + pb.setLegacyAclPath(legacyAclPath); + pb.setParentResourceInfo(parentResourceInfo); + + return pb; + } + + static ResourceInfo fromPb(ResourceInfoPb pb) { + ResourceInfo model = new ResourceInfo(); + model.setId(pb.getId()); + model.setLegacyAclPath(pb.getLegacyAclPath()); + model.setParentResourceInfo(pb.getParentResourceInfo()); + + return model; + } + + public static class ResourceInfoSerializer extends JsonSerializer { + @Override + public void serialize(ResourceInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResourceInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResourceInfoDeserializer extends JsonDeserializer { + @Override + public ResourceInfo deserialize(JsonParser p, 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of {@link ResourceInfo}: it carries the Jackson and query-param annotations so
 * the public model class can stay annotation-free and delegate (de)serialization here via
 * {@code toPb}/{@code fromPb}.
 */
@Generated
class ResourceInfoPb {
  /** Id of the current resource. */
  @JsonProperty("id")
  @QueryParam("id")
  private String id;

  /** The legacy acl path of the current resource. */
  @JsonProperty("legacy_acl_path")
  @QueryParam("legacy_acl_path")
  private String legacyAclPath;

  /** Parent resource info for the current resource. The parent may have another parent. */
  @JsonProperty("parent_resource_info")
  @QueryParam("parent_resource_info")
  private ResourceInfo parentResourceInfo;

  public ResourceInfoPb setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  public ResourceInfoPb setLegacyAclPath(String legacyAclPath) {
    this.legacyAclPath = legacyAclPath;
    return this;
  }

  public String getLegacyAclPath() {
    return legacyAclPath;
  }

  public ResourceInfoPb setParentResourceInfo(ResourceInfo parentResourceInfo) {
    this.parentResourceInfo = parentResourceInfo;
    return this;
  }

  public ResourceInfo getParentResourceInfo() {
    return parentResourceInfo;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ResourceInfoPb other = (ResourceInfoPb) o;
    return Objects.equals(id, other.id)
        && Objects.equals(legacyAclPath, other.legacyAclPath)
        && Objects.equals(parentResourceInfo, other.parentResourceInfo);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, legacyAclPath, parentResourceInfo);
  }

  @Override
  public String toString() {
    return new ToStringer(ResourceInfoPb.class)
        .add("id", id)
        .add("legacyAclPath", legacyAclPath)
        .add("parentResourceInfo", parentResourceInfo)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of {@link ResourceMeta}: carries the Jackson annotations used by the model's
 * custom serializer/deserializer.
 */
@Generated
class ResourceMetaPb {
  /**
   * Identifier for group type. Can be local workspace group ({@code WorkspaceGroup}) or account
   * group ({@code Group}).
   */
  @JsonProperty("resourceType")
  private String resourceType;

  public ResourceMetaPb setResourceType(String resourceType) {
    this.resourceType = resourceType;
    return this;
  }

  public String getResourceType() {
    return resourceType;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ResourceMetaPb other = (ResourceMetaPb) o;
    return Objects.equals(resourceType, other.resourceType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(resourceType);
  }

  @Override
  public String toString() {
    return new ToStringer(ResourceMetaPb.class).add("resourceType", resourceType).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of {@link Role}: carries the Jackson annotations used by the model's custom
 * serializer/deserializer.
 */
@Generated
class RolePb {
  /** Role to assign to a principal or a list of principals on a resource. */
  @JsonProperty("name")
  private String name;

  public RolePb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    RolePb other = (RolePb) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(RolePb.class).add("name", name).toString();
  }
}
java.util.Objects; @Generated +@JsonSerialize(using = RuleSetResponse.RuleSetResponseSerializer.class) +@JsonDeserialize(using = RuleSetResponse.RuleSetResponseDeserializer.class) public class RuleSetResponse { /** * Identifies the version of the rule set returned. Etag used for versioning. The response is at @@ -19,15 +30,12 @@ public class RuleSetResponse { * request, and pass it with the PUT update request to identify the rule set version you are * updating. */ - @JsonProperty("etag") private String etag; /** */ - @JsonProperty("grant_rules") private Collection grantRules; /** Name of the rule set. */ - @JsonProperty("name") private String name; public RuleSetResponse setEtag(String etag) { @@ -80,4 +88,42 @@ public String toString() { .add("name", name) .toString(); } + + RuleSetResponsePb toPb() { + RuleSetResponsePb pb = new RuleSetResponsePb(); + pb.setEtag(etag); + pb.setGrantRules(grantRules); + pb.setName(name); + + return pb; + } + + static RuleSetResponse fromPb(RuleSetResponsePb pb) { + RuleSetResponse model = new RuleSetResponse(); + model.setEtag(pb.getEtag()); + model.setGrantRules(pb.getGrantRules()); + model.setName(pb.getName()); + + return model; + } + + public static class RuleSetResponseSerializer extends JsonSerializer { + @Override + public void serialize(RuleSetResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RuleSetResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RuleSetResponseDeserializer extends JsonDeserializer { + @Override + public RuleSetResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format twin of {@link RuleSetResponse}: carries the Jackson annotations used by the model's
 * custom serializer/deserializer.
 */
@Generated
class RuleSetResponsePb {
  /**
   * Etag identifying the version of the rule set; pass it back with the PUT update request to
   * identify the rule set version being updated.
   */
  @JsonProperty("etag")
  private String etag;

  // Type argument restored: the raw `Collection` lost the element type during extraction.
  @JsonProperty("grant_rules")
  private Collection<GrantRule> grantRules;

  /** Name of the rule set. */
  @JsonProperty("name")
  private String name;

  public RuleSetResponsePb setEtag(String etag) {
    this.etag = etag;
    return this;
  }

  public String getEtag() {
    return etag;
  }

  public RuleSetResponsePb setGrantRules(Collection<GrantRule> grantRules) {
    this.grantRules = grantRules;
    return this;
  }

  public Collection<GrantRule> getGrantRules() {
    return grantRules;
  }

  public RuleSetResponsePb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    RuleSetResponsePb that = (RuleSetResponsePb) o;
    return Objects.equals(etag, that.etag)
        && Objects.equals(grantRules, that.grantRules)
        && Objects.equals(name, that.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(etag, grantRules, name);
  }

  @Override
  public String toString() {
    return new ToStringer(RuleSetResponsePb.class)
        .add("etag", etag)
        .add("grantRules", grantRules)
        .add("name", name)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format twin of {@link RuleSetUpdateRequest}: carries the Jackson annotations used by the
 * model's custom serializer/deserializer.
 */
@Generated
class RuleSetUpdateRequestPb {
  /**
   * Etag identifying the version of the rule set; pass it back with the PUT update request to
   * identify the rule set version being updated.
   */
  @JsonProperty("etag")
  private String etag;

  // Type argument restored: the raw `Collection` lost the element type during extraction.
  @JsonProperty("grant_rules")
  private Collection<GrantRule> grantRules;

  /** Name of the rule set. */
  @JsonProperty("name")
  private String name;

  public RuleSetUpdateRequestPb setEtag(String etag) {
    this.etag = etag;
    return this;
  }

  public String getEtag() {
    return etag;
  }

  public RuleSetUpdateRequestPb setGrantRules(Collection<GrantRule> grantRules) {
    this.grantRules = grantRules;
    return this;
  }

  public Collection<GrantRule> getGrantRules() {
    return grantRules;
  }

  public RuleSetUpdateRequestPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    RuleSetUpdateRequestPb that = (RuleSetUpdateRequestPb) o;
    return Objects.equals(etag, that.etag)
        && Objects.equals(grantRules, that.grantRules)
        && Objects.equals(name, that.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(etag, grantRules, name);
  }

  @Override
  public String toString() {
    return new ToStringer(RuleSetUpdateRequestPb.class)
        .add("etag", etag)
        .add("grantRules", grantRules)
        .add("name", name)
        .toString();
  }
}
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ServicePrincipal.ServicePrincipalSerializer.class) +@JsonDeserialize(using = ServicePrincipal.ServicePrincipalDeserializer.class) public class ServicePrincipal { /** If this user is active */ - @JsonProperty("active") private Boolean active; /** UUID relating to the service principal */ - @JsonProperty("applicationId") private String applicationId; /** String that represents a concatenation of given and family names. */ - @JsonProperty("displayName") private String displayName; /** @@ -29,27 +37,21 @@ public class ServicePrincipal { *

[assigning entitlements]: * https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements */ - @JsonProperty("entitlements") private Collection entitlements; /** */ - @JsonProperty("externalId") private String externalId; /** */ - @JsonProperty("groups") private Collection groups; /** Databricks service principal ID. */ - @JsonProperty("id") private String id; /** Corresponds to AWS instance profile/arn role. */ - @JsonProperty("roles") private Collection roles; /** The schema of the List response. */ - @JsonProperty("schemas") private Collection schemas; public ServicePrincipal setActive(Boolean active) { @@ -169,4 +171,54 @@ public String toString() { .add("schemas", schemas) .toString(); } + + ServicePrincipalPb toPb() { + ServicePrincipalPb pb = new ServicePrincipalPb(); + pb.setActive(active); + pb.setApplicationId(applicationId); + pb.setDisplayName(displayName); + pb.setEntitlements(entitlements); + pb.setExternalId(externalId); + pb.setGroups(groups); + pb.setId(id); + pb.setRoles(roles); + pb.setSchemas(schemas); + + return pb; + } + + static ServicePrincipal fromPb(ServicePrincipalPb pb) { + ServicePrincipal model = new ServicePrincipal(); + model.setActive(pb.getActive()); + model.setApplicationId(pb.getApplicationId()); + model.setDisplayName(pb.getDisplayName()); + model.setEntitlements(pb.getEntitlements()); + model.setExternalId(pb.getExternalId()); + model.setGroups(pb.getGroups()); + model.setId(pb.getId()); + model.setRoles(pb.getRoles()); + model.setSchemas(pb.getSchemas()); + + return model; + } + + public static class ServicePrincipalSerializer extends JsonSerializer { + @Override + public void serialize(ServicePrincipal value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServicePrincipalPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServicePrincipalDeserializer extends JsonDeserializer { + @Override + public ServicePrincipal 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.iam;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format twin of {@link ServicePrincipal}: carries the Jackson annotations used by the
 * model's custom serializer/deserializer. Collection type arguments were restored where the raw
 * {@code Collection} lost them during extraction — element types inferred from the sibling iam
 * model classes; confirm against the generator output.
 */
@Generated
class ServicePrincipalPb {
  /** If this user is active */
  @JsonProperty("active")
  private Boolean active;

  /** UUID relating to the service principal */
  @JsonProperty("applicationId")
  private String applicationId;

  /** String that represents a concatenation of given and family names. */
  @JsonProperty("displayName")
  private String displayName;

  /** Entitlements assigned to the service principal. */
  @JsonProperty("entitlements")
  private Collection<ComplexValue> entitlements;

  @JsonProperty("externalId")
  private String externalId;

  @JsonProperty("groups")
  private Collection<ComplexValue> groups;

  /** Databricks service principal ID. */
  @JsonProperty("id")
  private String id;

  /** Corresponds to AWS instance profile/arn role. */
  @JsonProperty("roles")
  private Collection<ComplexValue> roles;

  /** The schema of the List response. */
  @JsonProperty("schemas")
  private Collection<ServicePrincipalSchema> schemas;

  public ServicePrincipalPb setActive(Boolean active) {
    this.active = active;
    return this;
  }

  public Boolean getActive() {
    return active;
  }

  public ServicePrincipalPb setApplicationId(String applicationId) {
    this.applicationId = applicationId;
    return this;
  }

  public String getApplicationId() {
    return applicationId;
  }

  public ServicePrincipalPb setDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  public String getDisplayName() {
    return displayName;
  }

  public ServicePrincipalPb setEntitlements(Collection<ComplexValue> entitlements) {
    this.entitlements = entitlements;
    return this;
  }

  public Collection<ComplexValue> getEntitlements() {
    return entitlements;
  }

  public ServicePrincipalPb setExternalId(String externalId) {
    this.externalId = externalId;
    return this;
  }

  public String getExternalId() {
    return externalId;
  }

  public ServicePrincipalPb setGroups(Collection<ComplexValue> groups) {
    this.groups = groups;
    return this;
  }

  public Collection<ComplexValue> getGroups() {
    return groups;
  }

  public ServicePrincipalPb setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  public ServicePrincipalPb setRoles(Collection<ComplexValue> roles) {
    this.roles = roles;
    return this;
  }

  public Collection<ComplexValue> getRoles() {
    return roles;
  }

  public ServicePrincipalPb setSchemas(Collection<ServicePrincipalSchema> schemas) {
    this.schemas = schemas;
    return this;
  }

  public Collection<ServicePrincipalSchema> getSchemas() {
    return schemas;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ServicePrincipalPb that = (ServicePrincipalPb) o;
    return Objects.equals(active, that.active)
        && Objects.equals(applicationId, that.applicationId)
        && Objects.equals(displayName, that.displayName)
        && Objects.equals(entitlements, that.entitlements)
        && Objects.equals(externalId, that.externalId)
        && Objects.equals(groups, that.groups)
        && Objects.equals(id, that.id)
        && Objects.equals(roles, that.roles)
        && Objects.equals(schemas, that.schemas);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        active, applicationId, displayName, entitlements, externalId, groups, id, roles, schemas);
  }

  @Override
  public String toString() {
    return new ToStringer(ServicePrincipalPb.class)
        .add("active", active)
        .add("applicationId", applicationId)
        .add("displayName", displayName)
        .add("entitlements", entitlements)
        .add("externalId", externalId)
        .add("groups", groups)
        .add("id", id)
        .add("roles", roles)
        .add("schemas", schemas)
        .toString();
  }
}
String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public ServicePrincipal get(GetServicePrincipalRequest request) { String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ServicePrincipal.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) { String path = "/api/2.0/preview/scim/v2/ServicePrincipals"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListServicePrincipalResponse.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public void patch(PartialUpdate request) { String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public void update(ServicePrincipal request) { String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", 
"application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java index 0f3b404da..fcaa47666 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SetObjectPermissions.SetObjectPermissionsSerializer.class) +@JsonDeserialize(using = SetObjectPermissions.SetObjectPermissionsDeserializer.class) public class SetObjectPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The id of the request object. */ - @JsonIgnore private String requestObjectId; + private String requestObjectId; /** * The type of the request object. 
Can be one of the following: alerts, authorization, clusters, @@ -24,7 +33,7 @@ public class SetObjectPermissions { * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ - @JsonIgnore private String requestObjectType; + private String requestObjectType; public SetObjectPermissions setAccessControlList( Collection accessControlList) { @@ -77,4 +86,44 @@ public String toString() { .add("requestObjectType", requestObjectType) .toString(); } + + SetObjectPermissionsPb toPb() { + SetObjectPermissionsPb pb = new SetObjectPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setRequestObjectId(requestObjectId); + pb.setRequestObjectType(requestObjectType); + + return pb; + } + + static SetObjectPermissions fromPb(SetObjectPermissionsPb pb) { + SetObjectPermissions model = new SetObjectPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setRequestObjectId(pb.getRequestObjectId()); + model.setRequestObjectType(pb.getRequestObjectType()); + + return model; + } + + public static class SetObjectPermissionsSerializer extends JsonSerializer { + @Override + public void serialize( + SetObjectPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetObjectPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetObjectPermissionsDeserializer + extends JsonDeserializer { + @Override + public SetObjectPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetObjectPermissionsPb pb = mapper.readValue(p, SetObjectPermissionsPb.class); + return SetObjectPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissionsPb.java new file mode 100755 index 000000000..4da4dd6f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissionsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SetObjectPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String requestObjectId; + + @JsonIgnore private String requestObjectType; + + public SetObjectPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public SetObjectPermissionsPb setRequestObjectId(String requestObjectId) { + this.requestObjectId = requestObjectId; + return this; + } + + public String getRequestObjectId() { + return requestObjectId; + } + + public SetObjectPermissionsPb setRequestObjectType(String requestObjectType) { + this.requestObjectType = requestObjectType; + return this; + } + + public String getRequestObjectType() { + return requestObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
SetObjectPermissionsPb that = (SetObjectPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(requestObjectId, that.requestObjectId) + && Objects.equals(requestObjectType, that.requestObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, requestObjectId, requestObjectType); + } + + @Override + public String toString() { + return new ToStringer(SetObjectPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("requestObjectId", requestObjectId) + .add("requestObjectType", requestObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java index 21ce2f907..f90454814 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateObjectPermissions.UpdateObjectPermissionsSerializer.class) 
+@JsonDeserialize(using = UpdateObjectPermissions.UpdateObjectPermissionsDeserializer.class) public class UpdateObjectPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The id of the request object. */ - @JsonIgnore private String requestObjectId; + private String requestObjectId; /** * The type of the request object. Can be one of the following: alerts, authorization, clusters, @@ -24,7 +33,7 @@ public class UpdateObjectPermissions { * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ - @JsonIgnore private String requestObjectType; + private String requestObjectType; public UpdateObjectPermissions setAccessControlList( Collection accessControlList) { @@ -77,4 +86,45 @@ public String toString() { .add("requestObjectType", requestObjectType) .toString(); } + + UpdateObjectPermissionsPb toPb() { + UpdateObjectPermissionsPb pb = new UpdateObjectPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setRequestObjectId(requestObjectId); + pb.setRequestObjectType(requestObjectType); + + return pb; + } + + static UpdateObjectPermissions fromPb(UpdateObjectPermissionsPb pb) { + UpdateObjectPermissions model = new UpdateObjectPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setRequestObjectId(pb.getRequestObjectId()); + model.setRequestObjectType(pb.getRequestObjectType()); + + return model; + } + + public static class UpdateObjectPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateObjectPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateObjectPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateObjectPermissionsDeserializer + extends JsonDeserializer { + @Override + public UpdateObjectPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateObjectPermissionsPb pb = mapper.readValue(p, UpdateObjectPermissionsPb.class); + return UpdateObjectPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissionsPb.java new file mode 100755 index 000000000..77d0d9bf0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissionsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateObjectPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String requestObjectId; + + @JsonIgnore private String requestObjectType; + + public UpdateObjectPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public UpdateObjectPermissionsPb setRequestObjectId(String requestObjectId) { + this.requestObjectId = requestObjectId; + return this; + } + + public String getRequestObjectId() { + return requestObjectId; + } + + public UpdateObjectPermissionsPb setRequestObjectType(String requestObjectType) { + this.requestObjectType = requestObjectType; + return this; + } + + public String getRequestObjectType() { + return requestObjectType; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateObjectPermissionsPb that = (UpdateObjectPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(requestObjectId, that.requestObjectId) + && Objects.equals(requestObjectType, that.requestObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, requestObjectId, requestObjectType); + } + + @Override + public String toString() { + return new ToStringer(UpdateObjectPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("requestObjectId", requestObjectId) + .add("requestObjectType", requestObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java index 251e169d9..09503726d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public 
class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponsePb.java new file mode 100755 index 000000000..55d8cf2e5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java index b9ea7ac5b..3a468a608 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRuleSetRequest.UpdateRuleSetRequestSerializer.class) +@JsonDeserialize(using = UpdateRuleSetRequest.UpdateRuleSetRequestDeserializer.class) public class UpdateRuleSetRequest { /** Name of the rule set. 
*/ - @JsonProperty("name") private String name; /** */ - @JsonProperty("rule_set") private RuleSetUpdateRequest ruleSet; public UpdateRuleSetRequest setName(String name) { @@ -55,4 +64,42 @@ public String toString() { .add("ruleSet", ruleSet) .toString(); } + + UpdateRuleSetRequestPb toPb() { + UpdateRuleSetRequestPb pb = new UpdateRuleSetRequestPb(); + pb.setName(name); + pb.setRuleSet(ruleSet); + + return pb; + } + + static UpdateRuleSetRequest fromPb(UpdateRuleSetRequestPb pb) { + UpdateRuleSetRequest model = new UpdateRuleSetRequest(); + model.setName(pb.getName()); + model.setRuleSet(pb.getRuleSet()); + + return model; + } + + public static class UpdateRuleSetRequestSerializer extends JsonSerializer { + @Override + public void serialize( + UpdateRuleSetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRuleSetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRuleSetRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateRuleSetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRuleSetRequestPb pb = mapper.readValue(p, UpdateRuleSetRequestPb.class); + return UpdateRuleSetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequestPb.java new file mode 100755 index 000000000..fa1241428 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRuleSetRequestPb { + @JsonProperty("name") + private String name; + + @JsonProperty("rule_set") + private RuleSetUpdateRequest ruleSet; + + public UpdateRuleSetRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateRuleSetRequestPb setRuleSet(RuleSetUpdateRequest ruleSet) { + this.ruleSet = ruleSet; + return this; + } + + public RuleSetUpdateRequest getRuleSet() { + return ruleSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRuleSetRequestPb that = (UpdateRuleSetRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(ruleSet, that.ruleSet); + } + + @Override + public int hashCode() { + return Objects.hash(name, ruleSet); + } + + @Override + public String toString() { + return new ToStringer(UpdateRuleSetRequestPb.class) + .add("name", name) + .add("ruleSet", ruleSet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java index 4ee2e430c..15d71f0fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java @@ -4,12 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateWorkspaceAssignments.UpdateWorkspaceAssignmentsSerializer.class) +@JsonDeserialize(using = UpdateWorkspaceAssignments.UpdateWorkspaceAssignmentsDeserializer.class) public class UpdateWorkspaceAssignments { /** * Array of permissions assignments to update on the workspace. Valid values are "USER" and @@ -18,14 +28,13 @@ public class UpdateWorkspaceAssignments { * will have the same effect as providing an empty list, which will result in the deletion of all * permissions for the principal. */ - @JsonProperty("permissions") private Collection permissions; /** The ID of the user, service principal, or group. */ - @JsonIgnore private Long principalId; + private Long principalId; /** The workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public UpdateWorkspaceAssignments setPermissions(Collection permissions) { this.permissions = permissions; @@ -77,4 +86,45 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + UpdateWorkspaceAssignmentsPb toPb() { + UpdateWorkspaceAssignmentsPb pb = new UpdateWorkspaceAssignmentsPb(); + pb.setPermissions(permissions); + pb.setPrincipalId(principalId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static UpdateWorkspaceAssignments fromPb(UpdateWorkspaceAssignmentsPb pb) { + UpdateWorkspaceAssignments model = new UpdateWorkspaceAssignments(); + model.setPermissions(pb.getPermissions()); + model.setPrincipalId(pb.getPrincipalId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class UpdateWorkspaceAssignmentsSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceAssignments value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceAssignmentsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWorkspaceAssignmentsDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceAssignments deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceAssignmentsPb pb = mapper.readValue(p, UpdateWorkspaceAssignmentsPb.class); + return UpdateWorkspaceAssignments.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignmentsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignmentsPb.java new file mode 100755 index 000000000..910fa0f76 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignmentsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateWorkspaceAssignmentsPb { + @JsonProperty("permissions") + private Collection permissions; + + @JsonIgnore private Long principalId; + + @JsonIgnore private Long workspaceId; + + public UpdateWorkspaceAssignmentsPb setPermissions(Collection permissions) { + this.permissions = permissions; + return this; + } + + public Collection getPermissions() { + return permissions; + } + + public UpdateWorkspaceAssignmentsPb setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public UpdateWorkspaceAssignmentsPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceAssignmentsPb that = (UpdateWorkspaceAssignmentsPb) o; + return 
Objects.equals(permissions, that.permissions) + && Objects.equals(principalId, that.principalId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(permissions, principalId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceAssignmentsPb.class) + .add("permissions", permissions) + .add("principalId", principalId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java index b107ee340..85411d96f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = User.UserSerializer.class) +@JsonDeserialize(using = User.UserDeserializer.class) public class User { /** If this user is active */ - @JsonProperty("active") private Boolean active; /** @@ -22,11 +32,9 @@ public class User { *

[identity federation is enabled]: * https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation */ - @JsonProperty("displayName") private String displayName; /** All the emails associated with the Databricks user. */ - @JsonProperty("emails") private Collection emails; /** @@ -36,35 +44,27 @@ public class User { *

[assigning entitlements]: * https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements */ - @JsonProperty("entitlements") private Collection entitlements; /** External ID is not currently supported. It is reserved for future use. */ - @JsonProperty("externalId") private String externalId; /** */ - @JsonProperty("groups") private Collection groups; /** Databricks user ID. */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("name") private Name name; /** Corresponds to AWS instance profile/arn role. */ - @JsonProperty("roles") private Collection roles; /** The schema of the user. */ - @JsonProperty("schemas") private Collection schemas; /** Email address of the Databricks user. */ - @JsonProperty("userName") private String userName; public User setActive(Boolean active) { @@ -216,4 +216,57 @@ public String toString() { .add("userName", userName) .toString(); } + + UserPb toPb() { + UserPb pb = new UserPb(); + pb.setActive(active); + pb.setDisplayName(displayName); + pb.setEmails(emails); + pb.setEntitlements(entitlements); + pb.setExternalId(externalId); + pb.setGroups(groups); + pb.setId(id); + pb.setName(name); + pb.setRoles(roles); + pb.setSchemas(schemas); + pb.setUserName(userName); + + return pb; + } + + static User fromPb(UserPb pb) { + User model = new User(); + model.setActive(pb.getActive()); + model.setDisplayName(pb.getDisplayName()); + model.setEmails(pb.getEmails()); + model.setEntitlements(pb.getEntitlements()); + model.setExternalId(pb.getExternalId()); + model.setGroups(pb.getGroups()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setRoles(pb.getRoles()); + model.setSchemas(pb.getSchemas()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class UserSerializer extends JsonSerializer { + @Override + public void serialize(User value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UserPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class UserDeserializer extends JsonDeserializer { + @Override + public User deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UserPb pb = mapper.readValue(p, UserPb.class); + return User.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserPb.java new file mode 100755 index 000000000..2f94e5066 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UserPb { + @JsonProperty("active") + private Boolean active; + + @JsonProperty("displayName") + private String displayName; + + @JsonProperty("emails") + private Collection emails; + + @JsonProperty("entitlements") + private Collection entitlements; + + @JsonProperty("externalId") + private String externalId; + + @JsonProperty("groups") + private Collection groups; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private Name name; + + @JsonProperty("roles") + private Collection roles; + + @JsonProperty("schemas") + private Collection schemas; + + @JsonProperty("userName") + private String userName; + + public UserPb setActive(Boolean active) { + this.active = active; + return this; + } + + public Boolean getActive() { + return active; + } + + public UserPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + 
} + + public String getDisplayName() { + return displayName; + } + + public UserPb setEmails(Collection emails) { + this.emails = emails; + return this; + } + + public Collection getEmails() { + return emails; + } + + public UserPb setEntitlements(Collection entitlements) { + this.entitlements = entitlements; + return this; + } + + public Collection getEntitlements() { + return entitlements; + } + + public UserPb setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public UserPb setGroups(Collection groups) { + this.groups = groups; + return this; + } + + public Collection getGroups() { + return groups; + } + + public UserPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UserPb setName(Name name) { + this.name = name; + return this; + } + + public Name getName() { + return name; + } + + public UserPb setRoles(Collection roles) { + this.roles = roles; + return this; + } + + public Collection getRoles() { + return roles; + } + + public UserPb setSchemas(Collection schemas) { + this.schemas = schemas; + return this; + } + + public Collection getSchemas() { + return schemas; + } + + public UserPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UserPb that = (UserPb) o; + return Objects.equals(active, that.active) + && Objects.equals(displayName, that.displayName) + && Objects.equals(emails, that.emails) + && Objects.equals(entitlements, that.entitlements) + && Objects.equals(externalId, that.externalId) + && Objects.equals(groups, that.groups) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(roles, that.roles) + && Objects.equals(schemas, 
that.schemas) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash( + active, + displayName, + emails, + entitlements, + externalId, + groups, + id, + name, + roles, + schemas, + userName); + } + + @Override + public String toString() { + return new ToStringer(UserPb.class) + .add("active", active) + .add("displayName", displayName) + .add("emails", emails) + .add("entitlements", entitlements) + .add("externalId", externalId) + .add("groups", groups) + .add("id", id) + .add("name", name) + .add("roles", roles) + .add("schemas", schemas) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java index d079aba02..d5797926d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java @@ -98,7 +98,7 @@ public PasswordPermissions getPermissions() { public Iterable list(ListUsersRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java index eb980dd36..e88c59b04 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java @@ -21,7 +21,7 @@ public User create(User request) { String path = "/api/2.0/preview/scim/v2/Users"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, User.class); @@ -35,7 +35,7 @@ public void delete(DeleteUserRequest request) { String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public User get(GetUserRequest request) { String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, User.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public ListUsersResponse list(ListUsersRequest request) { String path = "/api/2.0/preview/scim/v2/Users"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListUsersResponse.class); } catch (IOException e) { @@ -97,7 +97,7 @@ public void patch(PartialUpdate request) { String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -110,7 +110,7 @@ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) { String path = "/api/2.0/permissions/authorization/passwords"; try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PasswordPermissions.class); @@ -124,7 +124,7 @@ public void update(User request) { String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { @@ -137,7 +137,7 @@ public PasswordPermissions updatePermissions(PasswordPermissionsRequest request) String path = "/api/2.0/permissions/authorization/passwords"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PasswordPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java index d163fe44c..30ba01f95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java @@ -24,7 +24,7 @@ public void delete(DeleteWorkspaceAssignmentRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteWorkspacePermissionAssignmentResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public WorkspacePermissions 
get(GetWorkspaceAssignmentRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WorkspacePermissions.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public PermissionAssignments list(ListWorkspaceAssignmentRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PermissionAssignments.class); } catch (IOException e) { @@ -72,7 +72,7 @@ public PermissionAssignment update(UpdateWorkspaceAssignments request) { apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PermissionAssignment.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java index e053cf4b9..afc42ad8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WorkspacePermissions.WorkspacePermissionsSerializer.class) +@JsonDeserialize(using = WorkspacePermissions.WorkspacePermissionsDeserializer.class) public class WorkspacePermissions { /** Array of permissions defined for a workspace. */ - @JsonProperty("permissions") private Collection permissions; public WorkspacePermissions setPermissions(Collection permissions) { @@ -40,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(WorkspacePermissions.class).add("permissions", permissions).toString(); } + + WorkspacePermissionsPb toPb() { + WorkspacePermissionsPb pb = new WorkspacePermissionsPb(); + pb.setPermissions(permissions); + + return pb; + } + + static WorkspacePermissions fromPb(WorkspacePermissionsPb pb) { + WorkspacePermissions model = new WorkspacePermissions(); + model.setPermissions(pb.getPermissions()); + + return model; + } + + public static class WorkspacePermissionsSerializer extends JsonSerializer { + @Override + public void serialize( + WorkspacePermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspacePermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspacePermissionsDeserializer + extends JsonDeserializer { + @Override + public WorkspacePermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspacePermissionsPb pb = mapper.readValue(p, WorkspacePermissionsPb.class); + return WorkspacePermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissionsPb.java new file mode 100755 index 000000000..7bddce447 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissionsPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WorkspacePermissionsPb { + @JsonProperty("permissions") + private Collection permissions; + + public WorkspacePermissionsPb setPermissions(Collection permissions) { + this.permissions = permissions; + return this; + } + + public Collection getPermissions() { + return permissions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspacePermissionsPb that = (WorkspacePermissionsPb) o; + return Objects.equals(permissions, that.permissions); + } + + @Override + public int hashCode() { + return Objects.hash(permissions); + } + + @Override + public String toString() { + return new ToStringer(WorkspacePermissionsPb.class).add("permissions", permissions).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnown.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnown.java new file mode 100755 index 000000000..72780fffa --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnown.java @@ -0,0 +1,418 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.protobuf.Duration; +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; + +@Generated +@JsonSerialize(using = AllWellKnown.AllWellKnownSerializer.class) +@JsonDeserialize(using = AllWellKnown.AllWellKnownDeserializer.class) +public class AllWellKnown { + /** */ + private java.time.Duration duration; + + /** + * optional google.protobuf.Struct struct = 3; + * + *

The field mask must be a single string, with multiple fields separated by commas (no + * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate + * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is + * not allowed, as only the entire collection field can be specified. Field names must exactly + * match the resource field names. + */ + private String fieldMask; + + /** */ + private Collection listValue; + + /** */ + private Collection repeatedDuration; + + /** repeated google.protobuf.Struct repeated_struct = 13; */ + private Collection> repeatedFieldMask; + + /** */ + private Collection> repeatedListValue; + + /** */ + private Collection repeatedTimestamp; + + /** */ + private Collection repeatedValue; + + /** */ + private java.time.Duration requiredDuration; + + /** + * optional google.protobuf.Struct required_struct = 8 [ (validate_required) = true ]; + * + *

The field mask must be a single string, with multiple fields separated by commas (no + * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate + * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is + * not allowed, as only the entire collection field can be specified. Field names must exactly + * match the resource field names. + */ + private String requiredFieldMask; + + /** */ + private Collection requiredListValue; + + /** */ + private String requiredTimestamp; + + /** */ + private JsonNode requiredValue; + + /** */ + private String timestamp; + + /** */ + private JsonNode value; + + public AllWellKnown setDuration(java.time.Duration duration) { + this.duration = duration; + return this; + } + + public java.time.Duration getDuration() { + return duration; + } + + public AllWellKnown setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public AllWellKnown setListValue(Collection listValue) { + this.listValue = listValue; + return this; + } + + public Collection getListValue() { + return listValue; + } + + public AllWellKnown setRepeatedDuration(Collection repeatedDuration) { + this.repeatedDuration = repeatedDuration; + return this; + } + + public Collection getRepeatedDuration() { + return repeatedDuration; + } + + public AllWellKnown setRepeatedFieldMask(Collection> repeatedFieldMask) { + this.repeatedFieldMask = repeatedFieldMask; + return this; + } + + public Collection> getRepeatedFieldMask() { + return repeatedFieldMask; + } + + public AllWellKnown setRepeatedListValue(Collection> repeatedListValue) { + this.repeatedListValue = repeatedListValue; + return this; + } + + public Collection> getRepeatedListValue() { + return repeatedListValue; + } + + public AllWellKnown setRepeatedTimestamp(Collection repeatedTimestamp) { + this.repeatedTimestamp = repeatedTimestamp; + return this; + } + + 
public Collection getRepeatedTimestamp() { + return repeatedTimestamp; + } + + public AllWellKnown setRepeatedValue(Collection repeatedValue) { + this.repeatedValue = repeatedValue; + return this; + } + + public Collection getRepeatedValue() { + return repeatedValue; + } + + public AllWellKnown setRequiredDuration(java.time.Duration requiredDuration) { + this.requiredDuration = requiredDuration; + return this; + } + + public java.time.Duration getRequiredDuration() { + return requiredDuration; + } + + public AllWellKnown setRequiredFieldMask(String requiredFieldMask) { + this.requiredFieldMask = requiredFieldMask; + return this; + } + + public String getRequiredFieldMask() { + return requiredFieldMask; + } + + public AllWellKnown setRequiredListValue(Collection requiredListValue) { + this.requiredListValue = requiredListValue; + return this; + } + + public Collection getRequiredListValue() { + return requiredListValue; + } + + public AllWellKnown setRequiredTimestamp(String requiredTimestamp) { + this.requiredTimestamp = requiredTimestamp; + return this; + } + + public String getRequiredTimestamp() { + return requiredTimestamp; + } + + public AllWellKnown setRequiredValue(JsonNode requiredValue) { + this.requiredValue = requiredValue; + return this; + } + + public JsonNode getRequiredValue() { + return requiredValue; + } + + public AllWellKnown setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public AllWellKnown setValue(JsonNode value) { + this.value = value; + return this; + } + + public JsonNode getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AllWellKnown that = (AllWellKnown) o; + return Objects.equals(duration, that.duration) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(listValue, that.listValue) + && 
Objects.equals(repeatedDuration, that.repeatedDuration) + && Objects.equals(repeatedFieldMask, that.repeatedFieldMask) + && Objects.equals(repeatedListValue, that.repeatedListValue) + && Objects.equals(repeatedTimestamp, that.repeatedTimestamp) + && Objects.equals(repeatedValue, that.repeatedValue) + && Objects.equals(requiredDuration, that.requiredDuration) + && Objects.equals(requiredFieldMask, that.requiredFieldMask) + && Objects.equals(requiredListValue, that.requiredListValue) + && Objects.equals(requiredTimestamp, that.requiredTimestamp) + && Objects.equals(requiredValue, that.requiredValue) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + duration, + fieldMask, + listValue, + repeatedDuration, + repeatedFieldMask, + repeatedListValue, + repeatedTimestamp, + repeatedValue, + requiredDuration, + requiredFieldMask, + requiredListValue, + requiredTimestamp, + requiredValue, + timestamp, + value); + } + + @Override + public String toString() { + return new ToStringer(AllWellKnown.class) + .add("duration", duration) + .add("fieldMask", fieldMask) + .add("listValue", listValue) + .add("repeatedDuration", repeatedDuration) + .add("repeatedFieldMask", repeatedFieldMask) + .add("repeatedListValue", repeatedListValue) + .add("repeatedTimestamp", repeatedTimestamp) + .add("repeatedValue", repeatedValue) + .add("requiredDuration", requiredDuration) + .add("requiredFieldMask", requiredFieldMask) + .add("requiredListValue", requiredListValue) + .add("requiredTimestamp", requiredTimestamp) + .add("requiredValue", requiredValue) + .add("timestamp", timestamp) + .add("value", value) + .toString(); + } + + AllWellKnownPb toPb() { + AllWellKnownPb pb = new AllWellKnownPb(); + if (duration != null) { + pb.setDuration(Converters.durationToPb(duration)); + } + pb.setFieldMask(fieldMask); + pb.setListValue(listValue); + if (repeatedDuration != null) { + List repeatedDurationPb 
= new ArrayList<>(); + for (java.time.Duration item : repeatedDuration) { + if (item != null) { + repeatedDurationPb.add(Converters.durationToPb(item)); + } else { + repeatedDurationPb.add(null); + } + } + pb.setRepeatedDuration(repeatedDurationPb); + } + if (repeatedFieldMask != null) { + List repeatedFieldMaskPb = new ArrayList<>(); + for (List item : repeatedFieldMask) { + if (item != null) { + repeatedFieldMaskPb.add(Converters.fieldMaskToPb(item)); + } else { + repeatedFieldMaskPb.add(null); + } + } + pb.setRepeatedFieldMask(repeatedFieldMaskPb); + } + pb.setRepeatedListValue(repeatedListValue); + if (repeatedTimestamp != null) { + List repeatedTimestampPb = new ArrayList<>(); + for (Instant item : repeatedTimestamp) { + if (item != null) { + repeatedTimestampPb.add(Converters.instantToPb(item)); + } else { + repeatedTimestampPb.add(null); + } + } + pb.setRepeatedTimestamp(repeatedTimestampPb); + } + pb.setRepeatedValue(repeatedValue); + if (requiredDuration != null) { + pb.setRequiredDuration(Converters.durationToPb(requiredDuration)); + } + pb.setRequiredFieldMask(requiredFieldMask); + pb.setRequiredListValue(requiredListValue); + pb.setRequiredTimestamp(requiredTimestamp); + pb.setRequiredValue(requiredValue); + pb.setTimestamp(timestamp); + pb.setValue(value); + + return pb; + } + + static AllWellKnown fromPb(AllWellKnownPb pb) { + AllWellKnown model = new AllWellKnown(); + if (pb.getDuration() != null) { + model.setDuration(Converters.durationFromPb(pb.getDuration())); + } + model.setFieldMask(pb.getFieldMask()); + model.setListValue(pb.getListValue()); + if (pb.getRepeatedDuration() != null) { + List repeatedDuration = new ArrayList<>(); + for (Duration item : pb.getRepeatedDuration()) { + if (item != null) { + repeatedDuration.add(Converters.durationFromPb(item)); + } else { + repeatedDuration.add(null); + } + } + model.setRepeatedDuration(repeatedDuration); + } + if (pb.getRepeatedFieldMask() != null) { + List> repeatedFieldMask = new ArrayList<>(); + 
for (FieldMask item : pb.getRepeatedFieldMask()) { + if (item != null) { + repeatedFieldMask.add(Converters.fieldMaskFromPb(item)); + } else { + repeatedFieldMask.add(null); + } + } + model.setRepeatedFieldMask(repeatedFieldMask); + } + model.setRepeatedListValue(pb.getRepeatedListValue()); + if (pb.getRepeatedTimestamp() != null) { + List repeatedTimestamp = new ArrayList<>(); + for (Timestamp item : pb.getRepeatedTimestamp()) { + if (item != null) { + repeatedTimestamp.add(Converters.instantFromPb(item)); + } else { + repeatedTimestamp.add(null); + } + } + model.setRepeatedTimestamp(repeatedTimestamp); + } + model.setRepeatedValue(pb.getRepeatedValue()); + if (pb.getRequiredDuration() != null) { + model.setRequiredDuration(Converters.durationFromPb(pb.getRequiredDuration())); + } + model.setRequiredFieldMask(pb.getRequiredFieldMask()); + model.setRequiredListValue(pb.getRequiredListValue()); + model.setRequiredTimestamp(pb.getRequiredTimestamp()); + model.setRequiredValue(pb.getRequiredValue()); + model.setTimestamp(pb.getTimestamp()); + model.setValue(pb.getValue()); + + return model; + } + + public static class AllWellKnownSerializer extends JsonSerializer { + @Override + public void serialize(AllWellKnown value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AllWellKnownPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AllWellKnownDeserializer extends JsonDeserializer { + @Override + public AllWellKnown deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AllWellKnownPb pb = mapper.readValue(p, AllWellKnownPb.class); + return AllWellKnown.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnownPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnownPb.java new file mode 100755 index 000000000..e50cfcdfa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AllWellKnownPb.java @@ -0,0 +1,267 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.core.serialization.DurationDeserializer; +import com.databricks.sdk.core.serialization.DurationSerializer; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.protobuf.Duration; +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AllWellKnownPb { + @JsonProperty("duration") + @JsonSerialize(using = DurationSerializer.class) + @JsonDeserialize(using = DurationDeserializer.class) + private Duration duration; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("list_value") + private Collection listValue; + + @JsonProperty("repeated_duration") + private Collection repeatedDuration; + + @JsonProperty("repeated_field_mask") + private Collection repeatedFieldMask; + + @JsonProperty("repeated_list_value") + private Collection> repeatedListValue; + + @JsonProperty("repeated_timestamp") + private Collection repeatedTimestamp; + + @JsonProperty("repeated_value") + private Collection 
repeatedValue; + + @JsonProperty("required_duration") + @JsonSerialize(using = DurationSerializer.class) + @JsonDeserialize(using = DurationDeserializer.class) + private Duration requiredDuration; + + @JsonProperty("required_field_mask") + private String requiredFieldMask; + + @JsonProperty("required_list_value") + private Collection requiredListValue; + + @JsonProperty("required_timestamp") + private String requiredTimestamp; + + @JsonProperty("required_value") + private JsonNode requiredValue; + + @JsonProperty("timestamp") + private String timestamp; + + @JsonProperty("value") + private JsonNode value; + + public AllWellKnownPb setDuration(Duration duration) { + this.duration = duration; + return this; + } + + public Duration getDuration() { + return duration; + } + + public AllWellKnownPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public AllWellKnownPb setListValue(Collection listValue) { + this.listValue = listValue; + return this; + } + + public Collection getListValue() { + return listValue; + } + + public AllWellKnownPb setRepeatedDuration(Collection repeatedDuration) { + this.repeatedDuration = repeatedDuration; + return this; + } + + public Collection getRepeatedDuration() { + return repeatedDuration; + } + + public AllWellKnownPb setRepeatedFieldMask(Collection repeatedFieldMask) { + this.repeatedFieldMask = repeatedFieldMask; + return this; + } + + public Collection getRepeatedFieldMask() { + return repeatedFieldMask; + } + + public AllWellKnownPb setRepeatedListValue(Collection> repeatedListValue) { + this.repeatedListValue = repeatedListValue; + return this; + } + + public Collection> getRepeatedListValue() { + return repeatedListValue; + } + + public AllWellKnownPb setRepeatedTimestamp(Collection repeatedTimestamp) { + this.repeatedTimestamp = repeatedTimestamp; + return this; + } + + public Collection getRepeatedTimestamp() { + return 
repeatedTimestamp; + } + + public AllWellKnownPb setRepeatedValue(Collection repeatedValue) { + this.repeatedValue = repeatedValue; + return this; + } + + public Collection getRepeatedValue() { + return repeatedValue; + } + + public AllWellKnownPb setRequiredDuration(Duration requiredDuration) { + this.requiredDuration = requiredDuration; + return this; + } + + public Duration getRequiredDuration() { + return requiredDuration; + } + + public AllWellKnownPb setRequiredFieldMask(String requiredFieldMask) { + this.requiredFieldMask = requiredFieldMask; + return this; + } + + public String getRequiredFieldMask() { + return requiredFieldMask; + } + + public AllWellKnownPb setRequiredListValue(Collection requiredListValue) { + this.requiredListValue = requiredListValue; + return this; + } + + public Collection getRequiredListValue() { + return requiredListValue; + } + + public AllWellKnownPb setRequiredTimestamp(String requiredTimestamp) { + this.requiredTimestamp = requiredTimestamp; + return this; + } + + public String getRequiredTimestamp() { + return requiredTimestamp; + } + + public AllWellKnownPb setRequiredValue(JsonNode requiredValue) { + this.requiredValue = requiredValue; + return this; + } + + public JsonNode getRequiredValue() { + return requiredValue; + } + + public AllWellKnownPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public AllWellKnownPb setValue(JsonNode value) { + this.value = value; + return this; + } + + public JsonNode getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AllWellKnownPb that = (AllWellKnownPb) o; + return Objects.equals(duration, that.duration) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(listValue, that.listValue) + && Objects.equals(repeatedDuration, that.repeatedDuration) + && 
Objects.equals(repeatedFieldMask, that.repeatedFieldMask) + && Objects.equals(repeatedListValue, that.repeatedListValue) + && Objects.equals(repeatedTimestamp, that.repeatedTimestamp) + && Objects.equals(repeatedValue, that.repeatedValue) + && Objects.equals(requiredDuration, that.requiredDuration) + && Objects.equals(requiredFieldMask, that.requiredFieldMask) + && Objects.equals(requiredListValue, that.requiredListValue) + && Objects.equals(requiredTimestamp, that.requiredTimestamp) + && Objects.equals(requiredValue, that.requiredValue) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + duration, + fieldMask, + listValue, + repeatedDuration, + repeatedFieldMask, + repeatedListValue, + repeatedTimestamp, + repeatedValue, + requiredDuration, + requiredFieldMask, + requiredListValue, + requiredTimestamp, + requiredValue, + timestamp, + value); + } + + @Override + public String toString() { + return new ToStringer(AllWellKnownPb.class) + .add("duration", duration) + .add("fieldMask", fieldMask) + .add("listValue", listValue) + .add("repeatedDuration", repeatedDuration) + .add("repeatedFieldMask", repeatedFieldMask) + .add("repeatedListValue", repeatedListValue) + .add("repeatedTimestamp", repeatedTimestamp) + .add("repeatedValue", repeatedValue) + .add("requiredDuration", requiredDuration) + .add("requiredFieldMask", requiredFieldMask) + .add("requiredListValue", requiredListValue) + .add("requiredTimestamp", requiredTimestamp) + .add("requiredValue", requiredValue) + .add("timestamp", timestamp) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index fa9f0ab77..5b840b77a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = BaseJob.BaseJobSerializer.class) +@JsonDeserialize(using = BaseJob.BaseJobDeserializer.class) public class BaseJob { /** * The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC). */ - @JsonProperty("created_time") private Long createdTime; /** * The creator user name. This field won’t be included in the response if the user has already * been deleted. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** @@ -28,7 +37,6 @@ public class BaseJob { * Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based * on accessible budget policies of the run_as identity on job creation or modification. */ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; /** @@ -36,18 +44,15 @@ public class BaseJob { * They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 * :method:jobs/list requests with `expand_tasks=true`. */ - @JsonProperty("has_more") private Boolean hasMore; /** The canonical identifier for this job. 
*/ - @JsonProperty("job_id") private Long jobId; /** * Settings for this job and all of its runs. These settings can be updated using the `resetJob` * method. */ - @JsonProperty("settings") private JobSettings settings; public BaseJob setCreatedTime(Long createdTime) { @@ -134,4 +139,47 @@ public String toString() { .add("settings", settings) .toString(); } + + BaseJobPb toPb() { + BaseJobPb pb = new BaseJobPb(); + pb.setCreatedTime(createdTime); + pb.setCreatorUserName(creatorUserName); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + pb.setHasMore(hasMore); + pb.setJobId(jobId); + pb.setSettings(settings); + + return pb; + } + + static BaseJob fromPb(BaseJobPb pb) { + BaseJob model = new BaseJob(); + model.setCreatedTime(pb.getCreatedTime()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + model.setHasMore(pb.getHasMore()); + model.setJobId(pb.getJobId()); + model.setSettings(pb.getSettings()); + + return model; + } + + public static class BaseJobSerializer extends JsonSerializer { + @Override + public void serialize(BaseJob value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BaseJobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BaseJobDeserializer extends JsonDeserializer { + @Override + public BaseJob deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BaseJobPb pb = mapper.readValue(p, BaseJobPb.class); + return BaseJob.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJobPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJobPb.java new file mode 100755 index 000000000..7cc0802d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJobPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class BaseJobPb { + @JsonProperty("created_time") + private Long createdTime; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("settings") + private JobSettings settings; + + public BaseJobPb setCreatedTime(Long createdTime) { + this.createdTime = createdTime; + return this; + } + + public Long getCreatedTime() { + return createdTime; + } + + public BaseJobPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public BaseJobPb setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public BaseJobPb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public 
BaseJobPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public BaseJobPb setSettings(JobSettings settings) { + this.settings = settings; + return this; + } + + public JobSettings getSettings() { + return settings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BaseJobPb that = (BaseJobPb) o; + return Objects.equals(createdTime, that.createdTime) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(hasMore, that.hasMore) + && Objects.equals(jobId, that.jobId) + && Objects.equals(settings, that.settings); + } + + @Override + public int hashCode() { + return Objects.hash( + createdTime, creatorUserName, effectiveBudgetPolicyId, hasMore, jobId, settings); + } + + @Override + public String toString() { + return new ToStringer(BaseJobPb.class) + .add("createdTime", createdTime) + .add("creatorUserName", creatorUserName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("hasMore", hasMore) + .add("jobId", jobId) + .add("settings", settings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index d6864e7ba..2e3dff20b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = BaseRun.BaseRunSerializer.class) +@JsonDeserialize(using = BaseRun.BaseRunDeserializer.class) public class BaseRun { /** * The sequence number of this run attempt for a triggered job run. The initial attempt of a run @@ -17,7 +28,6 @@ public class BaseRun { * original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they * succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job. */ - @JsonProperty("attempt_number") private Long attemptNumber; /** @@ -26,29 +36,24 @@ public class BaseRun { * and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. * The total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("cleanup_duration") private Long cleanupDuration; /** * The cluster used for this run. If the run is specified to use a new cluster, this field is set * once the Jobs service has requested a cluster for the run. */ - @JsonProperty("cluster_instance") private ClusterInstance clusterInstance; /** A snapshot of the job’s cluster specification when this run was created. */ - @JsonProperty("cluster_spec") private ClusterSpec clusterSpec; /** * The creator user name. This field won’t be included in the response if the user has already * been deleted. 
*/ - @JsonProperty("creator_user_name") private String creatorUserName; /** Description of the run */ - @JsonProperty("description") private String description; /** @@ -60,14 +65,12 @@ public class BaseRun { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. */ - @JsonProperty("end_time") private Long endTime; /** @@ -77,7 +80,6 @@ public class BaseRun { * `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("execution_duration") private Long executionDuration; /** @@ -91,7 +93,6 @@ public class BaseRun { *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** @@ -99,7 +100,6 @@ public class BaseRun { * They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 * :method:jobs/listruns requests with `expand_tasks=true`. */ - @JsonProperty("has_more") private Boolean hasMore; /** @@ -108,15 +108,12 @@ public class BaseRun { * task settings. If more than 100 job clusters are available, you can paginate through them using * :method:jobs/getrun. */ - @JsonProperty("job_clusters") private Collection jobClusters; /** The canonical identifier of the job that contains this run. */ - @JsonProperty("job_id") private Long jobId; /** Job-level parameters used in the run */ - @JsonProperty("job_parameters") private Collection jobParameters; /** @@ -124,46 +121,36 @@ public class BaseRun { * populated with the job run ID. For task runs, the field is populated with the ID of the job run * that the task run belongs to. */ - @JsonProperty("job_run_id") private Long jobRunId; /** A unique identifier for this job run. This is set to the same value as `run_id`. */ - @JsonProperty("number_in_job") private Long numberInJob; /** * If this run is a retry of a prior run attempt, this field contains the run_id of the original * attempt; otherwise, it is the same as the run_id. */ - @JsonProperty("original_attempt_run_id") private Long originalAttemptRunId; /** The parameters used for this run. */ - @JsonProperty("overriding_parameters") private RunParameters overridingParameters; /** The time in milliseconds that the run has spent in the queue. */ - @JsonProperty("queue_duration") private Long queueDuration; /** The repair history of the run. */ - @JsonProperty("repair_history") private Collection repairHistory; /** The time in milliseconds it took the job run and all of its repairs to finish. 
*/ - @JsonProperty("run_duration") private Long runDuration; /** The canonical identifier of the run. This ID is unique across all runs of all jobs. */ - @JsonProperty("run_id") private Long runId; /** An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding. */ - @JsonProperty("run_name") private String runName; /** The URL to the detail page of the run. */ - @JsonProperty("run_page_url") private String runPageUrl; /** @@ -174,11 +161,9 @@ public class BaseRun { *

[dbutils.notebook.run]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow */ - @JsonProperty("run_type") private RunType runType; /** The cron schedule that triggered this run if it was triggered by the periodic scheduler. */ - @JsonProperty("schedule") private CronSchedule schedule; /** @@ -188,7 +173,6 @@ public class BaseRun { * the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("setup_duration") private Long setupDuration; /** @@ -196,15 +180,12 @@ public class BaseRun { * This may not be the time when the job task starts executing, for example, if the job is * scheduled to run on a new cluster, this is the time the cluster creation call is issued. */ - @JsonProperty("start_time") private Long startTime; /** Deprecated. Please use the `status` field instead. */ - @JsonProperty("state") private RunState state; /** The current status of the run */ - @JsonProperty("status") private RunStatus status; /** @@ -213,7 +194,6 @@ public class BaseRun { * paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object * root to determine if more results are available. */ - @JsonProperty("tasks") private Collection tasks; /** @@ -228,11 +208,9 @@ public class BaseRun { * triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to * manually restart a continuous job run. 
*/ - @JsonProperty("trigger") private TriggerType trigger; /** Additional details about what triggered the run */ - @JsonProperty("trigger_info") private TriggerInfo triggerInfo; public BaseRun setAttemptNumber(Long attemptNumber) { @@ -648,4 +626,101 @@ public String toString() { .add("triggerInfo", triggerInfo) .toString(); } + + BaseRunPb toPb() { + BaseRunPb pb = new BaseRunPb(); + pb.setAttemptNumber(attemptNumber); + pb.setCleanupDuration(cleanupDuration); + pb.setClusterInstance(clusterInstance); + pb.setClusterSpec(clusterSpec); + pb.setCreatorUserName(creatorUserName); + pb.setDescription(description); + pb.setEffectivePerformanceTarget(effectivePerformanceTarget); + pb.setEndTime(endTime); + pb.setExecutionDuration(executionDuration); + pb.setGitSource(gitSource); + pb.setHasMore(hasMore); + pb.setJobClusters(jobClusters); + pb.setJobId(jobId); + pb.setJobParameters(jobParameters); + pb.setJobRunId(jobRunId); + pb.setNumberInJob(numberInJob); + pb.setOriginalAttemptRunId(originalAttemptRunId); + pb.setOverridingParameters(overridingParameters); + pb.setQueueDuration(queueDuration); + pb.setRepairHistory(repairHistory); + pb.setRunDuration(runDuration); + pb.setRunId(runId); + pb.setRunName(runName); + pb.setRunPageUrl(runPageUrl); + pb.setRunType(runType); + pb.setSchedule(schedule); + pb.setSetupDuration(setupDuration); + pb.setStartTime(startTime); + pb.setState(state); + pb.setStatus(status); + pb.setTasks(tasks); + pb.setTrigger(trigger); + pb.setTriggerInfo(triggerInfo); + + return pb; + } + + static BaseRun fromPb(BaseRunPb pb) { + BaseRun model = new BaseRun(); + model.setAttemptNumber(pb.getAttemptNumber()); + model.setCleanupDuration(pb.getCleanupDuration()); + model.setClusterInstance(pb.getClusterInstance()); + model.setClusterSpec(pb.getClusterSpec()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setDescription(pb.getDescription()); + model.setEffectivePerformanceTarget(pb.getEffectivePerformanceTarget()); + 
model.setEndTime(pb.getEndTime()); + model.setExecutionDuration(pb.getExecutionDuration()); + model.setGitSource(pb.getGitSource()); + model.setHasMore(pb.getHasMore()); + model.setJobClusters(pb.getJobClusters()); + model.setJobId(pb.getJobId()); + model.setJobParameters(pb.getJobParameters()); + model.setJobRunId(pb.getJobRunId()); + model.setNumberInJob(pb.getNumberInJob()); + model.setOriginalAttemptRunId(pb.getOriginalAttemptRunId()); + model.setOverridingParameters(pb.getOverridingParameters()); + model.setQueueDuration(pb.getQueueDuration()); + model.setRepairHistory(pb.getRepairHistory()); + model.setRunDuration(pb.getRunDuration()); + model.setRunId(pb.getRunId()); + model.setRunName(pb.getRunName()); + model.setRunPageUrl(pb.getRunPageUrl()); + model.setRunType(pb.getRunType()); + model.setSchedule(pb.getSchedule()); + model.setSetupDuration(pb.getSetupDuration()); + model.setStartTime(pb.getStartTime()); + model.setState(pb.getState()); + model.setStatus(pb.getStatus()); + model.setTasks(pb.getTasks()); + model.setTrigger(pb.getTrigger()); + model.setTriggerInfo(pb.getTriggerInfo()); + + return model; + } + + public static class BaseRunSerializer extends JsonSerializer { + @Override + public void serialize(BaseRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BaseRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BaseRunDeserializer extends JsonDeserializer { + @Override + public BaseRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BaseRunPb pb = mapper.readValue(p, BaseRunPb.class); + return BaseRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRunPb.java new file mode 100755 index 000000000..1558c2063 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRunPb.java @@ -0,0 +1,525 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BaseRunPb { + @JsonProperty("attempt_number") + private Long attemptNumber; + + @JsonProperty("cleanup_duration") + private Long cleanupDuration; + + @JsonProperty("cluster_instance") + private ClusterInstance clusterInstance; + + @JsonProperty("cluster_spec") + private ClusterSpec clusterSpec; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("description") + private String description; + + @JsonProperty("effective_performance_target") + private PerformanceTarget effectivePerformanceTarget; + + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("execution_duration") + private Long executionDuration; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("job_clusters") + private Collection jobClusters; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("job_parameters") + private Collection jobParameters; + + @JsonProperty("job_run_id") + private Long jobRunId; + + @JsonProperty("number_in_job") + private Long numberInJob; + + @JsonProperty("original_attempt_run_id") + private Long 
originalAttemptRunId; + + @JsonProperty("overriding_parameters") + private RunParameters overridingParameters; + + @JsonProperty("queue_duration") + private Long queueDuration; + + @JsonProperty("repair_history") + private Collection repairHistory; + + @JsonProperty("run_duration") + private Long runDuration; + + @JsonProperty("run_id") + private Long runId; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("run_page_url") + private String runPageUrl; + + @JsonProperty("run_type") + private RunType runType; + + @JsonProperty("schedule") + private CronSchedule schedule; + + @JsonProperty("setup_duration") + private Long setupDuration; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("state") + private RunState state; + + @JsonProperty("status") + private RunStatus status; + + @JsonProperty("tasks") + private Collection tasks; + + @JsonProperty("trigger") + private TriggerType trigger; + + @JsonProperty("trigger_info") + private TriggerInfo triggerInfo; + + public BaseRunPb setAttemptNumber(Long attemptNumber) { + this.attemptNumber = attemptNumber; + return this; + } + + public Long getAttemptNumber() { + return attemptNumber; + } + + public BaseRunPb setCleanupDuration(Long cleanupDuration) { + this.cleanupDuration = cleanupDuration; + return this; + } + + public Long getCleanupDuration() { + return cleanupDuration; + } + + public BaseRunPb setClusterInstance(ClusterInstance clusterInstance) { + this.clusterInstance = clusterInstance; + return this; + } + + public ClusterInstance getClusterInstance() { + return clusterInstance; + } + + public BaseRunPb setClusterSpec(ClusterSpec clusterSpec) { + this.clusterSpec = clusterSpec; + return this; + } + + public ClusterSpec getClusterSpec() { + return clusterSpec; + } + + public BaseRunPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + 
public BaseRunPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public BaseRunPb setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) { + this.effectivePerformanceTarget = effectivePerformanceTarget; + return this; + } + + public PerformanceTarget getEffectivePerformanceTarget() { + return effectivePerformanceTarget; + } + + public BaseRunPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public BaseRunPb setExecutionDuration(Long executionDuration) { + this.executionDuration = executionDuration; + return this; + } + + public Long getExecutionDuration() { + return executionDuration; + } + + public BaseRunPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public BaseRunPb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public BaseRunPb setJobClusters(Collection jobClusters) { + this.jobClusters = jobClusters; + return this; + } + + public Collection getJobClusters() { + return jobClusters; + } + + public BaseRunPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public BaseRunPb setJobParameters(Collection jobParameters) { + this.jobParameters = jobParameters; + return this; + } + + public Collection getJobParameters() { + return jobParameters; + } + + public BaseRunPb setJobRunId(Long jobRunId) { + this.jobRunId = jobRunId; + return this; + } + + public Long getJobRunId() { + return jobRunId; + } + + public BaseRunPb setNumberInJob(Long numberInJob) { + this.numberInJob = numberInJob; + return this; + } + + public Long getNumberInJob() { + return numberInJob; + } + + public BaseRunPb 
setOriginalAttemptRunId(Long originalAttemptRunId) { + this.originalAttemptRunId = originalAttemptRunId; + return this; + } + + public Long getOriginalAttemptRunId() { + return originalAttemptRunId; + } + + public BaseRunPb setOverridingParameters(RunParameters overridingParameters) { + this.overridingParameters = overridingParameters; + return this; + } + + public RunParameters getOverridingParameters() { + return overridingParameters; + } + + public BaseRunPb setQueueDuration(Long queueDuration) { + this.queueDuration = queueDuration; + return this; + } + + public Long getQueueDuration() { + return queueDuration; + } + + public BaseRunPb setRepairHistory(Collection repairHistory) { + this.repairHistory = repairHistory; + return this; + } + + public Collection getRepairHistory() { + return repairHistory; + } + + public BaseRunPb setRunDuration(Long runDuration) { + this.runDuration = runDuration; + return this; + } + + public Long getRunDuration() { + return runDuration; + } + + public BaseRunPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + public BaseRunPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public BaseRunPb setRunPageUrl(String runPageUrl) { + this.runPageUrl = runPageUrl; + return this; + } + + public String getRunPageUrl() { + return runPageUrl; + } + + public BaseRunPb setRunType(RunType runType) { + this.runType = runType; + return this; + } + + public RunType getRunType() { + return runType; + } + + public BaseRunPb setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public BaseRunPb setSetupDuration(Long setupDuration) { + this.setupDuration = setupDuration; + return this; + } + + public Long getSetupDuration() { + return setupDuration; + } + + public BaseRunPb setStartTime(Long startTime) { + 
this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public BaseRunPb setState(RunState state) { + this.state = state; + return this; + } + + public RunState getState() { + return state; + } + + public BaseRunPb setStatus(RunStatus status) { + this.status = status; + return this; + } + + public RunStatus getStatus() { + return status; + } + + public BaseRunPb setTasks(Collection tasks) { + this.tasks = tasks; + return this; + } + + public Collection getTasks() { + return tasks; + } + + public BaseRunPb setTrigger(TriggerType trigger) { + this.trigger = trigger; + return this; + } + + public TriggerType getTrigger() { + return trigger; + } + + public BaseRunPb setTriggerInfo(TriggerInfo triggerInfo) { + this.triggerInfo = triggerInfo; + return this; + } + + public TriggerInfo getTriggerInfo() { + return triggerInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BaseRunPb that = (BaseRunPb) o; + return Objects.equals(attemptNumber, that.attemptNumber) + && Objects.equals(cleanupDuration, that.cleanupDuration) + && Objects.equals(clusterInstance, that.clusterInstance) + && Objects.equals(clusterSpec, that.clusterSpec) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(description, that.description) + && Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget) + && Objects.equals(endTime, that.endTime) + && Objects.equals(executionDuration, that.executionDuration) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(hasMore, that.hasMore) + && Objects.equals(jobClusters, that.jobClusters) + && Objects.equals(jobId, that.jobId) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(jobRunId, that.jobRunId) + && Objects.equals(numberInJob, that.numberInJob) + && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) + && 
Objects.equals(overridingParameters, that.overridingParameters) + && Objects.equals(queueDuration, that.queueDuration) + && Objects.equals(repairHistory, that.repairHistory) + && Objects.equals(runDuration, that.runDuration) + && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) + && Objects.equals(runPageUrl, that.runPageUrl) + && Objects.equals(runType, that.runType) + && Objects.equals(schedule, that.schedule) + && Objects.equals(setupDuration, that.setupDuration) + && Objects.equals(startTime, that.startTime) + && Objects.equals(state, that.state) + && Objects.equals(status, that.status) + && Objects.equals(tasks, that.tasks) + && Objects.equals(trigger, that.trigger) + && Objects.equals(triggerInfo, that.triggerInfo); + } + + @Override + public int hashCode() { + return Objects.hash( + attemptNumber, + cleanupDuration, + clusterInstance, + clusterSpec, + creatorUserName, + description, + effectivePerformanceTarget, + endTime, + executionDuration, + gitSource, + hasMore, + jobClusters, + jobId, + jobParameters, + jobRunId, + numberInJob, + originalAttemptRunId, + overridingParameters, + queueDuration, + repairHistory, + runDuration, + runId, + runName, + runPageUrl, + runType, + schedule, + setupDuration, + startTime, + state, + status, + tasks, + trigger, + triggerInfo); + } + + @Override + public String toString() { + return new ToStringer(BaseRunPb.class) + .add("attemptNumber", attemptNumber) + .add("cleanupDuration", cleanupDuration) + .add("clusterInstance", clusterInstance) + .add("clusterSpec", clusterSpec) + .add("creatorUserName", creatorUserName) + .add("description", description) + .add("effectivePerformanceTarget", effectivePerformanceTarget) + .add("endTime", endTime) + .add("executionDuration", executionDuration) + .add("gitSource", gitSource) + .add("hasMore", hasMore) + .add("jobClusters", jobClusters) + .add("jobId", jobId) + .add("jobParameters", jobParameters) + .add("jobRunId", jobRunId) + .add("numberInJob", 
numberInJob) + .add("originalAttemptRunId", originalAttemptRunId) + .add("overridingParameters", overridingParameters) + .add("queueDuration", queueDuration) + .add("repairHistory", repairHistory) + .add("runDuration", runDuration) + .add("runId", runId) + .add("runName", runName) + .add("runPageUrl", runPageUrl) + .add("runType", runType) + .add("schedule", schedule) + .add("setupDuration", setupDuration) + .add("startTime", startTime) + .add("state", state) + .add("status", status) + .add("tasks", tasks) + .add("trigger", trigger) + .add("triggerInfo", triggerInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java index 55128f8a8..4a26f0f77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelAllRuns.CancelAllRunsSerializer.class) +@JsonDeserialize(using = CancelAllRuns.CancelAllRunsDeserializer.class) public class CancelAllRuns { /** * Optional boolean parameter to cancel all queued runs. 
If no job_id is provided, all queued runs * in the workspace are canceled. */ - @JsonProperty("all_queued_runs") private Boolean allQueuedRuns; /** The canonical identifier of the job to cancel all runs of. */ - @JsonProperty("job_id") private Long jobId; public CancelAllRuns setAllQueuedRuns(Boolean allQueuedRuns) { @@ -58,4 +67,39 @@ public String toString() { .add("jobId", jobId) .toString(); } + + CancelAllRunsPb toPb() { + CancelAllRunsPb pb = new CancelAllRunsPb(); + pb.setAllQueuedRuns(allQueuedRuns); + pb.setJobId(jobId); + + return pb; + } + + static CancelAllRuns fromPb(CancelAllRunsPb pb) { + CancelAllRuns model = new CancelAllRuns(); + model.setAllQueuedRuns(pb.getAllQueuedRuns()); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class CancelAllRunsSerializer extends JsonSerializer { + @Override + public void serialize(CancelAllRuns value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelAllRunsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelAllRunsDeserializer extends JsonDeserializer { + @Override + public CancelAllRuns deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelAllRunsPb pb = mapper.readValue(p, CancelAllRunsPb.class); + return CancelAllRuns.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsPb.java new file mode 100755 index 000000000..b0538e96d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CancelAllRunsPb { + @JsonProperty("all_queued_runs") + private Boolean allQueuedRuns; + + @JsonProperty("job_id") + private Long jobId; + + public CancelAllRunsPb setAllQueuedRuns(Boolean allQueuedRuns) { + this.allQueuedRuns = allQueuedRuns; + return this; + } + + public Boolean getAllQueuedRuns() { + return allQueuedRuns; + } + + public CancelAllRunsPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelAllRunsPb that = (CancelAllRunsPb) o; + return Objects.equals(allQueuedRuns, that.allQueuedRuns) && Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(allQueuedRuns, jobId); + } + + @Override + public String toString() { + return new ToStringer(CancelAllRunsPb.class) + .add("allQueuedRuns", allQueuedRuns) + .add("jobId", jobId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java index c0b570c3f..52d9c54f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelAllRunsResponse.CancelAllRunsResponseSerializer.class) +@JsonDeserialize(using = CancelAllRunsResponse.CancelAllRunsResponseDeserializer.class) public class CancelAllRunsResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(CancelAllRunsResponse.class).toString(); } + + CancelAllRunsResponsePb toPb() { + CancelAllRunsResponsePb pb = new CancelAllRunsResponsePb(); + + return pb; + } + + static CancelAllRunsResponse fromPb(CancelAllRunsResponsePb pb) { + CancelAllRunsResponse model = new CancelAllRunsResponse(); + + return model; + } + + public static class CancelAllRunsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CancelAllRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelAllRunsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelAllRunsResponseDeserializer + extends JsonDeserializer { + @Override + public CancelAllRunsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelAllRunsResponsePb pb = mapper.readValue(p, CancelAllRunsResponsePb.class); + return CancelAllRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponsePb.java new file mode 100755 index 000000000..52003732f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CancelAllRunsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CancelAllRunsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java index f33c7c5ab..b79a11a4e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelRun.CancelRunSerializer.class) +@JsonDeserialize(using = CancelRun.CancelRunDeserializer.class) public class CancelRun { /** This field is required. */ - @JsonProperty("run_id") private Long runId; public CancelRun setRunId(Long runId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(CancelRun.class).add("runId", runId).toString(); } + + CancelRunPb toPb() { + CancelRunPb pb = new CancelRunPb(); + pb.setRunId(runId); + + return pb; + } + + static CancelRun fromPb(CancelRunPb pb) { + CancelRun model = new CancelRun(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class CancelRunSerializer extends JsonSerializer { + @Override + public void serialize(CancelRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelRunDeserializer extends JsonDeserializer { + @Override + public CancelRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelRunPb pb = mapper.readValue(p, CancelRunPb.class); + return CancelRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunPb.java new file mode 100755 index 000000000..7e1beaf77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CancelRunPb { + @JsonProperty("run_id") + private Long runId; + + public CancelRunPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelRunPb that = (CancelRunPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(CancelRunPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java index 8c956bd00..bca6ba514 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelRunResponse.CancelRunResponseSerializer.class) +@JsonDeserialize(using = CancelRunResponse.CancelRunResponseDeserializer.class) public class CancelRunResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(CancelRunResponse.class).toString(); } + + CancelRunResponsePb toPb() { + CancelRunResponsePb pb = new CancelRunResponsePb(); + + return pb; + } + + static CancelRunResponse fromPb(CancelRunResponsePb pb) { + CancelRunResponse model = new CancelRunResponse(); + + return model; + } + + public static class CancelRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(CancelRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelRunResponseDeserializer extends JsonDeserializer { + @Override + public CancelRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelRunResponsePb pb = mapper.readValue(p, CancelRunResponsePb.class); + return CancelRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponsePb.java new file mode 100755 index 000000000..7cab6f93e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CancelRunResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CancelRunResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java index ede9e9aa1..a02d1cdd7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java @@ -4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Stores the run state of the clean rooms notebook task. */ @Generated +@JsonSerialize(using = CleanRoomTaskRunState.CleanRoomTaskRunStateSerializer.class) +@JsonDeserialize(using = CleanRoomTaskRunState.CleanRoomTaskRunStateDeserializer.class) public class CleanRoomTaskRunState { /** * A value indicating the run's current lifecycle state. This field is always available in the * response. Note: Additional states might be introduced in future releases. */ - @JsonProperty("life_cycle_state") private CleanRoomTaskRunLifeCycleState lifeCycleState; /** * A value indicating the run's result. This field is only available for terminal lifecycle * states. Note: Additional states might be introduced in future releases. 
*/ - @JsonProperty("result_state") private CleanRoomTaskRunResultState resultState; public CleanRoomTaskRunState setLifeCycleState(CleanRoomTaskRunLifeCycleState lifeCycleState) { @@ -63,4 +72,43 @@ public String toString() { .add("resultState", resultState) .toString(); } + + CleanRoomTaskRunStatePb toPb() { + CleanRoomTaskRunStatePb pb = new CleanRoomTaskRunStatePb(); + pb.setLifeCycleState(lifeCycleState); + pb.setResultState(resultState); + + return pb; + } + + static CleanRoomTaskRunState fromPb(CleanRoomTaskRunStatePb pb) { + CleanRoomTaskRunState model = new CleanRoomTaskRunState(); + model.setLifeCycleState(pb.getLifeCycleState()); + model.setResultState(pb.getResultState()); + + return model; + } + + public static class CleanRoomTaskRunStateSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomTaskRunState value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomTaskRunStatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomTaskRunStateDeserializer + extends JsonDeserializer { + @Override + public CleanRoomTaskRunState deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomTaskRunStatePb pb = mapper.readValue(p, CleanRoomTaskRunStatePb.class); + return CleanRoomTaskRunState.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunStatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunStatePb.java new file mode 100755 index 000000000..4a82211be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunStatePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Stores the run state of the clean rooms notebook task. */ +@Generated +class CleanRoomTaskRunStatePb { + @JsonProperty("life_cycle_state") + private CleanRoomTaskRunLifeCycleState lifeCycleState; + + @JsonProperty("result_state") + private CleanRoomTaskRunResultState resultState; + + public CleanRoomTaskRunStatePb setLifeCycleState(CleanRoomTaskRunLifeCycleState lifeCycleState) { + this.lifeCycleState = lifeCycleState; + return this; + } + + public CleanRoomTaskRunLifeCycleState getLifeCycleState() { + return lifeCycleState; + } + + public CleanRoomTaskRunStatePb setResultState(CleanRoomTaskRunResultState resultState) { + this.resultState = resultState; + return this; + } + + public CleanRoomTaskRunResultState getResultState() { + return resultState; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomTaskRunStatePb that = (CleanRoomTaskRunStatePb) o; + return Objects.equals(lifeCycleState, that.lifeCycleState) + && Objects.equals(resultState, that.resultState); + } + + @Override + public int hashCode() { + return Objects.hash(lifeCycleState, resultState); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomTaskRunStatePb.class) + .add("lifeCycleState", lifeCycleState) + .add("resultState", resultState) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java index 2c53eebed..3732458f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java @@ -4,29 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CleanRoomsNotebookTask.CleanRoomsNotebookTaskSerializer.class) +@JsonDeserialize(using = CleanRoomsNotebookTask.CleanRoomsNotebookTaskDeserializer.class) public class CleanRoomsNotebookTask { /** The clean room that the notebook belongs to. */ - @JsonProperty("clean_room_name") private String cleanRoomName; /** * Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the * latest version). It can be fetched by calling the :method:cleanroomassets/get API. */ - @JsonProperty("etag") private String etag; /** Base parameters to be used for the clean room notebook job. */ - @JsonProperty("notebook_base_parameters") private Map notebookBaseParameters; /** Name of the notebook being run. 
*/ - @JsonProperty("notebook_name") private String notebookName; public CleanRoomsNotebookTask setCleanRoomName(String cleanRoomName) { @@ -91,4 +98,47 @@ public String toString() { .add("notebookName", notebookName) .toString(); } + + CleanRoomsNotebookTaskPb toPb() { + CleanRoomsNotebookTaskPb pb = new CleanRoomsNotebookTaskPb(); + pb.setCleanRoomName(cleanRoomName); + pb.setEtag(etag); + pb.setNotebookBaseParameters(notebookBaseParameters); + pb.setNotebookName(notebookName); + + return pb; + } + + static CleanRoomsNotebookTask fromPb(CleanRoomsNotebookTaskPb pb) { + CleanRoomsNotebookTask model = new CleanRoomsNotebookTask(); + model.setCleanRoomName(pb.getCleanRoomName()); + model.setEtag(pb.getEtag()); + model.setNotebookBaseParameters(pb.getNotebookBaseParameters()); + model.setNotebookName(pb.getNotebookName()); + + return model; + } + + public static class CleanRoomsNotebookTaskSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomsNotebookTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CleanRoomsNotebookTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomsNotebookTaskDeserializer + extends JsonDeserializer { + @Override + public CleanRoomsNotebookTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomsNotebookTaskPb pb = mapper.readValue(p, CleanRoomsNotebookTaskPb.class); + return CleanRoomsNotebookTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java index b2b669568..06e9f0eec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput + .CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputSerializer.class) +@JsonDeserialize( + using = + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput + .CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputDeserializer.class) public class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput { /** The run state of the clean rooms notebook task. 
*/ - @JsonProperty("clean_room_job_run_state") private CleanRoomTaskRunState cleanRoomJobRunState; /** The notebook output for the clean room run */ - @JsonProperty("notebook_output") private NotebookOutput notebookOutput; /** Information on how to access the output schema for the clean room run */ - @JsonProperty("output_schema_info") private OutputSchemaInfo outputSchemaInfo; public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput setCleanRoomJobRunState( @@ -75,4 +89,51 @@ public String toString() { .add("outputSchemaInfo", outputSchemaInfo) .toString(); } + + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb toPb() { + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb pb = + new CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb(); + pb.setCleanRoomJobRunState(cleanRoomJobRunState); + pb.setNotebookOutput(notebookOutput); + pb.setOutputSchemaInfo(outputSchemaInfo); + + return pb; + } + + static CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput fromPb( + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb pb) { + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput model = + new CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput(); + model.setCleanRoomJobRunState(pb.getCleanRoomJobRunState()); + model.setNotebookOutput(pb.getNotebookOutput()); + model.setOutputSchemaInfo(pb.getOutputSchemaInfo()); + + return model; + } + + public static class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputDeserializer + extends JsonDeserializer { + @Override + public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput deserialize( + JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb pb = + mapper.readValue(p, CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb.class); + return CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb.java new file mode 100755 index 000000000..fbf018f30 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb { + @JsonProperty("clean_room_job_run_state") + private CleanRoomTaskRunState cleanRoomJobRunState; + + @JsonProperty("notebook_output") + private NotebookOutput notebookOutput; + + @JsonProperty("output_schema_info") + private OutputSchemaInfo outputSchemaInfo; + + public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb setCleanRoomJobRunState( + CleanRoomTaskRunState cleanRoomJobRunState) { + this.cleanRoomJobRunState = cleanRoomJobRunState; + return this; + } + + public CleanRoomTaskRunState getCleanRoomJobRunState() { + return cleanRoomJobRunState; + } + + public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb setNotebookOutput( + NotebookOutput notebookOutput) { + this.notebookOutput = notebookOutput; + return this; + } + 
+ public NotebookOutput getNotebookOutput() { + return notebookOutput; + } + + public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb setOutputSchemaInfo( + OutputSchemaInfo outputSchemaInfo) { + this.outputSchemaInfo = outputSchemaInfo; + return this; + } + + public OutputSchemaInfo getOutputSchemaInfo() { + return outputSchemaInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb that = + (CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb) o; + return Objects.equals(cleanRoomJobRunState, that.cleanRoomJobRunState) + && Objects.equals(notebookOutput, that.notebookOutput) + && Objects.equals(outputSchemaInfo, that.outputSchemaInfo); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoomJobRunState, notebookOutput, outputSchemaInfo); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutputPb.class) + .add("cleanRoomJobRunState", cleanRoomJobRunState) + .add("notebookOutput", notebookOutput) + .add("outputSchemaInfo", outputSchemaInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskPb.java new file mode 100755 index 000000000..8bb310c8d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CleanRoomsNotebookTaskPb { + @JsonProperty("clean_room_name") + private String cleanRoomName; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("notebook_base_parameters") + private Map notebookBaseParameters; + + @JsonProperty("notebook_name") + private String notebookName; + + public CleanRoomsNotebookTaskPb setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + + public CleanRoomsNotebookTaskPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public CleanRoomsNotebookTaskPb setNotebookBaseParameters( + Map notebookBaseParameters) { + this.notebookBaseParameters = notebookBaseParameters; + return this; + } + + public Map getNotebookBaseParameters() { + return notebookBaseParameters; + } + + public CleanRoomsNotebookTaskPb setNotebookName(String notebookName) { + this.notebookName = notebookName; + return this; + } + + public String getNotebookName() { + return notebookName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomsNotebookTaskPb that = (CleanRoomsNotebookTaskPb) o; + return Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(etag, that.etag) + && Objects.equals(notebookBaseParameters, that.notebookBaseParameters) + && Objects.equals(notebookName, that.notebookName); + } + + @Override + public int hashCode() { + return Objects.hash(cleanRoomName, etag, notebookBaseParameters, notebookName); + } + + @Override + public String toString() { + return new 
ToStringer(CleanRoomsNotebookTaskPb.class) + .add("cleanRoomName", cleanRoomName) + .add("etag", etag) + .add("notebookBaseParameters", notebookBaseParameters) + .add("notebookName", notebookName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java index cbeb87b3e..ee53e55c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterInstance.ClusterInstanceSerializer.class) +@JsonDeserialize(using = ClusterInstance.ClusterInstanceDeserializer.class) public class ClusterInstance { /** * The canonical identifier for the cluster used by a run. This field is always available for runs @@ -18,7 +29,6 @@ public class ClusterInstance { * *

The response won’t include this field if the identifier is not available yet. */ - @JsonProperty("cluster_id") private String clusterId; /** @@ -29,7 +39,6 @@ public class ClusterInstance { * *

The response won’t include this field if the identifier is not available yet. */ - @JsonProperty("spark_context_id") private String sparkContextId; public ClusterInstance setClusterId(String clusterId) { @@ -71,4 +80,40 @@ public String toString() { .add("sparkContextId", sparkContextId) .toString(); } + + ClusterInstancePb toPb() { + ClusterInstancePb pb = new ClusterInstancePb(); + pb.setClusterId(clusterId); + pb.setSparkContextId(sparkContextId); + + return pb; + } + + static ClusterInstance fromPb(ClusterInstancePb pb) { + ClusterInstance model = new ClusterInstance(); + model.setClusterId(pb.getClusterId()); + model.setSparkContextId(pb.getSparkContextId()); + + return model; + } + + public static class ClusterInstanceSerializer extends JsonSerializer { + @Override + public void serialize(ClusterInstance value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterInstancePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterInstanceDeserializer extends JsonDeserializer { + @Override + public ClusterInstance deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterInstancePb pb = mapper.readValue(p, ClusterInstancePb.class); + return ClusterInstance.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstancePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstancePb.java new file mode 100755 index 000000000..746ee062f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstancePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterInstancePb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("spark_context_id") + private String sparkContextId; + + public ClusterInstancePb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ClusterInstancePb setSparkContextId(String sparkContextId) { + this.sparkContextId = sparkContextId; + return this; + } + + public String getSparkContextId() { + return sparkContextId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterInstancePb that = (ClusterInstancePb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(sparkContextId, that.sparkContextId); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, sparkContextId); + } + + @Override + public String toString() { + return new ToStringer(ClusterInstancePb.class) + .add("clusterId", clusterId) + .add("sparkContextId", sparkContextId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java index 4e080a1e3..3178c1a5d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java @@ -4,36 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterSpec.ClusterSpecSerializer.class) +@JsonDeserialize(using = ClusterSpec.ClusterSpecDeserializer.class) public class ClusterSpec { /** * If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops * responding. We suggest running jobs and tasks on new clusters for greater reliability */ - @JsonProperty("existing_cluster_id") private String existingClusterId; /** * If job_cluster_key, this task is executed reusing the cluster specified in * `job.settings.job_clusters`. */ - @JsonProperty("job_cluster_key") private String jobClusterKey; /** * An optional list of libraries to be installed on the cluster. The default value is an empty * list. */ - @JsonProperty("libraries") private Collection libraries; /** If new_cluster, a description of a new cluster that is created for each run. 
*/ - @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; public ClusterSpec setExistingClusterId(String existingClusterId) { @@ -98,4 +105,43 @@ public String toString() { .add("newCluster", newCluster) .toString(); } + + ClusterSpecPb toPb() { + ClusterSpecPb pb = new ClusterSpecPb(); + pb.setExistingClusterId(existingClusterId); + pb.setJobClusterKey(jobClusterKey); + pb.setLibraries(libraries); + pb.setNewCluster(newCluster); + + return pb; + } + + static ClusterSpec fromPb(ClusterSpecPb pb) { + ClusterSpec model = new ClusterSpec(); + model.setExistingClusterId(pb.getExistingClusterId()); + model.setJobClusterKey(pb.getJobClusterKey()); + model.setLibraries(pb.getLibraries()); + model.setNewCluster(pb.getNewCluster()); + + return model; + } + + public static class ClusterSpecSerializer extends JsonSerializer { + @Override + public void serialize(ClusterSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterSpecDeserializer extends JsonDeserializer { + @Override + public ClusterSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterSpecPb pb = mapper.readValue(p, ClusterSpecPb.class); + return ClusterSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpecPb.java new file mode 100755 index 000000000..7d1cfddd1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpecPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ClusterSpecPb { + @JsonProperty("existing_cluster_id") + private String existingClusterId; + + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("new_cluster") + private com.databricks.sdk.service.compute.ClusterSpec newCluster; + + public ClusterSpecPb setExistingClusterId(String existingClusterId) { + this.existingClusterId = existingClusterId; + return this; + } + + public String getExistingClusterId() { + return existingClusterId; + } + + public ClusterSpecPb setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + + public ClusterSpecPb setLibraries( + Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public ClusterSpecPb setNewCluster(com.databricks.sdk.service.compute.ClusterSpec newCluster) { + this.newCluster = newCluster; + return this; + } + + public com.databricks.sdk.service.compute.ClusterSpec getNewCluster() { + return newCluster; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterSpecPb that = (ClusterSpecPb) o; + return Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(jobClusterKey, that.jobClusterKey) + && Objects.equals(libraries, that.libraries) + && Objects.equals(newCluster, that.newCluster); + } + + @Override + public int hashCode() { + return Objects.hash(existingClusterId, jobClusterKey, libraries, newCluster); + } + + @Override + public String 
toString() { + return new ToStringer(ClusterSpecPb.class) + .add("existingClusterId", existingClusterId) + .add("jobClusterKey", jobClusterKey) + .add("libraries", libraries) + .add("newCluster", newCluster) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java index af181cd7f..c7ec1ad94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ComputeConfig.ComputeConfigSerializer.class) +@JsonDeserialize(using = ComputeConfig.ComputeConfigDeserializer.class) public class ComputeConfig { /** IDof the GPU pool to use. */ - @JsonProperty("gpu_node_pool_id") private String gpuNodePoolId; /** GPU type. */ - @JsonProperty("gpu_type") private String gpuType; /** Number of GPUs. 
*/ - @JsonProperty("num_gpus") private Long numGpus; public ComputeConfig setGpuNodePoolId(String gpuNodePoolId) { @@ -71,4 +79,41 @@ public String toString() { .add("numGpus", numGpus) .toString(); } + + ComputeConfigPb toPb() { + ComputeConfigPb pb = new ComputeConfigPb(); + pb.setGpuNodePoolId(gpuNodePoolId); + pb.setGpuType(gpuType); + pb.setNumGpus(numGpus); + + return pb; + } + + static ComputeConfig fromPb(ComputeConfigPb pb) { + ComputeConfig model = new ComputeConfig(); + model.setGpuNodePoolId(pb.getGpuNodePoolId()); + model.setGpuType(pb.getGpuType()); + model.setNumGpus(pb.getNumGpus()); + + return model; + } + + public static class ComputeConfigSerializer extends JsonSerializer { + @Override + public void serialize(ComputeConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComputeConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComputeConfigDeserializer extends JsonDeserializer { + @Override + public ComputeConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComputeConfigPb pb = mapper.readValue(p, ComputeConfigPb.class); + return ComputeConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfigPb.java new file mode 100755 index 000000000..fa985343d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfigPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ComputeConfigPb { + @JsonProperty("gpu_node_pool_id") + private String gpuNodePoolId; + + @JsonProperty("gpu_type") + private String gpuType; + + @JsonProperty("num_gpus") + private Long numGpus; + + public ComputeConfigPb setGpuNodePoolId(String gpuNodePoolId) { + this.gpuNodePoolId = gpuNodePoolId; + return this; + } + + public String getGpuNodePoolId() { + return gpuNodePoolId; + } + + public ComputeConfigPb setGpuType(String gpuType) { + this.gpuType = gpuType; + return this; + } + + public String getGpuType() { + return gpuType; + } + + public ComputeConfigPb setNumGpus(Long numGpus) { + this.numGpus = numGpus; + return this; + } + + public Long getNumGpus() { + return numGpus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComputeConfigPb that = (ComputeConfigPb) o; + return Objects.equals(gpuNodePoolId, that.gpuNodePoolId) + && Objects.equals(gpuType, that.gpuType) + && Objects.equals(numGpus, that.numGpus); + } + + @Override + public int hashCode() { + return Objects.hash(gpuNodePoolId, gpuType, numGpus); + } + + @Override + public String toString() { + return new ToStringer(ComputeConfigPb.class) + .add("gpuNodePoolId", gpuNodePoolId) + .add("gpuType", gpuType) + .add("numGpus", numGpus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java index 3a5badda6..be1bb51f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java @@ -4,16 +4,26 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ConditionTask.ConditionTaskSerializer.class) +@JsonDeserialize(using = ConditionTask.ConditionTaskDeserializer.class) public class ConditionTask { /** * The left operand of the condition task. Can be either a string value or a job state or * parameter reference. */ - @JsonProperty("left") private String left; /** @@ -26,14 +36,12 @@ public class ConditionTask { * `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or * `“false”` for the comparison. */ - @JsonProperty("op") private ConditionTaskOp op; /** * The right operand of the condition task. Can be either a string value or a job state or * parameter reference. 
*/ - @JsonProperty("right") private String right; public ConditionTask setLeft(String left) { @@ -86,4 +94,41 @@ public String toString() { .add("right", right) .toString(); } + + ConditionTaskPb toPb() { + ConditionTaskPb pb = new ConditionTaskPb(); + pb.setLeft(left); + pb.setOp(op); + pb.setRight(right); + + return pb; + } + + static ConditionTask fromPb(ConditionTaskPb pb) { + ConditionTask model = new ConditionTask(); + model.setLeft(pb.getLeft()); + model.setOp(pb.getOp()); + model.setRight(pb.getRight()); + + return model; + } + + public static class ConditionTaskSerializer extends JsonSerializer { + @Override + public void serialize(ConditionTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ConditionTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ConditionTaskDeserializer extends JsonDeserializer { + @Override + public ConditionTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ConditionTaskPb pb = mapper.readValue(p, ConditionTaskPb.class); + return ConditionTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskPb.java new file mode 100755 index 000000000..0b16d1ec7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ConditionTaskPb { + @JsonProperty("left") + private String left; + + @JsonProperty("op") + private ConditionTaskOp op; + + @JsonProperty("right") + private String right; + + public ConditionTaskPb setLeft(String left) { + this.left = left; + return this; + } + + public String getLeft() { + return left; + } + + public ConditionTaskPb setOp(ConditionTaskOp op) { + this.op = op; + return this; + } + + public ConditionTaskOp getOp() { + return op; + } + + public ConditionTaskPb setRight(String right) { + this.right = right; + return this; + } + + public String getRight() { + return right; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConditionTaskPb that = (ConditionTaskPb) o; + return Objects.equals(left, that.left) + && Objects.equals(op, that.op) + && Objects.equals(right, that.right); + } + + @Override + public int hashCode() { + return Objects.hash(left, op, right); + } + + @Override + public String toString() { + return new ToStringer(ConditionTaskPb.class) + .add("left", left) + .add("op", op) + .add("right", right) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java index 7b239e250..dec648a17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Continuous.ContinuousSerializer.class) +@JsonDeserialize(using = Continuous.ContinuousDeserializer.class) public class Continuous { /** * Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED. */ - @JsonProperty("pause_status") private PauseStatus pauseStatus; public Continuous setPauseStatus(PauseStatus pauseStatus) { @@ -41,4 +51,37 @@ public int hashCode() { public String toString() { return new ToStringer(Continuous.class).add("pauseStatus", pauseStatus).toString(); } + + ContinuousPb toPb() { + ContinuousPb pb = new ContinuousPb(); + pb.setPauseStatus(pauseStatus); + + return pb; + } + + static Continuous fromPb(ContinuousPb pb) { + Continuous model = new Continuous(); + model.setPauseStatus(pb.getPauseStatus()); + + return model; + } + + public static class ContinuousSerializer extends JsonSerializer { + @Override + public void serialize(Continuous value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ContinuousPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ContinuousDeserializer extends JsonDeserializer { + @Override + public Continuous deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ContinuousPb pb = mapper.readValue(p, ContinuousPb.class); + return Continuous.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ContinuousPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ContinuousPb.java new file mode 100755 index 000000000..7765640eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ContinuousPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ContinuousPb { + @JsonProperty("pause_status") + private PauseStatus pauseStatus; + + public ContinuousPb setPauseStatus(PauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public PauseStatus getPauseStatus() { + return pauseStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContinuousPb that = (ContinuousPb) o; + return Objects.equals(pauseStatus, that.pauseStatus); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus); + } + + @Override + public String toString() { + return new ToStringer(ContinuousPb.class).add("pauseStatus", pauseStatus).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Converters.java new file mode 100755 index 000000000..2385cc851 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.jobs; + +import com.google.protobuf.FieldMask; +import 
com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be 
null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index 527597bec..d2237828a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateJob.CreateJobSerializer.class) +@JsonDeserialize(using = CreateJob.CreateJobDeserializer.class) public class CreateJob { /** List of permissions to set on the job. */ - @JsonProperty("access_control_list") private Collection accessControlList; /** @@ -20,24 +30,20 @@ public class CreateJob { * budget policy may be applied when creating or modifying the job. See * `effective_budget_policy_id` for the budget policy used by this workload. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** * An optional continuous property for this job. The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. 
*/ - @JsonProperty("continuous") private Continuous continuous; /** Deployment information for jobs managed by external sources. */ - @JsonProperty("deployment") private JobDeployment deployment; /** * An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. */ - @JsonProperty("description") private String description; /** @@ -46,14 +52,12 @@ public class CreateJob { *

* `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job * is in an editable state and can be modified. */ - @JsonProperty("edit_mode") private JobEditMode editMode; /** * An optional set of email addresses that is notified when runs of this job begin or complete as * well as when this job is deleted. */ - @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; /** @@ -63,14 +67,12 @@ public class CreateJob { * serverless tasks, the task environment is required to be specified using environment_key in the * task settings. */ - @JsonProperty("environments") private Collection environments; /** * Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. * When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. */ - @JsonProperty("format") private Format format; /** @@ -84,11 +86,9 @@ public class CreateJob { *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** An optional set of health rules that can be defined for this job. */ - @JsonProperty("health") private JobsHealthRules health; /** @@ -96,7 +96,6 @@ public class CreateJob { * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in * task settings. */ - @JsonProperty("job_clusters") private Collection jobClusters; /** @@ -109,22 +108,18 @@ public class CreateJob { * runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ - @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; /** An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding. */ - @JsonProperty("name") private String name; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this job. */ - @JsonProperty("notification_settings") private JobNotificationSettings notificationSettings; /** Job-level parameter definitions */ - @JsonProperty("parameters") private Collection parameters; /** @@ -135,11 +130,9 @@ public class CreateJob { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("performance_target") private PerformanceTarget performanceTarget; /** The queue settings of the job. */ - @JsonProperty("queue") private QueueSettings queue; /** @@ -149,14 +142,12 @@ public class CreateJob { *

Either `user_name` or `service_principal_name` should be specified. If not, an error is * thrown. */ - @JsonProperty("run_as") private JobRunAs runAs; /** * An optional periodic schedule for this job. The default behavior is that the job only runs when * triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. */ - @JsonProperty("schedule") private CronSchedule schedule; /** @@ -164,7 +155,6 @@ public class CreateJob { * jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags * can be added to the job. */ - @JsonProperty("tags") private Map tags; /** @@ -174,11 +164,9 @@ public class CreateJob { * available, you can paginate through them using :method:jobs/get. Use the `next_page_token` * field at the object root to determine if more results are available. */ - @JsonProperty("tasks") private Collection tasks; /** An optional timeout applied to each run of this job. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** @@ -186,13 +174,14 @@ public class CreateJob { * the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request * to `runNow`. */ - @JsonProperty("trigger") private TriggerSettings trigger; /** A collection of system notification IDs to notify when runs of this job begin or complete. 
*/ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; + /** */ + private AllWellKnown wellKnown; + public CreateJob setAccessControlList(Collection accessControlList) { this.accessControlList = accessControlList; return this; @@ -418,6 +407,15 @@ public WebhookNotifications getWebhookNotifications() { return webhookNotifications; } + public CreateJob setWellKnown(AllWellKnown wellKnown) { + this.wellKnown = wellKnown; + return this; + } + + public AllWellKnown getWellKnown() { + return wellKnown; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -447,7 +445,8 @@ public boolean equals(Object o) { && Objects.equals(tasks, that.tasks) && Objects.equals(timeoutSeconds, that.timeoutSeconds) && Objects.equals(trigger, that.trigger) - && Objects.equals(webhookNotifications, that.webhookNotifications); + && Objects.equals(webhookNotifications, that.webhookNotifications) + && Objects.equals(wellKnown, that.wellKnown); } @Override @@ -477,7 +476,8 @@ public int hashCode() { tasks, timeoutSeconds, trigger, - webhookNotifications); + webhookNotifications, + wellKnown); } @Override @@ -508,6 +508,90 @@ public String toString() { .add("timeoutSeconds", timeoutSeconds) .add("trigger", trigger) .add("webhookNotifications", webhookNotifications) + .add("wellKnown", wellKnown) .toString(); } + + CreateJobPb toPb() { + CreateJobPb pb = new CreateJobPb(); + pb.setAccessControlList(accessControlList); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setContinuous(continuous); + pb.setDeployment(deployment); + pb.setDescription(description); + pb.setEditMode(editMode); + pb.setEmailNotifications(emailNotifications); + pb.setEnvironments(environments); + pb.setFormat(format); + pb.setGitSource(gitSource); + pb.setHealth(health); + pb.setJobClusters(jobClusters); + pb.setMaxConcurrentRuns(maxConcurrentRuns); + pb.setName(name); + pb.setNotificationSettings(notificationSettings); + pb.setParameters(parameters); + 
pb.setPerformanceTarget(performanceTarget); + pb.setQueue(queue); + pb.setRunAs(runAs); + pb.setSchedule(schedule); + pb.setTags(tags); + pb.setTasks(tasks); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setTrigger(trigger); + pb.setWebhookNotifications(webhookNotifications); + pb.setWellKnown(wellKnown); + + return pb; + } + + static CreateJob fromPb(CreateJobPb pb) { + CreateJob model = new CreateJob(); + model.setAccessControlList(pb.getAccessControlList()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setContinuous(pb.getContinuous()); + model.setDeployment(pb.getDeployment()); + model.setDescription(pb.getDescription()); + model.setEditMode(pb.getEditMode()); + model.setEmailNotifications(pb.getEmailNotifications()); + model.setEnvironments(pb.getEnvironments()); + model.setFormat(pb.getFormat()); + model.setGitSource(pb.getGitSource()); + model.setHealth(pb.getHealth()); + model.setJobClusters(pb.getJobClusters()); + model.setMaxConcurrentRuns(pb.getMaxConcurrentRuns()); + model.setName(pb.getName()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setParameters(pb.getParameters()); + model.setPerformanceTarget(pb.getPerformanceTarget()); + model.setQueue(pb.getQueue()); + model.setRunAs(pb.getRunAs()); + model.setSchedule(pb.getSchedule()); + model.setTags(pb.getTags()); + model.setTasks(pb.getTasks()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setTrigger(pb.getTrigger()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + model.setWellKnown(pb.getWellKnown()); + + return model; + } + + public static class CreateJobSerializer extends JsonSerializer { + @Override + public void serialize(CreateJob value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateJobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateJobDeserializer extends JsonDeserializer { + @Override + public CreateJob deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateJobPb pb = mapper.readValue(p, CreateJobPb.class); + return CreateJob.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobPb.java new file mode 100755 index 000000000..0631506ed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobPb.java @@ -0,0 +1,421 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateJobPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("continuous") + private Continuous continuous; + + @JsonProperty("deployment") + private JobDeployment deployment; + + @JsonProperty("description") + private String description; + + @JsonProperty("edit_mode") + private JobEditMode editMode; + + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + + @JsonProperty("environments") + private Collection environments; + + @JsonProperty("format") + private Format format; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("health") + private JobsHealthRules health; + + @JsonProperty("job_clusters") + private Collection jobClusters; + + @JsonProperty("max_concurrent_runs") + private Long maxConcurrentRuns; + + @JsonProperty("name") + private String name; + + 
@JsonProperty("notification_settings") + private JobNotificationSettings notificationSettings; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("performance_target") + private PerformanceTarget performanceTarget; + + @JsonProperty("queue") + private QueueSettings queue; + + @JsonProperty("run_as") + private JobRunAs runAs; + + @JsonProperty("schedule") + private CronSchedule schedule; + + @JsonProperty("tags") + private Map tags; + + @JsonProperty("tasks") + private Collection tasks; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("trigger") + private TriggerSettings trigger; + + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + + @JsonProperty("well_known") + private AllWellKnown wellKnown; + + public CreateJobPb setAccessControlList(Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public CreateJobPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreateJobPb setContinuous(Continuous continuous) { + this.continuous = continuous; + return this; + } + + public Continuous getContinuous() { + return continuous; + } + + public CreateJobPb setDeployment(JobDeployment deployment) { + this.deployment = deployment; + return this; + } + + public JobDeployment getDeployment() { + return deployment; + } + + public CreateJobPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateJobPb setEditMode(JobEditMode editMode) { + this.editMode = editMode; + return this; + } + + public JobEditMode getEditMode() { + return editMode; + } + + public CreateJobPb 
setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public CreateJobPb setEnvironments(Collection environments) { + this.environments = environments; + return this; + } + + public Collection getEnvironments() { + return environments; + } + + public CreateJobPb setFormat(Format format) { + this.format = format; + return this; + } + + public Format getFormat() { + return format; + } + + public CreateJobPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public CreateJobPb setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + + public CreateJobPb setJobClusters(Collection jobClusters) { + this.jobClusters = jobClusters; + return this; + } + + public Collection getJobClusters() { + return jobClusters; + } + + public CreateJobPb setMaxConcurrentRuns(Long maxConcurrentRuns) { + this.maxConcurrentRuns = maxConcurrentRuns; + return this; + } + + public Long getMaxConcurrentRuns() { + return maxConcurrentRuns; + } + + public CreateJobPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateJobPb setNotificationSettings(JobNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public JobNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public CreateJobPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public CreateJobPb setPerformanceTarget(PerformanceTarget performanceTarget) { + this.performanceTarget = performanceTarget; + 
return this; + } + + public PerformanceTarget getPerformanceTarget() { + return performanceTarget; + } + + public CreateJobPb setQueue(QueueSettings queue) { + this.queue = queue; + return this; + } + + public QueueSettings getQueue() { + return queue; + } + + public CreateJobPb setRunAs(JobRunAs runAs) { + this.runAs = runAs; + return this; + } + + public JobRunAs getRunAs() { + return runAs; + } + + public CreateJobPb setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public CreateJobPb setTags(Map tags) { + this.tags = tags; + return this; + } + + public Map getTags() { + return tags; + } + + public CreateJobPb setTasks(Collection tasks) { + this.tasks = tasks; + return this; + } + + public Collection getTasks() { + return tasks; + } + + public CreateJobPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public CreateJobPb setTrigger(TriggerSettings trigger) { + this.trigger = trigger; + return this; + } + + public TriggerSettings getTrigger() { + return trigger; + } + + public CreateJobPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + public CreateJobPb setWellKnown(AllWellKnown wellKnown) { + this.wellKnown = wellKnown; + return this; + } + + public AllWellKnown getWellKnown() { + return wellKnown; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateJobPb that = (CreateJobPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(continuous, that.continuous) + && 
Objects.equals(deployment, that.deployment) + && Objects.equals(description, that.description) + && Objects.equals(editMode, that.editMode) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environments, that.environments) + && Objects.equals(format, that.format) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) + && Objects.equals(jobClusters, that.jobClusters) + && Objects.equals(maxConcurrentRuns, that.maxConcurrentRuns) + && Objects.equals(name, that.name) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(parameters, that.parameters) + && Objects.equals(performanceTarget, that.performanceTarget) + && Objects.equals(queue, that.queue) + && Objects.equals(runAs, that.runAs) + && Objects.equals(schedule, that.schedule) + && Objects.equals(tags, that.tags) + && Objects.equals(tasks, that.tasks) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(trigger, that.trigger) + && Objects.equals(webhookNotifications, that.webhookNotifications) + && Objects.equals(wellKnown, that.wellKnown); + } + + @Override + public int hashCode() { + return Objects.hash( + accessControlList, + budgetPolicyId, + continuous, + deployment, + description, + editMode, + emailNotifications, + environments, + format, + gitSource, + health, + jobClusters, + maxConcurrentRuns, + name, + notificationSettings, + parameters, + performanceTarget, + queue, + runAs, + schedule, + tags, + tasks, + timeoutSeconds, + trigger, + webhookNotifications, + wellKnown); + } + + @Override + public String toString() { + return new ToStringer(CreateJobPb.class) + .add("accessControlList", accessControlList) + .add("budgetPolicyId", budgetPolicyId) + .add("continuous", continuous) + .add("deployment", deployment) + .add("description", description) + .add("editMode", editMode) + .add("emailNotifications", emailNotifications) + .add("environments", environments) + .add("format", 
format) + .add("gitSource", gitSource) + .add("health", health) + .add("jobClusters", jobClusters) + .add("maxConcurrentRuns", maxConcurrentRuns) + .add("name", name) + .add("notificationSettings", notificationSettings) + .add("parameters", parameters) + .add("performanceTarget", performanceTarget) + .add("queue", queue) + .add("runAs", runAs) + .add("schedule", schedule) + .add("tags", tags) + .add("tasks", tasks) + .add("timeoutSeconds", timeoutSeconds) + .add("trigger", trigger) + .add("webhookNotifications", webhookNotifications) + .add("wellKnown", wellKnown) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java index 30e422378..1e5c15211 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Job was created successfully */ @Generated +@JsonSerialize(using = CreateResponse.CreateResponseSerializer.class) +@JsonDeserialize(using = CreateResponse.CreateResponseDeserializer.class) public class CreateResponse { /** The canonical identifier for the newly 
created job. */ - @JsonProperty("job_id") private Long jobId; public CreateResponse setJobId(Long jobId) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(CreateResponse.class).add("jobId", jobId).toString(); } + + CreateResponsePb toPb() { + CreateResponsePb pb = new CreateResponsePb(); + pb.setJobId(jobId); + + return pb; + } + + static CreateResponse fromPb(CreateResponsePb pb) { + CreateResponse model = new CreateResponse(); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class CreateResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateResponseDeserializer extends JsonDeserializer { + @Override + public CreateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateResponsePb pb = mapper.readValue(p, CreateResponsePb.class); + return CreateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponsePb.java new file mode 100755 index 000000000..620114da3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Job was created successfully */ +@Generated +class CreateResponsePb { + @JsonProperty("job_id") + private Long jobId; + + public CreateResponsePb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateResponsePb that = (CreateResponsePb) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public String toString() { + return new ToStringer(CreateResponsePb.class).add("jobId", jobId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java index 6cbe5d60a..a3086f48c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CronSchedule.CronScheduleSerializer.class) +@JsonDeserialize(using = CronSchedule.CronScheduleDeserializer.class) public class CronSchedule { /** Indicate whether this schedule is paused or not. */ - @JsonProperty("pause_status") private PauseStatus pauseStatus; /** @@ -20,7 +30,6 @@ public class CronSchedule { *

[Cron Trigger]: * http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html */ - @JsonProperty("quartz_cron_expression") private String quartzCronExpression; /** @@ -29,7 +38,6 @@ public class CronSchedule { * *

[Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html */ - @JsonProperty("timezone_id") private String timezoneId; public CronSchedule setPauseStatus(PauseStatus pauseStatus) { @@ -82,4 +90,41 @@ public String toString() { .add("timezoneId", timezoneId) .toString(); } + + CronSchedulePb toPb() { + CronSchedulePb pb = new CronSchedulePb(); + pb.setPauseStatus(pauseStatus); + pb.setQuartzCronExpression(quartzCronExpression); + pb.setTimezoneId(timezoneId); + + return pb; + } + + static CronSchedule fromPb(CronSchedulePb pb) { + CronSchedule model = new CronSchedule(); + model.setPauseStatus(pb.getPauseStatus()); + model.setQuartzCronExpression(pb.getQuartzCronExpression()); + model.setTimezoneId(pb.getTimezoneId()); + + return model; + } + + public static class CronScheduleSerializer extends JsonSerializer { + @Override + public void serialize(CronSchedule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CronSchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CronScheduleDeserializer extends JsonDeserializer { + @Override + public CronSchedule deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CronSchedulePb pb = mapper.readValue(p, CronSchedulePb.class); + return CronSchedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedulePb.java new file mode 100755 index 000000000..7b9d4393b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedulePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CronSchedulePb { + @JsonProperty("pause_status") + private PauseStatus pauseStatus; + + @JsonProperty("quartz_cron_expression") + private String quartzCronExpression; + + @JsonProperty("timezone_id") + private String timezoneId; + + public CronSchedulePb setPauseStatus(PauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public PauseStatus getPauseStatus() { + return pauseStatus; + } + + public CronSchedulePb setQuartzCronExpression(String quartzCronExpression) { + this.quartzCronExpression = quartzCronExpression; + return this; + } + + public String getQuartzCronExpression() { + return quartzCronExpression; + } + + public CronSchedulePb setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronSchedulePb that = (CronSchedulePb) o; + return Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(quartzCronExpression, that.quartzCronExpression) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus, quartzCronExpression, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(CronSchedulePb.class) + .add("pauseStatus", pauseStatus) + .add("quartzCronExpression", quartzCronExpression) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java index 
aba88b35f..1d16de9c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardPageSnapshot.DashboardPageSnapshotSerializer.class) +@JsonDeserialize(using = DashboardPageSnapshot.DashboardPageSnapshotDeserializer.class) public class DashboardPageSnapshot { /** */ - @JsonProperty("page_display_name") private String pageDisplayName; /** */ - @JsonProperty("widget_error_details") private Collection widgetErrorDetails; public DashboardPageSnapshot setPageDisplayName(String pageDisplayName) { @@ -58,4 +67,43 @@ public String toString() { .add("widgetErrorDetails", widgetErrorDetails) .toString(); } + + DashboardPageSnapshotPb toPb() { + DashboardPageSnapshotPb pb = new DashboardPageSnapshotPb(); + pb.setPageDisplayName(pageDisplayName); + pb.setWidgetErrorDetails(widgetErrorDetails); + + return pb; + } + + static DashboardPageSnapshot fromPb(DashboardPageSnapshotPb pb) { + DashboardPageSnapshot model = new DashboardPageSnapshot(); + model.setPageDisplayName(pb.getPageDisplayName()); + 
model.setWidgetErrorDetails(pb.getWidgetErrorDetails()); + + return model; + } + + public static class DashboardPageSnapshotSerializer + extends JsonSerializer { + @Override + public void serialize( + DashboardPageSnapshot value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardPageSnapshotPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardPageSnapshotDeserializer + extends JsonDeserializer { + @Override + public DashboardPageSnapshot deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardPageSnapshotPb pb = mapper.readValue(p, DashboardPageSnapshotPb.class); + return DashboardPageSnapshot.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshotPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshotPb.java new file mode 100755 index 000000000..bbd6afcb4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshotPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DashboardPageSnapshotPb { + @JsonProperty("page_display_name") + private String pageDisplayName; + + @JsonProperty("widget_error_details") + private Collection widgetErrorDetails; + + public DashboardPageSnapshotPb setPageDisplayName(String pageDisplayName) { + this.pageDisplayName = pageDisplayName; + return this; + } + + public String getPageDisplayName() { + return pageDisplayName; + } + + public DashboardPageSnapshotPb setWidgetErrorDetails( + Collection widgetErrorDetails) { + this.widgetErrorDetails = widgetErrorDetails; + return this; + } + + public Collection getWidgetErrorDetails() { + return widgetErrorDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardPageSnapshotPb that = (DashboardPageSnapshotPb) o; + return Objects.equals(pageDisplayName, that.pageDisplayName) + && Objects.equals(widgetErrorDetails, that.widgetErrorDetails); + } + + @Override + public int hashCode() { + return Objects.hash(pageDisplayName, widgetErrorDetails); + } + + @Override + public String toString() { + return new ToStringer(DashboardPageSnapshotPb.class) + .add("pageDisplayName", pageDisplayName) + .add("widgetErrorDetails", widgetErrorDetails) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java index 9037bdd49..a04a80ad4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java @@ -4,25 +4,33 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Configures the Lakeview Dashboard job task type. */ @Generated +@JsonSerialize(using = DashboardTask.DashboardTaskSerializer.class) +@JsonDeserialize(using = DashboardTask.DashboardTaskDeserializer.class) public class DashboardTask { /** The identifier of the dashboard to refresh. */ - @JsonProperty("dashboard_id") private String dashboardId; /** Optional: subscription configuration for sending the dashboard snapshot. */ - @JsonProperty("subscription") private Subscription subscription; /** * Optional: The warehouse id to execute the dashboard with for the schedule. If not specified, * the default warehouse of the dashboard will be used. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public DashboardTask setDashboardId(String dashboardId) { @@ -75,4 +83,41 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + DashboardTaskPb toPb() { + DashboardTaskPb pb = new DashboardTaskPb(); + pb.setDashboardId(dashboardId); + pb.setSubscription(subscription); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static DashboardTask fromPb(DashboardTaskPb pb) { + DashboardTask model = new DashboardTask(); + model.setDashboardId(pb.getDashboardId()); + model.setSubscription(pb.getSubscription()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class DashboardTaskSerializer extends JsonSerializer { + @Override + public void serialize(DashboardTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardTaskDeserializer extends JsonDeserializer { + @Override + public DashboardTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardTaskPb pb = mapper.readValue(p, DashboardTaskPb.class); + return DashboardTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java index 51ccfd9ea..d920f22b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardTaskOutput.DashboardTaskOutputSerializer.class) +@JsonDeserialize(using = DashboardTaskOutput.DashboardTaskOutputDeserializer.class) public class DashboardTaskOutput { /** Should only be populated for manual PDF download jobs. 
*/ - @JsonProperty("page_snapshots") private Collection pageSnapshots; public DashboardTaskOutput setPageSnapshots(Collection pageSnapshots) { @@ -40,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(DashboardTaskOutput.class).add("pageSnapshots", pageSnapshots).toString(); } + + DashboardTaskOutputPb toPb() { + DashboardTaskOutputPb pb = new DashboardTaskOutputPb(); + pb.setPageSnapshots(pageSnapshots); + + return pb; + } + + static DashboardTaskOutput fromPb(DashboardTaskOutputPb pb) { + DashboardTaskOutput model = new DashboardTaskOutput(); + model.setPageSnapshots(pb.getPageSnapshots()); + + return model; + } + + public static class DashboardTaskOutputSerializer extends JsonSerializer { + @Override + public void serialize(DashboardTaskOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardTaskOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardTaskOutputDeserializer + extends JsonDeserializer { + @Override + public DashboardTaskOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardTaskOutputPb pb = mapper.readValue(p, DashboardTaskOutputPb.class); + return DashboardTaskOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutputPb.java new file mode 100755 index 000000000..8c481a91d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutputPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DashboardTaskOutputPb { + @JsonProperty("page_snapshots") + private Collection pageSnapshots; + + public DashboardTaskOutputPb setPageSnapshots(Collection pageSnapshots) { + this.pageSnapshots = pageSnapshots; + return this; + } + + public Collection getPageSnapshots() { + return pageSnapshots; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardTaskOutputPb that = (DashboardTaskOutputPb) o; + return Objects.equals(pageSnapshots, that.pageSnapshots); + } + + @Override + public int hashCode() { + return Objects.hash(pageSnapshots); + } + + @Override + public String toString() { + return new ToStringer(DashboardTaskOutputPb.class) + .add("pageSnapshots", pageSnapshots) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskPb.java new file mode 100755 index 000000000..ee6bb4a55 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configures the Lakeview Dashboard job task type. 
*/ +@Generated +class DashboardTaskPb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("subscription") + private Subscription subscription; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public DashboardTaskPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DashboardTaskPb setSubscription(Subscription subscription) { + this.subscription = subscription; + return this; + } + + public Subscription getSubscription() { + return subscription; + } + + public DashboardTaskPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardTaskPb that = (DashboardTaskPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(subscription, that.subscription) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, subscription, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DashboardTaskPb.class) + .add("dashboardId", dashboardId) + .add("subscription", subscription) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java new file mode 100755 index 000000000..7f2d95040 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Format of response retrieved from dbt Cloud, for inclusion in output */ +@Generated +@JsonSerialize(using = DbtCloudJobRunStep.DbtCloudJobRunStepSerializer.class) +@JsonDeserialize(using = DbtCloudJobRunStep.DbtCloudJobRunStepDeserializer.class) +public class DbtCloudJobRunStep { + /** Orders the steps in the job */ + private Long index; + + /** Output of the step */ + private String logs; + + /** Name of the step in the job */ + private String name; + + /** State of the step */ + private DbtCloudRunStatus status; + + public DbtCloudJobRunStep setIndex(Long index) { + this.index = index; + return this; + } + + public Long getIndex() { + return index; + } + + public DbtCloudJobRunStep setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + public DbtCloudJobRunStep setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DbtCloudJobRunStep setStatus(DbtCloudRunStatus status) { + this.status = status; + return this; + } + + public DbtCloudRunStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudJobRunStep 
that = (DbtCloudJobRunStep) o; + return Objects.equals(index, that.index) + && Objects.equals(logs, that.logs) + && Objects.equals(name, that.name) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(index, logs, name, status); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudJobRunStep.class) + .add("index", index) + .add("logs", logs) + .add("name", name) + .add("status", status) + .toString(); + } + + DbtCloudJobRunStepPb toPb() { + DbtCloudJobRunStepPb pb = new DbtCloudJobRunStepPb(); + pb.setIndex(index); + pb.setLogs(logs); + pb.setName(name); + pb.setStatus(status); + + return pb; + } + + static DbtCloudJobRunStep fromPb(DbtCloudJobRunStepPb pb) { + DbtCloudJobRunStep model = new DbtCloudJobRunStep(); + model.setIndex(pb.getIndex()); + model.setLogs(pb.getLogs()); + model.setName(pb.getName()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class DbtCloudJobRunStepSerializer extends JsonSerializer { + @Override + public void serialize(DbtCloudJobRunStep value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbtCloudJobRunStepPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbtCloudJobRunStepDeserializer extends JsonDeserializer { + @Override + public DbtCloudJobRunStep deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbtCloudJobRunStepPb pb = mapper.readValue(p, DbtCloudJobRunStepPb.class); + return DbtCloudJobRunStep.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStepPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStepPb.java new file mode 100755 index 000000000..1a7f60505 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStepPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Format of response retrieved from dbt Cloud, for inclusion in output */ +@Generated +class DbtCloudJobRunStepPb { + @JsonProperty("index") + private Long index; + + @JsonProperty("logs") + private String logs; + + @JsonProperty("name") + private String name; + + @JsonProperty("status") + private DbtCloudRunStatus status; + + public DbtCloudJobRunStepPb setIndex(Long index) { + this.index = index; + return this; + } + + public Long getIndex() { + return index; + } + + public DbtCloudJobRunStepPb setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + public DbtCloudJobRunStepPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DbtCloudJobRunStepPb setStatus(DbtCloudRunStatus status) { + this.status = status; + return this; + } + + public DbtCloudRunStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudJobRunStepPb that = (DbtCloudJobRunStepPb) o; + 
return Objects.equals(index, that.index) + && Objects.equals(logs, that.logs) + && Objects.equals(name, that.name) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(index, logs, name, status); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudJobRunStepPb.class) + .add("index", index) + .add("logs", logs) + .add("name", name) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java new file mode 100755 index 000000000..9ae787e53 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** Response enumeration from calling the dbt Cloud API, for inclusion in output */ +@Generated +public enum DbtCloudRunStatus { + CANCELLED, + ERROR, + QUEUED, + RUNNING, + STARTING, + SUCCESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java new file mode 100755 index 000000000..29d06161e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DbtCloudTask.DbtCloudTaskSerializer.class) +@JsonDeserialize(using = DbtCloudTask.DbtCloudTaskDeserializer.class) +public class DbtCloudTask { + /** The resource name of the UC connection that authenticates the dbt Cloud for this task */ + private String connectionResourceName; + + /** Id of the dbt Cloud job to be triggered */ + private Long dbtCloudJobId; + + public DbtCloudTask setConnectionResourceName(String connectionResourceName) { + this.connectionResourceName = connectionResourceName; + return this; + } + + public String getConnectionResourceName() { + return connectionResourceName; + } + + public DbtCloudTask setDbtCloudJobId(Long dbtCloudJobId) { + this.dbtCloudJobId = dbtCloudJobId; + return this; + } + + public Long getDbtCloudJobId() { + return dbtCloudJobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudTask that = (DbtCloudTask) o; + return Objects.equals(connectionResourceName, that.connectionResourceName) + && Objects.equals(dbtCloudJobId, that.dbtCloudJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectionResourceName, dbtCloudJobId); + } + + @Override + public String 
toString() { + return new ToStringer(DbtCloudTask.class) + .add("connectionResourceName", connectionResourceName) + .add("dbtCloudJobId", dbtCloudJobId) + .toString(); + } + + DbtCloudTaskPb toPb() { + DbtCloudTaskPb pb = new DbtCloudTaskPb(); + pb.setConnectionResourceName(connectionResourceName); + pb.setDbtCloudJobId(dbtCloudJobId); + + return pb; + } + + static DbtCloudTask fromPb(DbtCloudTaskPb pb) { + DbtCloudTask model = new DbtCloudTask(); + model.setConnectionResourceName(pb.getConnectionResourceName()); + model.setDbtCloudJobId(pb.getDbtCloudJobId()); + + return model; + } + + public static class DbtCloudTaskSerializer extends JsonSerializer { + @Override + public void serialize(DbtCloudTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbtCloudTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbtCloudTaskDeserializer extends JsonDeserializer { + @Override + public DbtCloudTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbtCloudTaskPb pb = mapper.readValue(p, DbtCloudTaskPb.class); + return DbtCloudTask.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java new file mode 100755 index 000000000..9bda3ac7a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java @@ -0,0 +1,122 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DbtCloudTaskOutput.DbtCloudTaskOutputSerializer.class) +@JsonDeserialize(using = DbtCloudTaskOutput.DbtCloudTaskOutputDeserializer.class) +public class DbtCloudTaskOutput { + /** Id of the job run in dbt Cloud */ + private Long dbtCloudJobRunId; + + /** Steps of the job run as received from dbt Cloud */ + private Collection dbtCloudJobRunOutput; + + /** Url where full run details can be viewed */ + private String dbtCloudJobRunUrl; + + public DbtCloudTaskOutput setDbtCloudJobRunId(Long dbtCloudJobRunId) { + this.dbtCloudJobRunId = dbtCloudJobRunId; + return this; + } + + public Long getDbtCloudJobRunId() { + return dbtCloudJobRunId; + } + + public DbtCloudTaskOutput setDbtCloudJobRunOutput( + Collection dbtCloudJobRunOutput) { + this.dbtCloudJobRunOutput = dbtCloudJobRunOutput; + return this; + } + + public Collection getDbtCloudJobRunOutput() { + return dbtCloudJobRunOutput; + } + + public DbtCloudTaskOutput setDbtCloudJobRunUrl(String dbtCloudJobRunUrl) { + this.dbtCloudJobRunUrl = dbtCloudJobRunUrl; + return this; + } + + public String getDbtCloudJobRunUrl() { + return dbtCloudJobRunUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + DbtCloudTaskOutput that = (DbtCloudTaskOutput) o; + return Objects.equals(dbtCloudJobRunId, that.dbtCloudJobRunId) + && Objects.equals(dbtCloudJobRunOutput, that.dbtCloudJobRunOutput) + && Objects.equals(dbtCloudJobRunUrl, that.dbtCloudJobRunUrl); + } + + @Override + public int hashCode() { + return Objects.hash(dbtCloudJobRunId, dbtCloudJobRunOutput, dbtCloudJobRunUrl); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudTaskOutput.class) + .add("dbtCloudJobRunId", dbtCloudJobRunId) + .add("dbtCloudJobRunOutput", dbtCloudJobRunOutput) + .add("dbtCloudJobRunUrl", dbtCloudJobRunUrl) + .toString(); + } + + DbtCloudTaskOutputPb toPb() { + DbtCloudTaskOutputPb pb = new DbtCloudTaskOutputPb(); + pb.setDbtCloudJobRunId(dbtCloudJobRunId); + pb.setDbtCloudJobRunOutput(dbtCloudJobRunOutput); + pb.setDbtCloudJobRunUrl(dbtCloudJobRunUrl); + + return pb; + } + + static DbtCloudTaskOutput fromPb(DbtCloudTaskOutputPb pb) { + DbtCloudTaskOutput model = new DbtCloudTaskOutput(); + model.setDbtCloudJobRunId(pb.getDbtCloudJobRunId()); + model.setDbtCloudJobRunOutput(pb.getDbtCloudJobRunOutput()); + model.setDbtCloudJobRunUrl(pb.getDbtCloudJobRunUrl()); + + return model; + } + + public static class DbtCloudTaskOutputSerializer extends JsonSerializer { + @Override + public void serialize(DbtCloudTaskOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbtCloudTaskOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbtCloudTaskOutputDeserializer extends JsonDeserializer { + @Override + public DbtCloudTaskOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbtCloudTaskOutputPb pb = mapper.readValue(p, DbtCloudTaskOutputPb.class); + return DbtCloudTaskOutput.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutputPb.java new file mode 100755 index 000000000..6710d01c6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutputPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DbtCloudTaskOutputPb { + @JsonProperty("dbt_cloud_job_run_id") + private Long dbtCloudJobRunId; + + @JsonProperty("dbt_cloud_job_run_output") + private Collection dbtCloudJobRunOutput; + + @JsonProperty("dbt_cloud_job_run_url") + private String dbtCloudJobRunUrl; + + public DbtCloudTaskOutputPb setDbtCloudJobRunId(Long dbtCloudJobRunId) { + this.dbtCloudJobRunId = dbtCloudJobRunId; + return this; + } + + public Long getDbtCloudJobRunId() { + return dbtCloudJobRunId; + } + + public DbtCloudTaskOutputPb setDbtCloudJobRunOutput( + Collection dbtCloudJobRunOutput) { + this.dbtCloudJobRunOutput = dbtCloudJobRunOutput; + return this; + } + + public Collection getDbtCloudJobRunOutput() { + return dbtCloudJobRunOutput; + } + + public DbtCloudTaskOutputPb setDbtCloudJobRunUrl(String dbtCloudJobRunUrl) { + this.dbtCloudJobRunUrl = dbtCloudJobRunUrl; + return this; + } + + public String getDbtCloudJobRunUrl() { + return dbtCloudJobRunUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
DbtCloudTaskOutputPb that = (DbtCloudTaskOutputPb) o; + return Objects.equals(dbtCloudJobRunId, that.dbtCloudJobRunId) + && Objects.equals(dbtCloudJobRunOutput, that.dbtCloudJobRunOutput) + && Objects.equals(dbtCloudJobRunUrl, that.dbtCloudJobRunUrl); + } + + @Override + public int hashCode() { + return Objects.hash(dbtCloudJobRunId, dbtCloudJobRunOutput, dbtCloudJobRunUrl); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudTaskOutputPb.class) + .add("dbtCloudJobRunId", dbtCloudJobRunId) + .add("dbtCloudJobRunOutput", dbtCloudJobRunOutput) + .add("dbtCloudJobRunUrl", dbtCloudJobRunUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskPb.java new file mode 100755 index 000000000..59065ae6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DbtCloudTaskPb { + @JsonProperty("connection_resource_name") + private String connectionResourceName; + + @JsonProperty("dbt_cloud_job_id") + private Long dbtCloudJobId; + + public DbtCloudTaskPb setConnectionResourceName(String connectionResourceName) { + this.connectionResourceName = connectionResourceName; + return this; + } + + public String getConnectionResourceName() { + return connectionResourceName; + } + + public DbtCloudTaskPb setDbtCloudJobId(Long dbtCloudJobId) { + this.dbtCloudJobId = dbtCloudJobId; + return this; + } + + public Long getDbtCloudJobId() { + return dbtCloudJobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudTaskPb that = (DbtCloudTaskPb) o; + return Objects.equals(connectionResourceName, that.connectionResourceName) + && Objects.equals(dbtCloudJobId, that.dbtCloudJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectionResourceName, dbtCloudJobId); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudTaskPb.class) + .add("connectionResourceName", connectionResourceName) + .add("dbtCloudJobId", dbtCloudJobId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java index 523d88c3e..8a1e4741e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = DbtOutput.DbtOutputSerializer.class) +@JsonDeserialize(using = DbtOutput.DbtOutputDeserializer.class) public class DbtOutput { /** An optional map of headers to send when retrieving the artifact from the `artifacts_link`. */ - @JsonProperty("artifacts_headers") private Map artifactsHeaders; /** * A pre-signed URL to download the (compressed) dbt artifacts. This link is valid for a limited * time (30 minutes). This information is only available after the run has finished. 
*/ - @JsonProperty("artifacts_link") private String artifactsLink; public DbtOutput setArtifactsHeaders(Map artifactsHeaders) { @@ -60,4 +69,39 @@ public String toString() { .add("artifactsLink", artifactsLink) .toString(); } + + DbtOutputPb toPb() { + DbtOutputPb pb = new DbtOutputPb(); + pb.setArtifactsHeaders(artifactsHeaders); + pb.setArtifactsLink(artifactsLink); + + return pb; + } + + static DbtOutput fromPb(DbtOutputPb pb) { + DbtOutput model = new DbtOutput(); + model.setArtifactsHeaders(pb.getArtifactsHeaders()); + model.setArtifactsLink(pb.getArtifactsLink()); + + return model; + } + + public static class DbtOutputSerializer extends JsonSerializer { + @Override + public void serialize(DbtOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbtOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbtOutputDeserializer extends JsonDeserializer { + @Override + public DbtOutput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbtOutputPb pb = mapper.readValue(p, DbtOutputPb.class); + return DbtOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutputPb.java new file mode 100755 index 000000000..674c028c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutputPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class DbtOutputPb { + @JsonProperty("artifacts_headers") + private Map artifactsHeaders; + + @JsonProperty("artifacts_link") + private String artifactsLink; + + public DbtOutputPb setArtifactsHeaders(Map artifactsHeaders) { + this.artifactsHeaders = artifactsHeaders; + return this; + } + + public Map getArtifactsHeaders() { + return artifactsHeaders; + } + + public DbtOutputPb setArtifactsLink(String artifactsLink) { + this.artifactsLink = artifactsLink; + return this; + } + + public String getArtifactsLink() { + return artifactsLink; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtOutputPb that = (DbtOutputPb) o; + return Objects.equals(artifactsHeaders, that.artifactsHeaders) + && Objects.equals(artifactsLink, that.artifactsLink); + } + + @Override + public int hashCode() { + return Objects.hash(artifactsHeaders, artifactsLink); + } + + @Override + public String toString() { + return new ToStringer(DbtOutputPb.class) + .add("artifactsHeaders", artifactsHeaders) + .add("artifactsLink", artifactsLink) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java index b27b556a2..1d9592c18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DbtTask.DbtTaskSerializer.class) +@JsonDeserialize(using = DbtTask.DbtTaskDeserializer.class) public class DbtTask { /** * Optional name of the catalog to use. The value is the top level in the 3-level namespace of * Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a * warehouse_id is specified. Requires dbt-databricks >= 1.1.1. */ - @JsonProperty("catalog") private String catalog; /** * A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not * be empty. A maximum of up to 10 commands can be provided. */ - @JsonProperty("commands") private Collection commands; /** @@ -30,21 +39,18 @@ public class DbtTask { * specified. If no warehouse_id is specified and this folder is unset, the root directory is * used. */ - @JsonProperty("profiles_directory") private String profilesDirectory; /** * Path to the project directory. Optional for Git sourced tasks, in which case if no value is * provided, the root of the Git repository is used. */ - @JsonProperty("project_directory") private String projectDirectory; /** * Optional schema to write to. This parameter is only used when a warehouse_id is also provided. * If not provided, the `default` schema is used. */ - @JsonProperty("schema") private String schema; /** @@ -56,7 +62,6 @@ public class DbtTask { *

* `WORKSPACE`: Project is located in Databricks workspace. * `GIT`: Project is located in * cloud Git provider. */ - @JsonProperty("source") private Source source; /** @@ -64,7 +69,6 @@ public class DbtTask { * profile and connection details to dbt. It can be overridden on a per-command basis by using the * `--profiles-dir` command line argument. */ - @JsonProperty("warehouse_id") private String warehouseId; public DbtTask setCatalog(String catalog) { @@ -162,4 +166,49 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + DbtTaskPb toPb() { + DbtTaskPb pb = new DbtTaskPb(); + pb.setCatalog(catalog); + pb.setCommands(commands); + pb.setProfilesDirectory(profilesDirectory); + pb.setProjectDirectory(projectDirectory); + pb.setSchema(schema); + pb.setSource(source); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static DbtTask fromPb(DbtTaskPb pb) { + DbtTask model = new DbtTask(); + model.setCatalog(pb.getCatalog()); + model.setCommands(pb.getCommands()); + model.setProfilesDirectory(pb.getProfilesDirectory()); + model.setProjectDirectory(pb.getProjectDirectory()); + model.setSchema(pb.getSchema()); + model.setSource(pb.getSource()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class DbtTaskSerializer extends JsonSerializer { + @Override + public void serialize(DbtTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DbtTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DbtTaskDeserializer extends JsonDeserializer { + @Override + public DbtTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DbtTaskPb pb = mapper.readValue(p, DbtTaskPb.class); + return DbtTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTaskPb.java new file mode 100755 index 000000000..275a76723 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTaskPb.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DbtTaskPb { + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("commands") + private Collection commands; + + @JsonProperty("profiles_directory") + private String profilesDirectory; + + @JsonProperty("project_directory") + private String projectDirectory; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("source") + private Source source; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public DbtTaskPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public DbtTaskPb setCommands(Collection commands) { + this.commands = commands; + return this; + } + + public Collection getCommands() { + return commands; + } + + public DbtTaskPb setProfilesDirectory(String profilesDirectory) { + this.profilesDirectory = profilesDirectory; + return this; + } + + public String getProfilesDirectory() { + return profilesDirectory; + } + + public DbtTaskPb setProjectDirectory(String projectDirectory) { + this.projectDirectory = projectDirectory; + return this; + } + + public String getProjectDirectory() { + return 
projectDirectory; + } + + public DbtTaskPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public DbtTaskPb setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + public DbtTaskPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtTaskPb that = (DbtTaskPb) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(commands, that.commands) + && Objects.equals(profilesDirectory, that.profilesDirectory) + && Objects.equals(projectDirectory, that.projectDirectory) + && Objects.equals(schema, that.schema) + && Objects.equals(source, that.source) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + catalog, commands, profilesDirectory, projectDirectory, schema, source, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DbtTaskPb.class) + .add("catalog", catalog) + .add("commands", commands) + .add("profilesDirectory", profilesDirectory) + .add("projectDirectory", projectDirectory) + .add("schema", schema) + .add("source", source) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java index 953fe079c..3419e1198 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteJob.DeleteJobSerializer.class) +@JsonDeserialize(using = DeleteJob.DeleteJobDeserializer.class) public class DeleteJob { /** The canonical identifier of the job to delete. This field is required. */ - @JsonProperty("job_id") private Long jobId; public DeleteJob setJobId(Long jobId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteJob.class).add("jobId", jobId).toString(); } + + DeleteJobPb toPb() { + DeleteJobPb pb = new DeleteJobPb(); + pb.setJobId(jobId); + + return pb; + } + + static DeleteJob fromPb(DeleteJobPb pb) { + DeleteJob model = new DeleteJob(); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class DeleteJobSerializer extends JsonSerializer { + @Override + public void serialize(DeleteJob value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteJobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteJobDeserializer extends JsonDeserializer { + @Override + public DeleteJob deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteJobPb pb = mapper.readValue(p, DeleteJobPb.class); + return DeleteJob.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJobPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJobPb.java new file mode 100755 index 000000000..5e755211a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJobPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteJobPb { + @JsonProperty("job_id") + private Long jobId; + + public DeleteJobPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteJobPb that = (DeleteJobPb) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public String toString() { + return new ToStringer(DeleteJobPb.class).add("jobId", jobId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java index 2fb1336ff..8ce779b8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponsePb.java new file mode 100755 index 000000000..c4d7c4634 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java index 263bd5cc8..de6c1af52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRun.DeleteRunSerializer.class) +@JsonDeserialize(using = DeleteRun.DeleteRunDeserializer.class) public class DeleteRun { /** ID of the run to delete. */ - @JsonProperty("run_id") private Long runId; public DeleteRun setRunId(Long runId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRun.class).add("runId", runId).toString(); } + + DeleteRunPb toPb() { + DeleteRunPb pb = new DeleteRunPb(); + pb.setRunId(runId); + + return pb; + } + + static DeleteRun fromPb(DeleteRunPb pb) { + DeleteRun model = new DeleteRun(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class DeleteRunSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunDeserializer extends JsonDeserializer { + @Override + public DeleteRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunPb pb = mapper.readValue(p, DeleteRunPb.class); + return DeleteRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunPb.java new file mode 100755 index 000000000..857bd609f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteRunPb { + @JsonProperty("run_id") + private Long runId; + + public DeleteRunPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRunPb that = (DeleteRunPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java index 125c19ad2..cde53fccb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRunResponse.DeleteRunResponseSerializer.class) +@JsonDeserialize(using = DeleteRunResponse.DeleteRunResponseDeserializer.class) public class DeleteRunResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRunResponse.class).toString(); } + + DeleteRunResponsePb toPb() { + DeleteRunResponsePb pb = new DeleteRunResponsePb(); + + return pb; + } + + static DeleteRunResponse fromPb(DeleteRunResponsePb pb) { + DeleteRunResponse model = new DeleteRunResponse(); + + return model; + } + + public static class DeleteRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunResponseDeserializer extends JsonDeserializer { + @Override + public DeleteRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunResponsePb pb = mapper.readValue(p, DeleteRunResponsePb.class); + return DeleteRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponsePb.java new file mode 100755 index 000000000..f47fbe1cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteRunResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java index b9fe17ce5..0b56cf960 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,9 +21,16 @@ * become compliant with their policies. */ @Generated +@JsonSerialize( + using = + EnforcePolicyComplianceForJobResponseJobClusterSettingsChange + .EnforcePolicyComplianceForJobResponseJobClusterSettingsChangeSerializer.class) +@JsonDeserialize( + using = + EnforcePolicyComplianceForJobResponseJobClusterSettingsChange + .EnforcePolicyComplianceForJobResponseJobClusterSettingsChangeDeserializer.class) public class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange { /** The field where this change would be made, prepended with the job cluster key. */ - @JsonProperty("field") private String field; /** @@ -22,7 +38,6 @@ public class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange { * string) converted to a string. This is intended to be read by a human. The typed new value of * this field can be retrieved by reading the settings field in the API response. */ - @JsonProperty("new_value") private String newValue; /** @@ -30,7 +45,6 @@ public class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange { * boolean, or a string) converted to a string. This is intended to be read by a human. The type * of the field can be retrieved by reading the settings field in the API response. 
*/ - @JsonProperty("previous_value") private String previousValue; public EnforcePolicyComplianceForJobResponseJobClusterSettingsChange setField(String field) { @@ -86,4 +100,52 @@ public String toString() { .add("previousValue", previousValue) .toString(); } + + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb toPb() { + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb pb = + new EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb(); + pb.setField(field); + pb.setNewValue(newValue); + pb.setPreviousValue(previousValue); + + return pb; + } + + static EnforcePolicyComplianceForJobResponseJobClusterSettingsChange fromPb( + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb pb) { + EnforcePolicyComplianceForJobResponseJobClusterSettingsChange model = + new EnforcePolicyComplianceForJobResponseJobClusterSettingsChange(); + model.setField(pb.getField()); + model.setNewValue(pb.getNewValue()); + model.setPreviousValue(pb.getPreviousValue()); + + return model; + } + + public static class EnforcePolicyComplianceForJobResponseJobClusterSettingsChangeSerializer + extends JsonSerializer { + @Override + public void serialize( + EnforcePolicyComplianceForJobResponseJobClusterSettingsChange value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnforcePolicyComplianceForJobResponseJobClusterSettingsChangeDeserializer + extends JsonDeserializer { + @Override + public EnforcePolicyComplianceForJobResponseJobClusterSettingsChange deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb pb = + mapper.readValue( + p, EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb.class); + return EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb.java new file mode 100755 index 000000000..4152723c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Represents a change to the job cluster's settings that would be required for the job clusters to + * become compliant with their policies. 
+ */ +@Generated +class EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb { + @JsonProperty("field") + private String field; + + @JsonProperty("new_value") + private String newValue; + + @JsonProperty("previous_value") + private String previousValue; + + public EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb setField(String field) { + this.field = field; + return this; + } + + public String getField() { + return field; + } + + public EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb setNewValue( + String newValue) { + this.newValue = newValue; + return this; + } + + public String getNewValue() { + return newValue; + } + + public EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb setPreviousValue( + String previousValue) { + this.previousValue = previousValue; + return this; + } + + public String getPreviousValue() { + return previousValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb that = + (EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb) o; + return Objects.equals(field, that.field) + && Objects.equals(newValue, that.newValue) + && Objects.equals(previousValue, that.previousValue); + } + + @Override + public int hashCode() { + return Objects.hash(field, newValue, previousValue); + } + + @Override + public String toString() { + return new ToStringer(EnforcePolicyComplianceForJobResponseJobClusterSettingsChangePb.class) + .add("field", field) + .add("newValue", newValue) + .add("previousValue", previousValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java index 627e7a897..07774f9ac 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = EnforcePolicyComplianceRequest.EnforcePolicyComplianceRequestSerializer.class) +@JsonDeserialize( + using = EnforcePolicyComplianceRequest.EnforcePolicyComplianceRequestDeserializer.class) public class EnforcePolicyComplianceRequest { /** The ID of the job you want to enforce policy compliance on. */ - @JsonProperty("job_id") private Long jobId; /** * If set, previews changes made to the job to comply with its policy, but does not update the * job. 
*/ - @JsonProperty("validate_only") private Boolean validateOnly; public EnforcePolicyComplianceRequest setJobId(Long jobId) { @@ -58,4 +69,44 @@ public String toString() { .add("validateOnly", validateOnly) .toString(); } + + EnforcePolicyComplianceRequestPb toPb() { + EnforcePolicyComplianceRequestPb pb = new EnforcePolicyComplianceRequestPb(); + pb.setJobId(jobId); + pb.setValidateOnly(validateOnly); + + return pb; + } + + static EnforcePolicyComplianceRequest fromPb(EnforcePolicyComplianceRequestPb pb) { + EnforcePolicyComplianceRequest model = new EnforcePolicyComplianceRequest(); + model.setJobId(pb.getJobId()); + model.setValidateOnly(pb.getValidateOnly()); + + return model; + } + + public static class EnforcePolicyComplianceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + EnforcePolicyComplianceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnforcePolicyComplianceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnforcePolicyComplianceRequestDeserializer + extends JsonDeserializer { + @Override + public EnforcePolicyComplianceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnforcePolicyComplianceRequestPb pb = + mapper.readValue(p, EnforcePolicyComplianceRequestPb.class); + return EnforcePolicyComplianceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequestPb.java new file mode 100755 index 000000000..355f40837 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnforcePolicyComplianceRequestPb { + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("validate_only") + private Boolean validateOnly; + + public EnforcePolicyComplianceRequestPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public EnforcePolicyComplianceRequestPb setValidateOnly(Boolean validateOnly) { + this.validateOnly = validateOnly; + return this; + } + + public Boolean getValidateOnly() { + return validateOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnforcePolicyComplianceRequestPb that = (EnforcePolicyComplianceRequestPb) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(validateOnly, that.validateOnly); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, validateOnly); + } + + @Override + public String toString() { + return new ToStringer(EnforcePolicyComplianceRequestPb.class) + .add("jobId", 
jobId) + .add("validateOnly", validateOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java index f75f2addc..dfe49dd6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java @@ -4,24 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = EnforcePolicyComplianceResponse.EnforcePolicyComplianceResponseSerializer.class) +@JsonDeserialize( + using = EnforcePolicyComplianceResponse.EnforcePolicyComplianceResponseDeserializer.class) public class EnforcePolicyComplianceResponse { /** * Whether any changes have been made to the job cluster settings for the job to become compliant * with its policies. */ - @JsonProperty("has_changes") private Boolean hasChanges; /** * A list of job cluster changes that have been made to the job’s cluster settings in order for * all job clusters to become compliant with their policies. 
*/ - @JsonProperty("job_cluster_changes") private Collection jobClusterChanges; @@ -31,7 +42,6 @@ public class EnforcePolicyComplianceResponse { * to existing all-purpose clusters. Updated job settings are derived by applying policy default * values to the existing job clusters in order to satisfy policy requirements. */ - @JsonProperty("settings") private JobSettings settings; public EnforcePolicyComplianceResponse setHasChanges(Boolean hasChanges) { @@ -86,4 +96,46 @@ public String toString() { .add("settings", settings) .toString(); } + + EnforcePolicyComplianceResponsePb toPb() { + EnforcePolicyComplianceResponsePb pb = new EnforcePolicyComplianceResponsePb(); + pb.setHasChanges(hasChanges); + pb.setJobClusterChanges(jobClusterChanges); + pb.setSettings(settings); + + return pb; + } + + static EnforcePolicyComplianceResponse fromPb(EnforcePolicyComplianceResponsePb pb) { + EnforcePolicyComplianceResponse model = new EnforcePolicyComplianceResponse(); + model.setHasChanges(pb.getHasChanges()); + model.setJobClusterChanges(pb.getJobClusterChanges()); + model.setSettings(pb.getSettings()); + + return model; + } + + public static class EnforcePolicyComplianceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + EnforcePolicyComplianceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnforcePolicyComplianceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnforcePolicyComplianceResponseDeserializer + extends JsonDeserializer { + @Override + public EnforcePolicyComplianceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnforcePolicyComplianceResponsePb pb = + mapper.readValue(p, EnforcePolicyComplianceResponsePb.class); + return EnforcePolicyComplianceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponsePb.java new file mode 100755 index 000000000..77a075e42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponsePb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EnforcePolicyComplianceResponsePb { + @JsonProperty("has_changes") + private Boolean hasChanges; + + @JsonProperty("job_cluster_changes") + private Collection + jobClusterChanges; + + @JsonProperty("settings") + private JobSettings settings; + + public EnforcePolicyComplianceResponsePb setHasChanges(Boolean hasChanges) { + this.hasChanges = hasChanges; + return this; + } + + public Boolean getHasChanges() { + return hasChanges; + } + + public EnforcePolicyComplianceResponsePb setJobClusterChanges( + Collection jobClusterChanges) { + this.jobClusterChanges = jobClusterChanges; + return this; + } + + public Collection + getJobClusterChanges() { + return jobClusterChanges; + } + + public EnforcePolicyComplianceResponsePb setSettings(JobSettings settings) { + this.settings = settings; + return this; + } + + public JobSettings getSettings() { + return settings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + EnforcePolicyComplianceResponsePb that = (EnforcePolicyComplianceResponsePb) o; + return Objects.equals(hasChanges, that.hasChanges) + && Objects.equals(jobClusterChanges, that.jobClusterChanges) + && Objects.equals(settings, that.settings); + } + + @Override + public int hashCode() { + return Objects.hash(hasChanges, jobClusterChanges, settings); + } + + @Override + public String toString() { + return new ToStringer(EnforcePolicyComplianceResponsePb.class) + .add("hasChanges", hasChanges) + .add("jobClusterChanges", jobClusterChanges) + .add("settings", settings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java index 09a4c43b7..52ec5c0e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Run was exported successfully. 
*/ @Generated +@JsonSerialize(using = ExportRunOutput.ExportRunOutputSerializer.class) +@JsonDeserialize(using = ExportRunOutput.ExportRunOutputDeserializer.class) public class ExportRunOutput { /** * The exported content in HTML format (one for every view item). To extract the HTML notebook @@ -17,7 +28,6 @@ public class ExportRunOutput { * *

[Python script]: https://docs.databricks.com/en/_static/examples/extract.py */ - @JsonProperty("views") private Collection views; public ExportRunOutput setViews(Collection views) { @@ -46,4 +56,38 @@ public int hashCode() { public String toString() { return new ToStringer(ExportRunOutput.class).add("views", views).toString(); } + + ExportRunOutputPb toPb() { + ExportRunOutputPb pb = new ExportRunOutputPb(); + pb.setViews(views); + + return pb; + } + + static ExportRunOutput fromPb(ExportRunOutputPb pb) { + ExportRunOutput model = new ExportRunOutput(); + model.setViews(pb.getViews()); + + return model; + } + + public static class ExportRunOutputSerializer extends JsonSerializer { + @Override + public void serialize(ExportRunOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExportRunOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportRunOutputDeserializer extends JsonDeserializer { + @Override + public ExportRunOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportRunOutputPb pb = mapper.readValue(p, ExportRunOutputPb.class); + return ExportRunOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutputPb.java new file mode 100755 index 000000000..02e819df4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutputPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Run was exported successfully. */ +@Generated +class ExportRunOutputPb { + @JsonProperty("views") + private Collection views; + + public ExportRunOutputPb setViews(Collection views) { + this.views = views; + return this; + } + + public Collection getViews() { + return views; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportRunOutputPb that = (ExportRunOutputPb) o; + return Objects.equals(views, that.views); + } + + @Override + public int hashCode() { + return Objects.hash(views); + } + + @Override + public String toString() { + return new ToStringer(ExportRunOutputPb.class).add("views", views).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java index 6cdd91a27..0fbb51575 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Export and retrieve a job run */ @Generated +@JsonSerialize(using = ExportRunRequest.ExportRunRequestSerializer.class) +@JsonDeserialize(using = ExportRunRequest.ExportRunRequestDeserializer.class) public class ExportRunRequest { /** The canonical identifier for the run. This field is required. */ - @JsonIgnore - @QueryParam("run_id") private Long runId; /** Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE. */ - @JsonIgnore - @QueryParam("views_to_export") private ViewsToExport viewsToExport; public ExportRunRequest setRunId(Long runId) { @@ -59,4 +65,40 @@ public String toString() { .add("viewsToExport", viewsToExport) .toString(); } + + ExportRunRequestPb toPb() { + ExportRunRequestPb pb = new ExportRunRequestPb(); + pb.setRunId(runId); + pb.setViewsToExport(viewsToExport); + + return pb; + } + + static ExportRunRequest fromPb(ExportRunRequestPb pb) { + ExportRunRequest model = new ExportRunRequest(); + model.setRunId(pb.getRunId()); + model.setViewsToExport(pb.getViewsToExport()); + + return model; + } + + public static class ExportRunRequestSerializer extends JsonSerializer { + @Override + public void serialize(ExportRunRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExportRunRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportRunRequestDeserializer extends JsonDeserializer { + @Override + public ExportRunRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportRunRequestPb pb = mapper.readValue(p, ExportRunRequestPb.class); + return ExportRunRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequestPb.java new file mode 100755 index 000000000..28a5b8dc5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Export and retrieve a job run */ +@Generated +class ExportRunRequestPb { + @JsonIgnore + @QueryParam("run_id") + private Long runId; + + @JsonIgnore + @QueryParam("views_to_export") + private ViewsToExport viewsToExport; + + public ExportRunRequestPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + public ExportRunRequestPb setViewsToExport(ViewsToExport viewsToExport) { + this.viewsToExport = viewsToExport; + return this; + } + + public ViewsToExport getViewsToExport() { + return viewsToExport; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportRunRequestPb that = (ExportRunRequestPb) o; + return Objects.equals(runId, that.runId) && Objects.equals(viewsToExport, that.viewsToExport); + } + + @Override + public int hashCode() { + return Objects.hash(runId, viewsToExport); + } + + @Override + public String toString() { + return new ToStringer(ExportRunRequestPb.class) + .add("runId", runId) + .add("viewsToExport", 
viewsToExport) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java index bbebd02f0..c63177a42 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java @@ -4,23 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = FileArrivalTriggerConfiguration.FileArrivalTriggerConfigurationSerializer.class) +@JsonDeserialize( + using = FileArrivalTriggerConfiguration.FileArrivalTriggerConfigurationDeserializer.class) public class FileArrivalTriggerConfiguration { /** * If set, the trigger starts a run only after the specified amount of time passed since the last * time the trigger fired. The minimum allowed value is 60 seconds */ - @JsonProperty("min_time_between_triggers_seconds") private Long minTimeBetweenTriggersSeconds; /** * URL to be monitored for file arrivals. The path must point to the root or a subpath of the * external location. 
*/ - @JsonProperty("url") private String url; /** @@ -28,7 +39,6 @@ public class FileArrivalTriggerConfiguration { * amount of time. This makes it possible to wait for a batch of incoming files to arrive before * triggering a run. The minimum allowed value is 60 seconds. */ - @JsonProperty("wait_after_last_change_seconds") private Long waitAfterLastChangeSeconds; public FileArrivalTriggerConfiguration setMinTimeBetweenTriggersSeconds( @@ -83,4 +93,46 @@ public String toString() { .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds) .toString(); } + + FileArrivalTriggerConfigurationPb toPb() { + FileArrivalTriggerConfigurationPb pb = new FileArrivalTriggerConfigurationPb(); + pb.setMinTimeBetweenTriggersSeconds(minTimeBetweenTriggersSeconds); + pb.setUrl(url); + pb.setWaitAfterLastChangeSeconds(waitAfterLastChangeSeconds); + + return pb; + } + + static FileArrivalTriggerConfiguration fromPb(FileArrivalTriggerConfigurationPb pb) { + FileArrivalTriggerConfiguration model = new FileArrivalTriggerConfiguration(); + model.setMinTimeBetweenTriggersSeconds(pb.getMinTimeBetweenTriggersSeconds()); + model.setUrl(pb.getUrl()); + model.setWaitAfterLastChangeSeconds(pb.getWaitAfterLastChangeSeconds()); + + return model; + } + + public static class FileArrivalTriggerConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + FileArrivalTriggerConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileArrivalTriggerConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileArrivalTriggerConfigurationDeserializer + extends JsonDeserializer { + @Override + public FileArrivalTriggerConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileArrivalTriggerConfigurationPb pb = + mapper.readValue(p, FileArrivalTriggerConfigurationPb.class); + return FileArrivalTriggerConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfigurationPb.java new file mode 100755 index 000000000..1a3d5c61a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfigurationPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileArrivalTriggerConfigurationPb { + @JsonProperty("min_time_between_triggers_seconds") + private Long minTimeBetweenTriggersSeconds; + + @JsonProperty("url") + private String url; + + @JsonProperty("wait_after_last_change_seconds") + private Long waitAfterLastChangeSeconds; + + public FileArrivalTriggerConfigurationPb setMinTimeBetweenTriggersSeconds( + Long minTimeBetweenTriggersSeconds) { + this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds; + return this; + } + + public Long getMinTimeBetweenTriggersSeconds() { + return minTimeBetweenTriggersSeconds; + } + + public FileArrivalTriggerConfigurationPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + public FileArrivalTriggerConfigurationPb setWaitAfterLastChangeSeconds( + Long waitAfterLastChangeSeconds) { + this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds; + return this; + } + + public Long getWaitAfterLastChangeSeconds() { + return waitAfterLastChangeSeconds; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileArrivalTriggerConfigurationPb that = (FileArrivalTriggerConfigurationPb) o; + return Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds) + && Objects.equals(url, that.url) + && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds); + } + + @Override + public int hashCode() { + return Objects.hash(minTimeBetweenTriggersSeconds, url, waitAfterLastChangeSeconds); + } + + @Override + public String toString() { + return new ToStringer(FileArrivalTriggerConfigurationPb.class) + .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds) + .add("url", url) + .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java index 62611e459..4e4c7e273 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated 
+@JsonSerialize(using = ForEachStats.ForEachStatsSerializer.class) +@JsonDeserialize(using = ForEachStats.ForEachStatsDeserializer.class) public class ForEachStats { /** Sample of 3 most common error messages occurred during the iteration. */ - @JsonProperty("error_message_stats") private Collection errorMessageStats; /** Describes stats of the iteration. Only latest retries are considered. */ - @JsonProperty("task_run_stats") private ForEachTaskTaskRunStats taskRunStats; public ForEachStats setErrorMessageStats( @@ -58,4 +67,39 @@ public String toString() { .add("taskRunStats", taskRunStats) .toString(); } + + ForEachStatsPb toPb() { + ForEachStatsPb pb = new ForEachStatsPb(); + pb.setErrorMessageStats(errorMessageStats); + pb.setTaskRunStats(taskRunStats); + + return pb; + } + + static ForEachStats fromPb(ForEachStatsPb pb) { + ForEachStats model = new ForEachStats(); + model.setErrorMessageStats(pb.getErrorMessageStats()); + model.setTaskRunStats(pb.getTaskRunStats()); + + return model; + } + + public static class ForEachStatsSerializer extends JsonSerializer { + @Override + public void serialize(ForEachStats value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForEachStatsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForEachStatsDeserializer extends JsonDeserializer { + @Override + public ForEachStats deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForEachStatsPb pb = mapper.readValue(p, ForEachStatsPb.class); + return ForEachStats.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStatsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStatsPb.java new file mode 100755 index 000000000..d10d6a465 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStatsPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ForEachStatsPb { + @JsonProperty("error_message_stats") + private Collection errorMessageStats; + + @JsonProperty("task_run_stats") + private ForEachTaskTaskRunStats taskRunStats; + + public ForEachStatsPb setErrorMessageStats( + Collection errorMessageStats) { + this.errorMessageStats = errorMessageStats; + return this; + } + + public Collection getErrorMessageStats() { + return errorMessageStats; + } + + public ForEachStatsPb setTaskRunStats(ForEachTaskTaskRunStats taskRunStats) { + this.taskRunStats = taskRunStats; + return this; + } + + public ForEachTaskTaskRunStats getTaskRunStats() { + return taskRunStats; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForEachStatsPb that = (ForEachStatsPb) o; + return Objects.equals(errorMessageStats, that.errorMessageStats) + && Objects.equals(taskRunStats, that.taskRunStats); + } + + @Override + public int hashCode() { + return Objects.hash(errorMessageStats, taskRunStats); + } + + @Override + public String toString() { + return new ToStringer(ForEachStatsPb.class) + 
.add("errorMessageStats", errorMessageStats) + .add("taskRunStats", taskRunStats) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java index a4d141303..6b696d18b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java @@ -4,26 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ForEachTask.ForEachTaskSerializer.class) +@JsonDeserialize(using = ForEachTask.ForEachTaskDeserializer.class) public class ForEachTask { /** * An optional maximum allowed number of concurrent runs of the task. Set this value if you want * to be able to execute multiple runs of the task concurrently. */ - @JsonProperty("concurrency") private Long concurrency; /** * Array for task to iterate on. This can be a JSON string or a reference to an array parameter. 
*/ - @JsonProperty("inputs") private String inputs; /** Configuration for the task that will be run for each element in the array */ - @JsonProperty("task") private Task task; public ForEachTask setConcurrency(Long concurrency) { @@ -76,4 +84,41 @@ public String toString() { .add("task", task) .toString(); } + + ForEachTaskPb toPb() { + ForEachTaskPb pb = new ForEachTaskPb(); + pb.setConcurrency(concurrency); + pb.setInputs(inputs); + pb.setTask(task); + + return pb; + } + + static ForEachTask fromPb(ForEachTaskPb pb) { + ForEachTask model = new ForEachTask(); + model.setConcurrency(pb.getConcurrency()); + model.setInputs(pb.getInputs()); + model.setTask(pb.getTask()); + + return model; + } + + public static class ForEachTaskSerializer extends JsonSerializer { + @Override + public void serialize(ForEachTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForEachTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForEachTaskDeserializer extends JsonDeserializer { + @Override + public ForEachTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForEachTaskPb pb = mapper.readValue(p, ForEachTaskPb.class); + return ForEachTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java index 4546b54d5..a9afa3fe3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ForEachTaskErrorMessageStats.ForEachTaskErrorMessageStatsSerializer.class) +@JsonDeserialize( + using = ForEachTaskErrorMessageStats.ForEachTaskErrorMessageStatsDeserializer.class) public class ForEachTaskErrorMessageStats { /** Describes the count of such error message encountered during the iterations. */ - @JsonProperty("count") private Long count; /** Describes the error message occured during the iterations. */ - @JsonProperty("error_message") private String errorMessage; /** Describes the termination reason for the error message. 
*/ - @JsonProperty("termination_category") private String terminationCategory; public ForEachTaskErrorMessageStats setCount(Long count) { @@ -71,4 +80,45 @@ public String toString() { .add("terminationCategory", terminationCategory) .toString(); } + + ForEachTaskErrorMessageStatsPb toPb() { + ForEachTaskErrorMessageStatsPb pb = new ForEachTaskErrorMessageStatsPb(); + pb.setCount(count); + pb.setErrorMessage(errorMessage); + pb.setTerminationCategory(terminationCategory); + + return pb; + } + + static ForEachTaskErrorMessageStats fromPb(ForEachTaskErrorMessageStatsPb pb) { + ForEachTaskErrorMessageStats model = new ForEachTaskErrorMessageStats(); + model.setCount(pb.getCount()); + model.setErrorMessage(pb.getErrorMessage()); + model.setTerminationCategory(pb.getTerminationCategory()); + + return model; + } + + public static class ForEachTaskErrorMessageStatsSerializer + extends JsonSerializer { + @Override + public void serialize( + ForEachTaskErrorMessageStats value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForEachTaskErrorMessageStatsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForEachTaskErrorMessageStatsDeserializer + extends JsonDeserializer { + @Override + public ForEachTaskErrorMessageStats deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForEachTaskErrorMessageStatsPb pb = mapper.readValue(p, ForEachTaskErrorMessageStatsPb.class); + return ForEachTaskErrorMessageStats.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStatsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStatsPb.java new file mode 100755 index 000000000..104ff0f37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStatsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ForEachTaskErrorMessageStatsPb { + @JsonProperty("count") + private Long count; + + @JsonProperty("error_message") + private String errorMessage; + + @JsonProperty("termination_category") + private String terminationCategory; + + public ForEachTaskErrorMessageStatsPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ForEachTaskErrorMessageStatsPb setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public ForEachTaskErrorMessageStatsPb setTerminationCategory(String terminationCategory) { + this.terminationCategory = terminationCategory; + return this; + } + + public String getTerminationCategory() { + return terminationCategory; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForEachTaskErrorMessageStatsPb that = (ForEachTaskErrorMessageStatsPb) o; + return 
Objects.equals(count, that.count) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(terminationCategory, that.terminationCategory); + } + + @Override + public int hashCode() { + return Objects.hash(count, errorMessage, terminationCategory); + } + + @Override + public String toString() { + return new ToStringer(ForEachTaskErrorMessageStatsPb.class) + .add("count", count) + .add("errorMessage", errorMessage) + .add("terminationCategory", terminationCategory) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskPb.java new file mode 100755 index 000000000..6ab036d11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ForEachTaskPb { + @JsonProperty("concurrency") + private Long concurrency; + + @JsonProperty("inputs") + private String inputs; + + @JsonProperty("task") + private Task task; + + public ForEachTaskPb setConcurrency(Long concurrency) { + this.concurrency = concurrency; + return this; + } + + public Long getConcurrency() { + return concurrency; + } + + public ForEachTaskPb setInputs(String inputs) { + this.inputs = inputs; + return this; + } + + public String getInputs() { + return inputs; + } + + public ForEachTaskPb setTask(Task task) { + this.task = task; + return this; + } + + public Task getTask() { + return task; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForEachTaskPb that = (ForEachTaskPb) o; + 
return Objects.equals(concurrency, that.concurrency) + && Objects.equals(inputs, that.inputs) + && Objects.equals(task, that.task); + } + + @Override + public int hashCode() { + return Objects.hash(concurrency, inputs, task); + } + + @Override + public String toString() { + return new ToStringer(ForEachTaskPb.class) + .add("concurrency", concurrency) + .add("inputs", inputs) + .add("task", task) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStats.java index 48ebdb15b..c4fe9699b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStats.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ForEachTaskTaskRunStats.ForEachTaskTaskRunStatsSerializer.class) +@JsonDeserialize(using = ForEachTaskTaskRunStats.ForEachTaskTaskRunStatsDeserializer.class) public class ForEachTaskTaskRunStats { /** Describes the iteration runs having an active lifecycle state or an active run sub state. 
*/ - @JsonProperty("active_iterations") private Long activeIterations; /** Describes the number of failed and succeeded iteration runs. */ - @JsonProperty("completed_iterations") private Long completedIterations; /** Describes the number of failed iteration runs. */ - @JsonProperty("failed_iterations") private Long failedIterations; /** Describes the number of iteration runs that have been scheduled. */ - @JsonProperty("scheduled_iterations") private Long scheduledIterations; /** Describes the number of succeeded iteration runs. */ - @JsonProperty("succeeded_iterations") private Long succeededIterations; /** Describes the length of the list of items to iterate over. */ - @JsonProperty("total_iterations") private Long totalIterations; public ForEachTaskTaskRunStats setActiveIterations(Long activeIterations) { @@ -122,4 +127,51 @@ public String toString() { .add("totalIterations", totalIterations) .toString(); } + + ForEachTaskTaskRunStatsPb toPb() { + ForEachTaskTaskRunStatsPb pb = new ForEachTaskTaskRunStatsPb(); + pb.setActiveIterations(activeIterations); + pb.setCompletedIterations(completedIterations); + pb.setFailedIterations(failedIterations); + pb.setScheduledIterations(scheduledIterations); + pb.setSucceededIterations(succeededIterations); + pb.setTotalIterations(totalIterations); + + return pb; + } + + static ForEachTaskTaskRunStats fromPb(ForEachTaskTaskRunStatsPb pb) { + ForEachTaskTaskRunStats model = new ForEachTaskTaskRunStats(); + model.setActiveIterations(pb.getActiveIterations()); + model.setCompletedIterations(pb.getCompletedIterations()); + model.setFailedIterations(pb.getFailedIterations()); + model.setScheduledIterations(pb.getScheduledIterations()); + model.setSucceededIterations(pb.getSucceededIterations()); + model.setTotalIterations(pb.getTotalIterations()); + + return model; + } + + public static class ForEachTaskTaskRunStatsSerializer + extends JsonSerializer { + @Override + public void serialize( + ForEachTaskTaskRunStats value, 
JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForEachTaskTaskRunStatsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForEachTaskTaskRunStatsDeserializer + extends JsonDeserializer { + @Override + public ForEachTaskTaskRunStats deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForEachTaskTaskRunStatsPb pb = mapper.readValue(p, ForEachTaskTaskRunStatsPb.class); + return ForEachTaskTaskRunStats.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStatsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStatsPb.java new file mode 100755 index 000000000..0bd6f7fb6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStatsPb.java @@ -0,0 +1,119 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ForEachTaskTaskRunStatsPb { + @JsonProperty("active_iterations") + private Long activeIterations; + + @JsonProperty("completed_iterations") + private Long completedIterations; + + @JsonProperty("failed_iterations") + private Long failedIterations; + + @JsonProperty("scheduled_iterations") + private Long scheduledIterations; + + @JsonProperty("succeeded_iterations") + private Long succeededIterations; + + @JsonProperty("total_iterations") + private Long totalIterations; + + public ForEachTaskTaskRunStatsPb setActiveIterations(Long activeIterations) { + this.activeIterations = activeIterations; + return this; + } + + public Long getActiveIterations() { + return activeIterations; + } + + public ForEachTaskTaskRunStatsPb setCompletedIterations(Long completedIterations) { + this.completedIterations = completedIterations; + return this; + } + + public Long getCompletedIterations() { + return completedIterations; + } + + public ForEachTaskTaskRunStatsPb setFailedIterations(Long failedIterations) { + this.failedIterations = failedIterations; + return this; + } + + public Long getFailedIterations() { + return failedIterations; + } + + public ForEachTaskTaskRunStatsPb setScheduledIterations(Long scheduledIterations) { + this.scheduledIterations = scheduledIterations; + return this; + } + + public Long getScheduledIterations() { + return scheduledIterations; + } + + public ForEachTaskTaskRunStatsPb setSucceededIterations(Long succeededIterations) { + this.succeededIterations = succeededIterations; + return this; + } + + public Long getSucceededIterations() { + return succeededIterations; + } + + public ForEachTaskTaskRunStatsPb setTotalIterations(Long totalIterations) { + this.totalIterations = totalIterations; + return this; + } + + 
public Long getTotalIterations() { + return totalIterations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForEachTaskTaskRunStatsPb that = (ForEachTaskTaskRunStatsPb) o; + return Objects.equals(activeIterations, that.activeIterations) + && Objects.equals(completedIterations, that.completedIterations) + && Objects.equals(failedIterations, that.failedIterations) + && Objects.equals(scheduledIterations, that.scheduledIterations) + && Objects.equals(succeededIterations, that.succeededIterations) + && Objects.equals(totalIterations, that.totalIterations); + } + + @Override + public int hashCode() { + return Objects.hash( + activeIterations, + completedIterations, + failedIterations, + scheduledIterations, + succeededIterations, + totalIterations); + } + + @Override + public String toString() { + return new ToStringer(ForEachTaskTaskRunStatsPb.class) + .add("activeIterations", activeIterations) + .add("completedIterations", completedIterations) + .add("failedIterations", failedIterations) + .add("scheduledIterations", scheduledIterations) + .add("succeededIterations", succeededIterations) + .add("totalIterations", totalIterations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java index 28df26085..4e5e69fc9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenAiComputeTask.GenAiComputeTaskSerializer.class) +@JsonDeserialize(using = GenAiComputeTask.GenAiComputeTaskDeserializer.class) public class GenAiComputeTask { /** Command launcher to run the actual script, e.g. bash, python etc. */ - @JsonProperty("command") private String command; /** */ - @JsonProperty("compute") private ComputeConfig compute; /** Runtime image */ - @JsonProperty("dl_runtime_image") private String dlRuntimeImage; /** * Optional string containing the name of the MLflow experiment to log the run to. If name is not * found, backend will create the mlflow experiment using the name. */ - @JsonProperty("mlflow_experiment_name") private String mlflowExperimentName; /** @@ -35,7 +42,6 @@ public class GenAiComputeTask { * if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Script is located in * Databricks workspace. * `GIT`: Script is located in cloud Git provider. */ - @JsonProperty("source") private Source source; /** @@ -44,18 +50,15 @@ public class GenAiComputeTask { * the path must be absolute and begin with `/`. For files stored in a remote repository, the path * must be relative. This field is required. */ - @JsonProperty("training_script_path") private String trainingScriptPath; /** * Optional string containing model parameters passed to the training script in yaml format. If * present, then the content in yaml_parameters_file_path will be ignored. 
*/ - @JsonProperty("yaml_parameters") private String yamlParameters; /** Optional path to a YAML file containing model parameters passed to the training script. */ - @JsonProperty("yaml_parameters_file_path") private String yamlParametersFilePath; public GenAiComputeTask setCommand(String command) { @@ -171,4 +174,52 @@ public String toString() { .add("yamlParametersFilePath", yamlParametersFilePath) .toString(); } + + GenAiComputeTaskPb toPb() { + GenAiComputeTaskPb pb = new GenAiComputeTaskPb(); + pb.setCommand(command); + pb.setCompute(compute); + pb.setDlRuntimeImage(dlRuntimeImage); + pb.setMlflowExperimentName(mlflowExperimentName); + pb.setSource(source); + pb.setTrainingScriptPath(trainingScriptPath); + pb.setYamlParameters(yamlParameters); + pb.setYamlParametersFilePath(yamlParametersFilePath); + + return pb; + } + + static GenAiComputeTask fromPb(GenAiComputeTaskPb pb) { + GenAiComputeTask model = new GenAiComputeTask(); + model.setCommand(pb.getCommand()); + model.setCompute(pb.getCompute()); + model.setDlRuntimeImage(pb.getDlRuntimeImage()); + model.setMlflowExperimentName(pb.getMlflowExperimentName()); + model.setSource(pb.getSource()); + model.setTrainingScriptPath(pb.getTrainingScriptPath()); + model.setYamlParameters(pb.getYamlParameters()); + model.setYamlParametersFilePath(pb.getYamlParametersFilePath()); + + return model; + } + + public static class GenAiComputeTaskSerializer extends JsonSerializer { + @Override + public void serialize(GenAiComputeTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenAiComputeTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GenAiComputeTaskDeserializer extends JsonDeserializer { + @Override + public GenAiComputeTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenAiComputeTaskPb pb = mapper.readValue(p, GenAiComputeTaskPb.class); + return GenAiComputeTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTaskPb.java new file mode 100755 index 000000000..6ac374431 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTaskPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenAiComputeTaskPb { + @JsonProperty("command") + private String command; + + @JsonProperty("compute") + private ComputeConfig compute; + + @JsonProperty("dl_runtime_image") + private String dlRuntimeImage; + + @JsonProperty("mlflow_experiment_name") + private String mlflowExperimentName; + + @JsonProperty("source") + private Source source; + + @JsonProperty("training_script_path") + private String trainingScriptPath; + + @JsonProperty("yaml_parameters") + private String yamlParameters; + + @JsonProperty("yaml_parameters_file_path") + private String yamlParametersFilePath; + + public GenAiComputeTaskPb setCommand(String command) { + this.command = command; + return this; + } + + public String getCommand() { + return command; + } + + public GenAiComputeTaskPb setCompute(ComputeConfig compute) { + this.compute = compute; + return this; + } + + public ComputeConfig getCompute() { + return compute; + } + + public GenAiComputeTaskPb setDlRuntimeImage(String dlRuntimeImage) { + this.dlRuntimeImage = dlRuntimeImage; + return this; + } + + public String getDlRuntimeImage() { + return dlRuntimeImage; + } + + public 
GenAiComputeTaskPb setMlflowExperimentName(String mlflowExperimentName) { + this.mlflowExperimentName = mlflowExperimentName; + return this; + } + + public String getMlflowExperimentName() { + return mlflowExperimentName; + } + + public GenAiComputeTaskPb setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + public GenAiComputeTaskPb setTrainingScriptPath(String trainingScriptPath) { + this.trainingScriptPath = trainingScriptPath; + return this; + } + + public String getTrainingScriptPath() { + return trainingScriptPath; + } + + public GenAiComputeTaskPb setYamlParameters(String yamlParameters) { + this.yamlParameters = yamlParameters; + return this; + } + + public String getYamlParameters() { + return yamlParameters; + } + + public GenAiComputeTaskPb setYamlParametersFilePath(String yamlParametersFilePath) { + this.yamlParametersFilePath = yamlParametersFilePath; + return this; + } + + public String getYamlParametersFilePath() { + return yamlParametersFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenAiComputeTaskPb that = (GenAiComputeTaskPb) o; + return Objects.equals(command, that.command) + && Objects.equals(compute, that.compute) + && Objects.equals(dlRuntimeImage, that.dlRuntimeImage) + && Objects.equals(mlflowExperimentName, that.mlflowExperimentName) + && Objects.equals(source, that.source) + && Objects.equals(trainingScriptPath, that.trainingScriptPath) + && Objects.equals(yamlParameters, that.yamlParameters) + && Objects.equals(yamlParametersFilePath, that.yamlParametersFilePath); + } + + @Override + public int hashCode() { + return Objects.hash( + command, + compute, + dlRuntimeImage, + mlflowExperimentName, + source, + trainingScriptPath, + yamlParameters, + yamlParametersFilePath); + } + + @Override + public String toString() { + return new 
ToStringer(GenAiComputeTaskPb.class) + .add("command", command) + .add("compute", compute) + .add("dlRuntimeImage", dlRuntimeImage) + .add("mlflowExperimentName", mlflowExperimentName) + .add("source", source) + .add("trainingScriptPath", trainingScriptPath) + .add("yamlParameters", yamlParameters) + .add("yamlParametersFilePath", yamlParametersFilePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequest.java index 6bb25631c..c16cbb224 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get job permission levels */ @Generated +@JsonSerialize(using = GetJobPermissionLevelsRequest.GetJobPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = GetJobPermissionLevelsRequest.GetJobPermissionLevelsRequestDeserializer.class) public class GetJobPermissionLevelsRequest { /** The job for which to get or manage permissions. 
*/ - @JsonIgnore private String jobId; + private String jobId; public GetJobPermissionLevelsRequest setJobId(String jobId) { this.jobId = jobId; @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetJobPermissionLevelsRequest.class).add("jobId", jobId).toString(); } + + GetJobPermissionLevelsRequestPb toPb() { + GetJobPermissionLevelsRequestPb pb = new GetJobPermissionLevelsRequestPb(); + pb.setJobId(jobId); + + return pb; + } + + static GetJobPermissionLevelsRequest fromPb(GetJobPermissionLevelsRequestPb pb) { + GetJobPermissionLevelsRequest model = new GetJobPermissionLevelsRequest(); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class GetJobPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetJobPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetJobPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetJobPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetJobPermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetJobPermissionLevelsRequestPb pb = + mapper.readValue(p, GetJobPermissionLevelsRequestPb.class); + return GetJobPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequestPb.java new file mode 100755 index 000000000..4ed5e48f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get job permission levels */ +@Generated +class GetJobPermissionLevelsRequestPb { + @JsonIgnore private String jobId; + + public GetJobPermissionLevelsRequestPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetJobPermissionLevelsRequestPb that = (GetJobPermissionLevelsRequestPb) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public String toString() { + return new ToStringer(GetJobPermissionLevelsRequestPb.class).add("jobId", jobId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponse.java index 5ebfc49e2..f53600e0b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetJobPermissionLevelsResponse.GetJobPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = GetJobPermissionLevelsResponse.GetJobPermissionLevelsResponseDeserializer.class) public class GetJobPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetJobPermissionLevelsResponse setPermissionLevels( @@ -43,4 +55,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetJobPermissionLevelsResponsePb toPb() { + GetJobPermissionLevelsResponsePb pb = new GetJobPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetJobPermissionLevelsResponse fromPb(GetJobPermissionLevelsResponsePb pb) { + GetJobPermissionLevelsResponse model = new GetJobPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class 
GetJobPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetJobPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetJobPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetJobPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetJobPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetJobPermissionLevelsResponsePb pb = + mapper.readValue(p, GetJobPermissionLevelsResponsePb.class); + return GetJobPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponsePb.java new file mode 100755 index 000000000..7f37edc4e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetJobPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetJobPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetJobPermissionLevelsResponsePb that = (GetJobPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetJobPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequest.java index fffcf7444..b98e2ffb9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get job permissions */ @Generated +@JsonSerialize(using = GetJobPermissionsRequest.GetJobPermissionsRequestSerializer.class) +@JsonDeserialize(using = GetJobPermissionsRequest.GetJobPermissionsRequestDeserializer.class) public class GetJobPermissionsRequest { /** The job for which to get or manage permissions. */ - @JsonIgnore private String jobId; + private String jobId; public GetJobPermissionsRequest setJobId(String jobId) { this.jobId = jobId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetJobPermissionsRequest.class).add("jobId", jobId).toString(); } + + GetJobPermissionsRequestPb toPb() { + GetJobPermissionsRequestPb pb = new GetJobPermissionsRequestPb(); + pb.setJobId(jobId); + + return pb; + } + + static GetJobPermissionsRequest fromPb(GetJobPermissionsRequestPb pb) { + GetJobPermissionsRequest model = new GetJobPermissionsRequest(); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class GetJobPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetJobPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetJobPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetJobPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetJobPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetJobPermissionsRequestPb pb = mapper.readValue(p, GetJobPermissionsRequestPb.class); + return GetJobPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequestPb.java new file mode 100755 index 000000000..210b0f711 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get job permissions */ +@Generated +class GetJobPermissionsRequestPb { + @JsonIgnore private String jobId; + + public GetJobPermissionsRequestPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetJobPermissionsRequestPb that = (GetJobPermissionsRequestPb) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public String toString() { + return new ToStringer(GetJobPermissionsRequestPb.class).add("jobId", jobId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java index 9a1064c89..6a03e49ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a single job */ @Generated +@JsonSerialize(using = GetJobRequest.GetJobRequestSerializer.class) +@JsonDeserialize(using = GetJobRequest.GetJobRequestDeserializer.class) public class GetJobRequest { /** The canonical identifier of the job to retrieve information about. This field is required. */ - @JsonIgnore - @QueryParam("job_id") private Long jobId; /** * Use `next_page_token` returned from the previous GetJob response to request the next page of * the job's array properties. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get a single job */
@Generated
class GetJobRequestPb {
  // Sent as the "job_id" query parameter, never in the JSON body.
  @JsonIgnore
  @QueryParam("job_id")
  private Long jobId;

  // Sent as the "page_token" query parameter, never in the JSON body.
  @JsonIgnore
  @QueryParam("page_token")
  private String pageToken;

  public GetJobRequestPb setJobId(Long jobId) {
    this.jobId = jobId;
    return this;
  }

  public Long getJobId() {
    return jobId;
  }

  public GetJobRequestPb setPageToken(String pageToken) {
    this.pageToken = pageToken;
    return this;
  }

  public String getPageToken() {
    return pageToken;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    GetJobRequestPb other = (GetJobRequestPb) obj;
    return Objects.equals(jobId, other.jobId) && Objects.equals(pageToken, other.pageToken);
  }

  @Override
  public int hashCode() {
    return Objects.hash(jobId, pageToken);
  }

  @Override
  public String toString() {
    return new ToStringer(GetJobRequestPb.class)
        .add("jobId", jobId)
        .add("pageToken", pageToken)
        .toString();
  }
}
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get job policy compliance */ @Generated +@JsonSerialize(using = GetPolicyComplianceRequest.GetPolicyComplianceRequestSerializer.class) +@JsonDeserialize(using = GetPolicyComplianceRequest.GetPolicyComplianceRequestDeserializer.class) public class GetPolicyComplianceRequest { /** The ID of the job whose compliance status you are requesting. */ - @JsonIgnore - @QueryParam("job_id") private Long jobId; public GetPolicyComplianceRequest setJobId(Long jobId) { @@ -42,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetPolicyComplianceRequest.class).add("jobId", jobId).toString(); } + + GetPolicyComplianceRequestPb toPb() { + GetPolicyComplianceRequestPb pb = new GetPolicyComplianceRequestPb(); + pb.setJobId(jobId); + + return pb; + } + + static GetPolicyComplianceRequest fromPb(GetPolicyComplianceRequestPb pb) { + GetPolicyComplianceRequest model = new GetPolicyComplianceRequest(); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class GetPolicyComplianceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPolicyComplianceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPolicyComplianceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPolicyComplianceRequestDeserializer + 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get job policy compliance */
@Generated
class GetPolicyComplianceRequestPb {
  // Transmitted as the "job_id" query parameter; @JsonIgnore keeps it out of the body.
  @JsonIgnore
  @QueryParam("job_id")
  private Long jobId;

  public GetPolicyComplianceRequestPb setJobId(Long jobId) {
    this.jobId = jobId;
    return this;
  }

  public Long getJobId() {
    return jobId;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    GetPolicyComplianceRequestPb other = (GetPolicyComplianceRequestPb) obj;
    return Objects.equals(jobId, other.jobId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(jobId);
  }

  @Override
  public String toString() {
    return new ToStringer(GetPolicyComplianceRequestPb.class).add("jobId", jobId).toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetPolicyComplianceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetPolicyComplianceResponse.java index f14bf3f10..14c8069d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetPolicyComplianceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetPolicyComplianceResponse.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = GetPolicyComplianceResponse.GetPolicyComplianceResponseSerializer.class) +@JsonDeserialize(using = GetPolicyComplianceResponse.GetPolicyComplianceResponseDeserializer.class) public class GetPolicyComplianceResponse { /** * Whether the job is compliant with its policies or not. Jobs could be out of compliance if a * policy they are using was updated after the job was last edited and some of its job clusters no * longer comply with their updated policies. */ - @JsonProperty("is_compliant") private Boolean isCompliant; /** @@ -24,7 +34,6 @@ public class GetPolicyComplianceResponse { * the job cluster is prepended to the path. The values indicate an error message describing the * policy validation error. 
*/ - @JsonProperty("violations") private Map violations; public GetPolicyComplianceResponse setIsCompliant(Boolean isCompliant) { @@ -66,4 +75,43 @@ public String toString() { .add("violations", violations) .toString(); } + + GetPolicyComplianceResponsePb toPb() { + GetPolicyComplianceResponsePb pb = new GetPolicyComplianceResponsePb(); + pb.setIsCompliant(isCompliant); + pb.setViolations(violations); + + return pb; + } + + static GetPolicyComplianceResponse fromPb(GetPolicyComplianceResponsePb pb) { + GetPolicyComplianceResponse model = new GetPolicyComplianceResponse(); + model.setIsCompliant(pb.getIsCompliant()); + model.setViolations(pb.getViolations()); + + return model; + } + + public static class GetPolicyComplianceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPolicyComplianceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPolicyComplianceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPolicyComplianceResponseDeserializer + extends JsonDeserializer { + @Override + public GetPolicyComplianceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

@Generated
class GetPolicyComplianceResponsePb {
  /** Whether the job is compliant with its policies. */
  @JsonProperty("is_compliant")
  private Boolean isCompliant;

  /**
   * Keys are the path of the policy-validation error within the job settings; values are
   * human-readable error messages describing each violation. Declared as
   * {@code Map<String, String>} (rather than a raw {@code Map}) so callers get type-safe access
   * without unchecked casts.
   */
  @JsonProperty("violations")
  private Map<String, String> violations;

  public GetPolicyComplianceResponsePb setIsCompliant(Boolean isCompliant) {
    this.isCompliant = isCompliant;
    return this;
  }

  public Boolean getIsCompliant() {
    return isCompliant;
  }

  public GetPolicyComplianceResponsePb setViolations(Map<String, String> violations) {
    this.violations = violations;
    return this;
  }

  public Map<String, String> getViolations() {
    return violations;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetPolicyComplianceResponsePb that = (GetPolicyComplianceResponsePb) o;
    return Objects.equals(isCompliant, that.isCompliant)
        && Objects.equals(violations, that.violations);
  }

  @Override
  public int hashCode() {
    return Objects.hash(isCompliant, violations);
  }

  @Override
  public String toString() {
    return new ToStringer(GetPolicyComplianceResponsePb.class)
        .add("isCompliant", isCompliant)
        .add("violations", violations)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get the output for a single run */
@Generated
class GetRunOutputRequestPb {
  // Transmitted as the "run_id" query parameter; @JsonIgnore keeps it out of the body.
  @JsonIgnore
  @QueryParam("run_id")
  private Long runId;

  public GetRunOutputRequestPb setRunId(Long runId) {
    this.runId = runId;
    return this;
  }

  public Long getRunId() {
    return runId;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    GetRunOutputRequestPb other = (GetRunOutputRequestPb) obj;
    return Objects.equals(runId, other.runId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(runId);
  }

  @Override
  public String toString() {
    return new ToStringer(GetRunOutputRequestPb.class).add("runId", runId).toString();
  }
}
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a single job run */ @Generated +@JsonSerialize(using = GetRunRequest.GetRunRequestSerializer.class) +@JsonDeserialize(using = GetRunRequest.GetRunRequestDeserializer.class) public class GetRunRequest { /** Whether to include the repair history in the response. */ - @JsonIgnore - @QueryParam("include_history") private Boolean includeHistory; /** Whether to include resolved parameter values in the response. */ - @JsonIgnore - @QueryParam("include_resolved_values") private Boolean includeResolvedValues; /** * Use `next_page_token` returned from the previous GetRun response to request the next page of * the run's array properties. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * The canonical identifier of the run for which to retrieve the metadata. This field is required. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get a single job run */
@Generated
class GetRunRequestPb {
  // All four fields travel as query parameters; @JsonIgnore keeps them out of the body.
  @JsonIgnore
  @QueryParam("include_history")
  private Boolean includeHistory;

  @JsonIgnore
  @QueryParam("include_resolved_values")
  private Boolean includeResolvedValues;

  @JsonIgnore
  @QueryParam("page_token")
  private String pageToken;

  @JsonIgnore
  @QueryParam("run_id")
  private Long runId;

  public GetRunRequestPb setIncludeHistory(Boolean includeHistory) {
    this.includeHistory = includeHistory;
    return this;
  }

  public Boolean getIncludeHistory() {
    return includeHistory;
  }

  public GetRunRequestPb setIncludeResolvedValues(Boolean includeResolvedValues) {
    this.includeResolvedValues = includeResolvedValues;
    return this;
  }

  public Boolean getIncludeResolvedValues() {
    return includeResolvedValues;
  }

  public GetRunRequestPb setPageToken(String pageToken) {
    this.pageToken = pageToken;
    return this;
  }

  public String getPageToken() {
    return pageToken;
  }

  public GetRunRequestPb setRunId(Long runId) {
    this.runId = runId;
    return this;
  }

  public Long getRunId() {
    return runId;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    GetRunRequestPb other = (GetRunRequestPb) obj;
    return Objects.equals(includeHistory, other.includeHistory)
        && Objects.equals(includeResolvedValues, other.includeResolvedValues)
        && Objects.equals(pageToken, other.pageToken)
        && Objects.equals(runId, other.runId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(includeHistory, includeResolvedValues, pageToken, runId);
  }

  @Override
  public String toString() {
    return new ToStringer(GetRunRequestPb.class)
        .add("includeHistory", includeHistory)
        .add("includeResolvedValues", includeResolvedValues)
        .add("pageToken", pageToken)
        .add("runId", runId)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Read-only state of the remote repository at the time the job was run. This field is only included
 * on job runs.
 */
@Generated
class GitSnapshotPb {
  // Serialized as "used_commit" in the JSON payload.
  @JsonProperty("used_commit")
  private String usedCommit;

  public GitSnapshotPb setUsedCommit(String usedCommit) {
    this.usedCommit = usedCommit;
    return this;
  }

  public String getUsedCommit() {
    return usedCommit;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    GitSnapshotPb other = (GitSnapshotPb) obj;
    return Objects.equals(usedCommit, other.usedCommit);
  }

  @Override
  public int hashCode() {
    return Objects.hash(usedCommit);
  }

  @Override
  public String toString() {
    return new ToStringer(GitSnapshotPb.class).add("usedCommit", usedCommit).toString();
  }
}
*/ @Generated +@JsonSerialize(using = GitSource.GitSourceSerializer.class) +@JsonDeserialize(using = GitSource.GitSourceDeserializer.class) public class GitSource { /** * Name of the branch to be checked out and used by this job. This field cannot be specified in * conjunction with git_tag or git_commit. */ - @JsonProperty("git_branch") private String gitBranch; /** * Commit to be checked out and used by this job. This field cannot be specified in conjunction * with git_branch or git_tag. */ - @JsonProperty("git_commit") private String gitCommit; /** * Unique identifier of the service used to host the Git repository. The value is case * insensitive. */ - @JsonProperty("git_provider") private GitProvider gitProvider; /** * Read-only state of the remote repository at the time the job was run. This field is only * included on job runs. */ - @JsonProperty("git_snapshot") private GitSnapshot gitSnapshot; /** * Name of the tag to be checked out and used by this job. This field cannot be specified in * conjunction with git_branch or git_commit. */ - @JsonProperty("git_tag") private String gitTag; /** URL of the repository to be cloned by this job. */ - @JsonProperty("git_url") private String gitUrl; /** * The source of the job specification in the remote repository when the job is source controlled. 
*/ - @JsonProperty("job_source") private JobSource jobSource; public GitSource setGitBranch(String gitBranch) { @@ -158,4 +162,49 @@ public String toString() { .add("jobSource", jobSource) .toString(); } + + GitSourcePb toPb() { + GitSourcePb pb = new GitSourcePb(); + pb.setGitBranch(gitBranch); + pb.setGitCommit(gitCommit); + pb.setGitProvider(gitProvider); + pb.setGitSnapshot(gitSnapshot); + pb.setGitTag(gitTag); + pb.setGitUrl(gitUrl); + pb.setJobSource(jobSource); + + return pb; + } + + static GitSource fromPb(GitSourcePb pb) { + GitSource model = new GitSource(); + model.setGitBranch(pb.getGitBranch()); + model.setGitCommit(pb.getGitCommit()); + model.setGitProvider(pb.getGitProvider()); + model.setGitSnapshot(pb.getGitSnapshot()); + model.setGitTag(pb.getGitTag()); + model.setGitUrl(pb.getGitUrl()); + model.setJobSource(pb.getJobSource()); + + return model; + } + + public static class GitSourceSerializer extends JsonSerializer { + @Override + public void serialize(GitSource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GitSourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GitSourceDeserializer extends JsonDeserializer { + @Override + public GitSource deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GitSourcePb pb = mapper.readValue(p, GitSourcePb.class); + return GitSource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSourcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSourcePb.java new file mode 100755 index 000000000..c78f8c7d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSourcePb.java @@ -0,0 +1,137 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * An optional specification for a remote Git repository containing the source code used by tasks. + * Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. + * + *

If `git_source` is set, these tasks retrieve the file from the remote repository by default. + * However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. + * + *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks + * are used, `git_source` must be defined on the job. + */ +@Generated +class GitSourcePb { + @JsonProperty("git_branch") + private String gitBranch; + + @JsonProperty("git_commit") + private String gitCommit; + + @JsonProperty("git_provider") + private GitProvider gitProvider; + + @JsonProperty("git_snapshot") + private GitSnapshot gitSnapshot; + + @JsonProperty("git_tag") + private String gitTag; + + @JsonProperty("git_url") + private String gitUrl; + + @JsonProperty("job_source") + private JobSource jobSource; + + public GitSourcePb setGitBranch(String gitBranch) { + this.gitBranch = gitBranch; + return this; + } + + public String getGitBranch() { + return gitBranch; + } + + public GitSourcePb setGitCommit(String gitCommit) { + this.gitCommit = gitCommit; + return this; + } + + public String getGitCommit() { + return gitCommit; + } + + public GitSourcePb setGitProvider(GitProvider gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public GitProvider getGitProvider() { + return gitProvider; + } + + public GitSourcePb setGitSnapshot(GitSnapshot gitSnapshot) { + this.gitSnapshot = gitSnapshot; + return this; + } + + public GitSnapshot getGitSnapshot() { + return gitSnapshot; + } + + public GitSourcePb setGitTag(String gitTag) { + this.gitTag = gitTag; + return this; + } + + public String getGitTag() { + return gitTag; + } + + public GitSourcePb setGitUrl(String gitUrl) { + this.gitUrl = gitUrl; + return this; + } + + public String getGitUrl() { + return gitUrl; + } + + public GitSourcePb setJobSource(JobSource jobSource) { + this.jobSource = jobSource; + return this; + } + + public JobSource getJobSource() { + return jobSource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitSourcePb that = (GitSourcePb) o; + return Objects.equals(gitBranch, 
that.gitBranch) + && Objects.equals(gitCommit, that.gitCommit) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitSnapshot, that.gitSnapshot) + && Objects.equals(gitTag, that.gitTag) + && Objects.equals(gitUrl, that.gitUrl) + && Objects.equals(jobSource, that.jobSource); + } + + @Override + public int hashCode() { + return Objects.hash(gitBranch, gitCommit, gitProvider, gitSnapshot, gitTag, gitUrl, jobSource); + } + + @Override + public String toString() { + return new ToStringer(GitSourcePb.class) + .add("gitBranch", gitBranch) + .add("gitCommit", gitCommit) + .add("gitProvider", gitProvider) + .add("gitSnapshot", gitSnapshot) + .add("gitTag", gitTag) + .add("gitUrl", gitUrl) + .add("jobSource", jobSource) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index 062121875..1f0b9225b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Job was retrieved successfully. 
*/ @Generated +@JsonSerialize(using = Job.JobSerializer.class) +@JsonDeserialize(using = Job.JobDeserializer.class) public class Job { /** * The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC). */ - @JsonProperty("created_time") private Long createdTime; /** * The creator user name. This field won’t be included in the response if the user has already * been deleted. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** @@ -29,7 +38,6 @@ public class Job { * Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based * on accessible budget policies of the run_as identity on job creation or modification. */ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; /** @@ -37,15 +45,12 @@ public class Job { * They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 * :method:jobs/list requests with `expand_tasks=true`. */ - @JsonProperty("has_more") private Boolean hasMore; /** The canonical identifier for this job. */ - @JsonProperty("job_id") private Long jobId; /** A token that can be used to list the next page of array properties. */ - @JsonProperty("next_page_token") private String nextPageToken; /** @@ -57,14 +62,12 @@ public class Job { * creator of the job if job access control is disabled or to the user with the `is_owner` * permission if job access control is enabled. */ - @JsonProperty("run_as_user_name") private String runAsUserName; /** * Settings for this job and all of its runs. These settings can be updated using the `resetJob` * method. 
*/ - @JsonProperty("settings") private JobSettings settings; public Job setCreatedTime(Long createdTime) { @@ -180,4 +183,51 @@ public String toString() { .add("settings", settings) .toString(); } + + JobPb toPb() { + JobPb pb = new JobPb(); + pb.setCreatedTime(createdTime); + pb.setCreatorUserName(creatorUserName); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + pb.setHasMore(hasMore); + pb.setJobId(jobId); + pb.setNextPageToken(nextPageToken); + pb.setRunAsUserName(runAsUserName); + pb.setSettings(settings); + + return pb; + } + + static Job fromPb(JobPb pb) { + Job model = new Job(); + model.setCreatedTime(pb.getCreatedTime()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + model.setHasMore(pb.getHasMore()); + model.setJobId(pb.getJobId()); + model.setNextPageToken(pb.getNextPageToken()); + model.setRunAsUserName(pb.getRunAsUserName()); + model.setSettings(pb.getSettings()); + + return model; + } + + public static class JobSerializer extends JsonSerializer { + @Override + public void serialize(Job value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobDeserializer extends JsonDeserializer { + @Override + public Job deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobPb pb = mapper.readValue(p, JobPb.class); + return Job.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java index d2e6553d1..519b3b062 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobAccessControlRequest.JobAccessControlRequestSerializer.class) +@JsonDeserialize(using = JobAccessControlRequest.JobAccessControlRequestDeserializer.class) public class JobAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public JobAccessControlRequest setGroupName(String groupName) { @@ 
-86,4 +93,47 @@ public String toString() { .add("userName", userName) .toString(); } + + JobAccessControlRequestPb toPb() { + JobAccessControlRequestPb pb = new JobAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static JobAccessControlRequest fromPb(JobAccessControlRequestPb pb) { + JobAccessControlRequest model = new JobAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class JobAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + JobAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public JobAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobAccessControlRequestPb pb = mapper.readValue(p, JobAccessControlRequestPb.class); + return JobAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequestPb.java new file mode 100755 index 000000000..b4a7d2587 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private JobPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public JobAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public JobAccessControlRequestPb setPermissionLevel(JobPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public JobPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public JobAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public JobAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobAccessControlRequestPb that = (JobAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return 
new ToStringer(JobAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java index cb4f05c8a..8b269c45f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = JobAccessControlResponse.JobAccessControlResponseSerializer.class) +@JsonDeserialize(using = JobAccessControlResponse.JobAccessControlResponseDeserializer.class) public class JobAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public JobAccessControlResponse setAllPermissions(Collection allPermissions) { @@ -102,4 +108,49 @@ public String toString() { .add("userName", userName) .toString(); } + + JobAccessControlResponsePb toPb() { + JobAccessControlResponsePb pb = new JobAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static JobAccessControlResponse fromPb(JobAccessControlResponsePb pb) { + JobAccessControlResponse model = new JobAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class JobAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + JobAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public JobAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobAccessControlResponsePb pb = mapper.readValue(p, JobAccessControlResponsePb.class); + return JobAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponsePb.java new file mode 100755 index 000000000..c5fea4a95 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class JobAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public JobAccessControlResponsePb setAllPermissions(Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public JobAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public JobAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public JobAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + 
this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public JobAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobAccessControlResponsePb that = (JobAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(JobAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java index 25d519e34..89d53b3d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobCluster.JobClusterSerializer.class) +@JsonDeserialize(using = JobCluster.JobClusterDeserializer.class) public class JobCluster { /** * A unique name for the job cluster. This field is required and must be unique within the job. * `JobTaskSettings` may refer to this field to determine which cluster to launch for the task * execution. */ - @JsonProperty("job_cluster_key") private String jobClusterKey; /** If new_cluster, a description of a cluster that is created for each task. */ - @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; public JobCluster setJobClusterKey(String jobClusterKey) { @@ -60,4 +69,39 @@ public String toString() { .add("newCluster", newCluster) .toString(); } + + JobClusterPb toPb() { + JobClusterPb pb = new JobClusterPb(); + pb.setJobClusterKey(jobClusterKey); + pb.setNewCluster(newCluster); + + return pb; + } + + static JobCluster fromPb(JobClusterPb pb) { + JobCluster model = new JobCluster(); + model.setJobClusterKey(pb.getJobClusterKey()); + model.setNewCluster(pb.getNewCluster()); + + return model; + } + + public static class JobClusterSerializer extends JsonSerializer { + @Override + public void serialize(JobCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobClusterDeserializer extends JsonDeserializer { + @Override + public JobCluster deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobClusterPb pb = mapper.readValue(p, JobClusterPb.class); + return JobCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobClusterPb.java new file mode 100755 index 000000000..1651f5b3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobClusterPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobClusterPb { + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + @JsonProperty("new_cluster") + private com.databricks.sdk.service.compute.ClusterSpec newCluster; + + public JobClusterPb setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + + public JobClusterPb setNewCluster(com.databricks.sdk.service.compute.ClusterSpec newCluster) { + this.newCluster = newCluster; + return this; + } + + public com.databricks.sdk.service.compute.ClusterSpec getNewCluster() { + return newCluster; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobClusterPb that = (JobClusterPb) o; + return Objects.equals(jobClusterKey, that.jobClusterKey) + && Objects.equals(newCluster, that.newCluster); + } + + @Override + public int hashCode() { + return Objects.hash(jobClusterKey, newCluster); + } + + @Override + public String toString() { + return new 
ToStringer(JobClusterPb.class) + .add("jobClusterKey", jobClusterKey) + .add("newCluster", newCluster) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliance.java index 194d6cdde..35bca7946 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliance.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = JobCompliance.JobComplianceSerializer.class) +@JsonDeserialize(using = JobCompliance.JobComplianceDeserializer.class) public class JobCompliance { /** Whether this job is in compliance with the latest version of its policy. */ - @JsonProperty("is_compliant") private Boolean isCompliant; /** Canonical unique identifier for a job. */ - @JsonProperty("job_id") private Long jobId; /** @@ -24,7 +33,6 @@ public class JobCompliance { * the job cluster is prepended to the path. The values indicate an error message describing the * policy validation error. 
*/ - @JsonProperty("violations") private Map violations; public JobCompliance setIsCompliant(Boolean isCompliant) { @@ -77,4 +85,41 @@ public String toString() { .add("violations", violations) .toString(); } + + JobCompliancePb toPb() { + JobCompliancePb pb = new JobCompliancePb(); + pb.setIsCompliant(isCompliant); + pb.setJobId(jobId); + pb.setViolations(violations); + + return pb; + } + + static JobCompliance fromPb(JobCompliancePb pb) { + JobCompliance model = new JobCompliance(); + model.setIsCompliant(pb.getIsCompliant()); + model.setJobId(pb.getJobId()); + model.setViolations(pb.getViolations()); + + return model; + } + + public static class JobComplianceSerializer extends JsonSerializer { + @Override + public void serialize(JobCompliance value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobCompliancePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobComplianceDeserializer extends JsonDeserializer { + @Override + public JobCompliance deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobCompliancePb pb = mapper.readValue(p, JobCompliancePb.class); + return JobCompliance.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliancePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliancePb.java new file mode 100755 index 000000000..c44a1b7d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompliancePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class JobCompliancePb { + @JsonProperty("is_compliant") + private Boolean isCompliant; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("violations") + private Map violations; + + public JobCompliancePb setIsCompliant(Boolean isCompliant) { + this.isCompliant = isCompliant; + return this; + } + + public Boolean getIsCompliant() { + return isCompliant; + } + + public JobCompliancePb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public JobCompliancePb setViolations(Map violations) { + this.violations = violations; + return this; + } + + public Map getViolations() { + return violations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobCompliancePb that = (JobCompliancePb) o; + return Objects.equals(isCompliant, that.isCompliant) + && Objects.equals(jobId, that.jobId) + && Objects.equals(violations, that.violations); + } + + @Override + public int hashCode() { + return Objects.hash(isCompliant, jobId, violations); + } + + @Override + public String toString() { + return new ToStringer(JobCompliancePb.class) + .add("isCompliant", isCompliant) + .add("jobId", jobId) + .add("violations", violations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java index 1487a8cb2..ce3fc2461 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java @@ -4,21 +4,30 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobDeployment.JobDeploymentSerializer.class) +@JsonDeserialize(using = JobDeployment.JobDeploymentDeserializer.class) public class JobDeployment { /** * The kind of deployment that manages the job. * *

* `BUNDLE`: The job is managed by Databricks Asset Bundle. */ - @JsonProperty("kind") private JobDeploymentKind kind; /** Path of the file that contains deployment metadata. */ - @JsonProperty("metadata_file_path") private String metadataFilePath; public JobDeployment setKind(JobDeploymentKind kind) { @@ -60,4 +69,39 @@ public String toString() { .add("metadataFilePath", metadataFilePath) .toString(); } + + JobDeploymentPb toPb() { + JobDeploymentPb pb = new JobDeploymentPb(); + pb.setKind(kind); + pb.setMetadataFilePath(metadataFilePath); + + return pb; + } + + static JobDeployment fromPb(JobDeploymentPb pb) { + JobDeployment model = new JobDeployment(); + model.setKind(pb.getKind()); + model.setMetadataFilePath(pb.getMetadataFilePath()); + + return model; + } + + public static class JobDeploymentSerializer extends JsonSerializer { + @Override + public void serialize(JobDeployment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobDeploymentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobDeploymentDeserializer extends JsonDeserializer { + @Override + public JobDeployment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobDeploymentPb pb = mapper.readValue(p, JobDeploymentPb.class); + return JobDeployment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentPb.java new file mode 100755 index 000000000..c0562a811 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobDeploymentPb { + @JsonProperty("kind") + private JobDeploymentKind kind; + + @JsonProperty("metadata_file_path") + private String metadataFilePath; + + public JobDeploymentPb setKind(JobDeploymentKind kind) { + this.kind = kind; + return this; + } + + public JobDeploymentKind getKind() { + return kind; + } + + public JobDeploymentPb setMetadataFilePath(String metadataFilePath) { + this.metadataFilePath = metadataFilePath; + return this; + } + + public String getMetadataFilePath() { + return metadataFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobDeploymentPb that = (JobDeploymentPb) o; + return Objects.equals(kind, that.kind) + && Objects.equals(metadataFilePath, that.metadataFilePath); + } + + @Override + public int hashCode() { + return Objects.hash(kind, metadataFilePath); + } + + @Override + public String toString() { + return new ToStringer(JobDeploymentPb.class) + .add("kind", kind) + .add("metadataFilePath", metadataFilePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java index 6c56a80cc..7bb03d83e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = JobEmailNotifications.JobEmailNotificationsSerializer.class) +@JsonDeserialize(using = JobEmailNotifications.JobEmailNotificationsDeserializer.class) public class JobEmailNotifications { /** * If true, do not send email to recipients specified in `on_failure` if the run is skipped. This * field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. */ - @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; /** @@ -23,7 +33,6 @@ public class JobEmailNotifications { * `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are * not sent. */ - @JsonProperty("on_duration_warning_threshold_exceeded") private Collection onDurationWarningThresholdExceeded; /** @@ -32,14 +41,12 @@ public class JobEmailNotifications { * `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job * creation, reset, or update the list is empty, and notifications are not sent. */ - @JsonProperty("on_failure") private Collection onFailure; /** * A list of email addresses to be notified when a run begins. If not specified on job creation, * reset, or update, the list is empty, and notifications are not sent. */ - @JsonProperty("on_start") private Collection onStart; /** @@ -49,7 +56,6 @@ public class JobEmailNotifications { * or `STREAMING_BACKLOG_FILES`. 
Alerting is based on the 10-minute average of these metrics. If * the issue persists, notifications are resent every 30 minutes. */ - @JsonProperty("on_streaming_backlog_exceeded") private Collection onStreamingBacklogExceeded; /** @@ -58,7 +64,6 @@ public class JobEmailNotifications { * `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, * and notifications are not sent. */ - @JsonProperty("on_success") private Collection onSuccess; public JobEmailNotifications setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { @@ -153,4 +158,51 @@ public String toString() { .add("onSuccess", onSuccess) .toString(); } + + JobEmailNotificationsPb toPb() { + JobEmailNotificationsPb pb = new JobEmailNotificationsPb(); + pb.setNoAlertForSkippedRuns(noAlertForSkippedRuns); + pb.setOnDurationWarningThresholdExceeded(onDurationWarningThresholdExceeded); + pb.setOnFailure(onFailure); + pb.setOnStart(onStart); + pb.setOnStreamingBacklogExceeded(onStreamingBacklogExceeded); + pb.setOnSuccess(onSuccess); + + return pb; + } + + static JobEmailNotifications fromPb(JobEmailNotificationsPb pb) { + JobEmailNotifications model = new JobEmailNotifications(); + model.setNoAlertForSkippedRuns(pb.getNoAlertForSkippedRuns()); + model.setOnDurationWarningThresholdExceeded(pb.getOnDurationWarningThresholdExceeded()); + model.setOnFailure(pb.getOnFailure()); + model.setOnStart(pb.getOnStart()); + model.setOnStreamingBacklogExceeded(pb.getOnStreamingBacklogExceeded()); + model.setOnSuccess(pb.getOnSuccess()); + + return model; + } + + public static class JobEmailNotificationsSerializer + extends JsonSerializer { + @Override + public void serialize( + JobEmailNotifications value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobEmailNotificationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobEmailNotificationsDeserializer + extends JsonDeserializer { + @Override + public 
JobEmailNotifications deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobEmailNotificationsPb pb = mapper.readValue(p, JobEmailNotificationsPb.class); + return JobEmailNotifications.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotificationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotificationsPb.java new file mode 100755 index 000000000..d90ca6c2a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotificationsPb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class JobEmailNotificationsPb { + @JsonProperty("no_alert_for_skipped_runs") + private Boolean noAlertForSkippedRuns; + + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection onDurationWarningThresholdExceeded; + + @JsonProperty("on_failure") + private Collection onFailure; + + @JsonProperty("on_start") + private Collection onStart; + + @JsonProperty("on_streaming_backlog_exceeded") + private Collection onStreamingBacklogExceeded; + + @JsonProperty("on_success") + private Collection onSuccess; + + public JobEmailNotificationsPb setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { + this.noAlertForSkippedRuns = noAlertForSkippedRuns; + return this; + } + + public Boolean getNoAlertForSkippedRuns() { + return noAlertForSkippedRuns; + } + + public JobEmailNotificationsPb setOnDurationWarningThresholdExceeded( + Collection onDurationWarningThresholdExceeded) { + 
this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public Collection getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + + public JobEmailNotificationsPb setOnFailure(Collection onFailure) { + this.onFailure = onFailure; + return this; + } + + public Collection getOnFailure() { + return onFailure; + } + + public JobEmailNotificationsPb setOnStart(Collection onStart) { + this.onStart = onStart; + return this; + } + + public Collection getOnStart() { + return onStart; + } + + public JobEmailNotificationsPb setOnStreamingBacklogExceeded( + Collection onStreamingBacklogExceeded) { + this.onStreamingBacklogExceeded = onStreamingBacklogExceeded; + return this; + } + + public Collection getOnStreamingBacklogExceeded() { + return onStreamingBacklogExceeded; + } + + public JobEmailNotificationsPb setOnSuccess(Collection onSuccess) { + this.onSuccess = onSuccess; + return this; + } + + public Collection getOnSuccess() { + return onSuccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobEmailNotificationsPb that = (JobEmailNotificationsPb) o; + return Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns) + && Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) + && Objects.equals(onFailure, that.onFailure) + && Objects.equals(onStart, that.onStart) + && Objects.equals(onStreamingBacklogExceeded, that.onStreamingBacklogExceeded) + && Objects.equals(onSuccess, that.onSuccess); + } + + @Override + public int hashCode() { + return Objects.hash( + noAlertForSkippedRuns, + onDurationWarningThresholdExceeded, + onFailure, + onStart, + onStreamingBacklogExceeded, + onSuccess); + } + + @Override + public String toString() { + return new ToStringer(JobEmailNotificationsPb.class) + .add("noAlertForSkippedRuns", noAlertForSkippedRuns) + 
.add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) + .add("onFailure", onFailure) + .add("onStart", onStart) + .add("onStreamingBacklogExceeded", onStreamingBacklogExceeded) + .add("onSuccess", onSuccess) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java index dc3dcccea..f83bf7009 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobEnvironment.JobEnvironmentSerializer.class) +@JsonDeserialize(using = JobEnvironment.JobEnvironmentDeserializer.class) public class JobEnvironment { /** The key of an environment. It has to be unique within a job. */ - @JsonProperty("environment_key") private String environmentKey; /** @@ -18,7 +28,6 @@ public class JobEnvironment { * for non-notebook task, and DLT's environment for classic and serverless pipelines. In this * minimal environment spec, only pip dependencies are supported. 
*/ - @JsonProperty("spec") private com.databricks.sdk.service.compute.Environment spec; public JobEnvironment setEnvironmentKey(String environmentKey) { @@ -59,4 +68,40 @@ public String toString() { .add("spec", spec) .toString(); } + + JobEnvironmentPb toPb() { + JobEnvironmentPb pb = new JobEnvironmentPb(); + pb.setEnvironmentKey(environmentKey); + pb.setSpec(spec); + + return pb; + } + + static JobEnvironment fromPb(JobEnvironmentPb pb) { + JobEnvironment model = new JobEnvironment(); + model.setEnvironmentKey(pb.getEnvironmentKey()); + model.setSpec(pb.getSpec()); + + return model; + } + + public static class JobEnvironmentSerializer extends JsonSerializer { + @Override + public void serialize(JobEnvironment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobEnvironmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobEnvironmentDeserializer extends JsonDeserializer { + @Override + public JobEnvironment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobEnvironmentPb pb = mapper.readValue(p, JobEnvironmentPb.class); + return JobEnvironment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironmentPb.java new file mode 100755 index 000000000..dff2df6da --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironmentPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobEnvironmentPb { + @JsonProperty("environment_key") + private String environmentKey; + + @JsonProperty("spec") + private com.databricks.sdk.service.compute.Environment spec; + + public JobEnvironmentPb setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + + public JobEnvironmentPb setSpec(com.databricks.sdk.service.compute.Environment spec) { + this.spec = spec; + return this; + } + + public com.databricks.sdk.service.compute.Environment getSpec() { + return spec; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobEnvironmentPb that = (JobEnvironmentPb) o; + return Objects.equals(environmentKey, that.environmentKey) && Objects.equals(spec, that.spec); + } + + @Override + public int hashCode() { + return Objects.hash(environmentKey, spec); + } + + @Override + public String toString() { + return new ToStringer(JobEnvironmentPb.class) + .add("environmentKey", environmentKey) + .add("spec", spec) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java index 60c2a008f..5c113b833 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobNotificationSettings.JobNotificationSettingsSerializer.class) +@JsonDeserialize(using = JobNotificationSettings.JobNotificationSettingsDeserializer.class) public class JobNotificationSettings { /** * If true, do not send notifications to recipients specified in `on_failure` if the run is * canceled. */ - @JsonProperty("no_alert_for_canceled_runs") private Boolean noAlertForCanceledRuns; /** * If true, do not send notifications to recipients specified in `on_failure` if the run is * skipped. 
*/ - @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; public JobNotificationSettings setNoAlertForCanceledRuns(Boolean noAlertForCanceledRuns) { @@ -62,4 +71,43 @@ public String toString() { .add("noAlertForSkippedRuns", noAlertForSkippedRuns) .toString(); } + + JobNotificationSettingsPb toPb() { + JobNotificationSettingsPb pb = new JobNotificationSettingsPb(); + pb.setNoAlertForCanceledRuns(noAlertForCanceledRuns); + pb.setNoAlertForSkippedRuns(noAlertForSkippedRuns); + + return pb; + } + + static JobNotificationSettings fromPb(JobNotificationSettingsPb pb) { + JobNotificationSettings model = new JobNotificationSettings(); + model.setNoAlertForCanceledRuns(pb.getNoAlertForCanceledRuns()); + model.setNoAlertForSkippedRuns(pb.getNoAlertForSkippedRuns()); + + return model; + } + + public static class JobNotificationSettingsSerializer + extends JsonSerializer { + @Override + public void serialize( + JobNotificationSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobNotificationSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobNotificationSettingsDeserializer + extends JsonDeserializer { + @Override + public JobNotificationSettings deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobNotificationSettingsPb pb = mapper.readValue(p, JobNotificationSettingsPb.class); + return JobNotificationSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettingsPb.java new file mode 100755 index 000000000..8c767b222 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettingsPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobNotificationSettingsPb { + @JsonProperty("no_alert_for_canceled_runs") + private Boolean noAlertForCanceledRuns; + + @JsonProperty("no_alert_for_skipped_runs") + private Boolean noAlertForSkippedRuns; + + public JobNotificationSettingsPb setNoAlertForCanceledRuns(Boolean noAlertForCanceledRuns) { + this.noAlertForCanceledRuns = noAlertForCanceledRuns; + return this; + } + + public Boolean getNoAlertForCanceledRuns() { + return noAlertForCanceledRuns; + } + + public JobNotificationSettingsPb setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { + this.noAlertForSkippedRuns = noAlertForSkippedRuns; + return this; + } + + public Boolean getNoAlertForSkippedRuns() { + return noAlertForSkippedRuns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobNotificationSettingsPb that = (JobNotificationSettingsPb) o; + return Objects.equals(noAlertForCanceledRuns, that.noAlertForCanceledRuns) + && Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns); + } + + @Override + 
public int hashCode() { + return Objects.hash(noAlertForCanceledRuns, noAlertForSkippedRuns); + } + + @Override + public String toString() { + return new ToStringer(JobNotificationSettingsPb.class) + .add("noAlertForCanceledRuns", noAlertForCanceledRuns) + .add("noAlertForSkippedRuns", noAlertForSkippedRuns) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java index 36dd86e3d..053a1c046 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobParameter.JobParameterSerializer.class) +@JsonDeserialize(using = JobParameter.JobParameterDeserializer.class) public class JobParameter { /** The optional default value of the parameter */ - @JsonProperty("default") private String defaultValue; /** The name of the parameter */ - @JsonProperty("name") private String name; /** The value used in the run */ - @JsonProperty("value") private String value; public JobParameter setDefault(String defaultValue) { @@ -71,4 +79,41 @@ public String 
toString() { .add("value", value) .toString(); } + + JobParameterPb toPb() { + JobParameterPb pb = new JobParameterPb(); + pb.setDefault(defaultValue); + pb.setName(name); + pb.setValue(value); + + return pb; + } + + static JobParameter fromPb(JobParameterPb pb) { + JobParameter model = new JobParameter(); + model.setDefault(pb.getDefault()); + model.setName(pb.getName()); + model.setValue(pb.getValue()); + + return model; + } + + public static class JobParameterSerializer extends JsonSerializer { + @Override + public void serialize(JobParameter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobParameterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobParameterDeserializer extends JsonDeserializer { + @Override + public JobParameter deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobParameterPb pb = mapper.readValue(p, JobParameterPb.class); + return JobParameter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinition.java index d383ad6f9..d46bfe20d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinition.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobParameterDefinition.JobParameterDefinitionSerializer.class) +@JsonDeserialize(using = JobParameterDefinition.JobParameterDefinitionDeserializer.class) public class JobParameterDefinition { /** Default value of the parameter. */ - @JsonProperty("default") private String defaultValue; /** * The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.` */ - @JsonProperty("name") private String name; public JobParameterDefinition setDefault(String defaultValue) { @@ -57,4 +66,43 @@ public String toString() { .add("name", name) .toString(); } + + JobParameterDefinitionPb toPb() { + JobParameterDefinitionPb pb = new JobParameterDefinitionPb(); + pb.setDefault(defaultValue); + pb.setName(name); + + return pb; + } + + static JobParameterDefinition fromPb(JobParameterDefinitionPb pb) { + JobParameterDefinition model = new JobParameterDefinition(); + model.setDefault(pb.getDefault()); + model.setName(pb.getName()); + + return model; + } + + public static class JobParameterDefinitionSerializer + extends JsonSerializer { + @Override + public void serialize( + JobParameterDefinition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobParameterDefinitionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobParameterDefinitionDeserializer + extends JsonDeserializer { + @Override + public JobParameterDefinition deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobParameterDefinitionPb pb = mapper.readValue(p, JobParameterDefinitionPb.class); + return JobParameterDefinition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinitionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinitionPb.java new file mode 100755 index 000000000..8c52b6601 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinitionPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobParameterDefinitionPb { + @JsonProperty("default") + private String defaultValue; + + @JsonProperty("name") + private String name; + + public JobParameterDefinitionPb setDefault(String defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public String getDefault() { + return defaultValue; + } + + public JobParameterDefinitionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobParameterDefinitionPb that = (JobParameterDefinitionPb) o; + return Objects.equals(defaultValue, that.defaultValue) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(defaultValue, name); + } + + @Override + public String toString() { + return new ToStringer(JobParameterDefinitionPb.class) + .add("defaultValue", defaultValue) + .add("name", name) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterPb.java new file mode 100755 index 000000000..5f71bd19e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobParameterPb { + @JsonProperty("default") + private String defaultValue; + + @JsonProperty("name") + private String name; + + @JsonProperty("value") + private String value; + + public JobParameterPb setDefault(String defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public String getDefault() { + return defaultValue; + } + + public JobParameterPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public JobParameterPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobParameterPb that = (JobParameterPb) o; + return Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(name, that.name) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(defaultValue, name, value); + } + + @Override + public String toString() { + return new ToStringer(JobParameterPb.class) + .add("defaultValue", defaultValue) + .add("name", name) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPb.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPb.java new file mode 100755 index 000000000..e31ece5f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPb.java @@ -0,0 +1,150 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Job was retrieved successfully. */ +@Generated +class JobPb { + @JsonProperty("created_time") + private Long createdTime; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("run_as_user_name") + private String runAsUserName; + + @JsonProperty("settings") + private JobSettings settings; + + public JobPb setCreatedTime(Long createdTime) { + this.createdTime = createdTime; + return this; + } + + public Long getCreatedTime() { + return createdTime; + } + + public JobPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public JobPb setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public JobPb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public JobPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long 
getJobId() { + return jobId; + } + + public JobPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public JobPb setRunAsUserName(String runAsUserName) { + this.runAsUserName = runAsUserName; + return this; + } + + public String getRunAsUserName() { + return runAsUserName; + } + + public JobPb setSettings(JobSettings settings) { + this.settings = settings; + return this; + } + + public JobSettings getSettings() { + return settings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobPb that = (JobPb) o; + return Objects.equals(createdTime, that.createdTime) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(hasMore, that.hasMore) + && Objects.equals(jobId, that.jobId) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(runAsUserName, that.runAsUserName) + && Objects.equals(settings, that.settings); + } + + @Override + public int hashCode() { + return Objects.hash( + createdTime, + creatorUserName, + effectiveBudgetPolicyId, + hasMore, + jobId, + nextPageToken, + runAsUserName, + settings); + } + + @Override + public String toString() { + return new ToStringer(JobPb.class) + .add("createdTime", createdTime) + .add("creatorUserName", creatorUserName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("hasMore", hasMore) + .add("jobId", jobId) + .add("nextPageToken", nextPageToken) + .add("runAsUserName", runAsUserName) + .add("settings", settings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java index 4efc5cb2a..c1c25f134 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = JobPermission.JobPermissionSerializer.class) +@JsonDeserialize(using = JobPermission.JobPermissionDeserializer.class) public class JobPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; public JobPermission setInherited(Boolean inherited) { @@ -72,4 +80,41 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + JobPermissionPb toPb() { + JobPermissionPb pb = new JobPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static JobPermission fromPb(JobPermissionPb pb) { + JobPermission model = new JobPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + 
model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class JobPermissionSerializer extends JsonSerializer { + @Override + public void serialize(JobPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobPermissionDeserializer extends JsonDeserializer { + @Override + public JobPermission deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobPermissionPb pb = mapper.readValue(p, JobPermissionPb.class); + return JobPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionPb.java new file mode 100755 index 000000000..4815ccc3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class JobPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private JobPermissionLevel permissionLevel; + + public JobPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public JobPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public JobPermissionPb setPermissionLevel(JobPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public JobPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobPermissionPb that = (JobPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(JobPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions.java index 1a9e6e79e..fdefab2d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = JobPermissions.JobPermissionsSerializer.class) +@JsonDeserialize(using = JobPermissions.JobPermissionsDeserializer.class) public class JobPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public JobPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + JobPermissionsPb toPb() { + JobPermissionsPb pb = new JobPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static JobPermissions fromPb(JobPermissionsPb pb) { + JobPermissions model = new JobPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + 
model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class JobPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(JobPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobPermissionsDeserializer extends JsonDeserializer { + @Override + public JobPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobPermissionsPb pb = mapper.readValue(p, JobPermissionsPb.class); + return JobPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java index cbd9caa08..e3778c937 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = JobPermissionsDescription.JobPermissionsDescriptionSerializer.class) +@JsonDeserialize(using = JobPermissionsDescription.JobPermissionsDescriptionDeserializer.class) public class JobPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; public JobPermissionsDescription setDescription(String description) { @@ -56,4 +65,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + JobPermissionsDescriptionPb toPb() { + JobPermissionsDescriptionPb pb = new JobPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static JobPermissionsDescription fromPb(JobPermissionsDescriptionPb pb) { + JobPermissionsDescription model = new JobPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class JobPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + JobPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public JobPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobPermissionsDescriptionPb pb = mapper.readValue(p, JobPermissionsDescriptionPb.class); + return JobPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescriptionPb.java new file mode 100755 index 000000000..d0e289bba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private JobPermissionLevel permissionLevel; + + public JobPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public JobPermissionsDescriptionPb setPermissionLevel(JobPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public JobPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobPermissionsDescriptionPb that = (JobPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + 
return new ToStringer(JobPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsPb.java new file mode 100755 index 000000000..8e44cb8b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class JobPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public JobPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public JobPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public JobPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobPermissionsPb that = (JobPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, 
that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(JobPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java index 25b46334b..f595b360d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = JobPermissionsRequest.JobPermissionsRequestSerializer.class) +@JsonDeserialize(using = JobPermissionsRequest.JobPermissionsRequestDeserializer.class) public class JobPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The job for which to get or manage permissions. 
*/ - @JsonIgnore private String jobId; + private String jobId; public JobPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("jobId", jobId) .toString(); } + + JobPermissionsRequestPb toPb() { + JobPermissionsRequestPb pb = new JobPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setJobId(jobId); + + return pb; + } + + static JobPermissionsRequest fromPb(JobPermissionsRequestPb pb) { + JobPermissionsRequest model = new JobPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setJobId(pb.getJobId()); + + return model; + } + + public static class JobPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + JobPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public JobPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobPermissionsRequestPb pb = mapper.readValue(p, JobPermissionsRequestPb.class); + return JobPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequestPb.java new file mode 100755 index 000000000..3be94c9f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class JobPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String jobId; + + public JobPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public JobPermissionsRequestPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobPermissionsRequestPb that = (JobPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, jobId); + } + + @Override + public String toString() { + return new ToStringer(JobPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("jobId", jobId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java index 53dc83891..e50168ba0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -15,19 +24,19 @@ * thrown. */ @Generated +@JsonSerialize(using = JobRunAs.JobRunAsSerializer.class) +@JsonDeserialize(using = JobRunAs.JobRunAsDeserializer.class) public class JobRunAs { /** * Application ID of an active service principal. Setting this field requires the * `servicePrincipal/user` role. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** * The email of an active workspace user. Non-admin users can only set this field to their own * email. 
*/ - @JsonProperty("user_name") private String userName; public JobRunAs setServicePrincipalName(String servicePrincipalName) { @@ -69,4 +78,39 @@ public String toString() { .add("userName", userName) .toString(); } + + JobRunAsPb toPb() { + JobRunAsPb pb = new JobRunAsPb(); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static JobRunAs fromPb(JobRunAsPb pb) { + JobRunAs model = new JobRunAs(); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class JobRunAsSerializer extends JsonSerializer { + @Override + public void serialize(JobRunAs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobRunAsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobRunAsDeserializer extends JsonDeserializer { + @Override + public JobRunAs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobRunAsPb pb = mapper.readValue(p, JobRunAsPb.class); + return JobRunAs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAsPb.java new file mode 100755 index 000000000..e2824c0ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAsPb.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Write-only setting. Specifies the user or service principal that the job runs as. 
If not + * specified, the job runs as the user who created the job. + * + *

Either `user_name` or `service_principal_name` should be specified. If not, an error is + * thrown. + */ +@Generated +class JobRunAsPb { + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public JobRunAsPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public JobRunAsPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobRunAsPb that = (JobRunAsPb) o; + return Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(JobRunAsPb.class) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index a79bee35f..750835d48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -4,36 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = JobSettings.JobSettingsSerializer.class) +@JsonDeserialize(using = JobSettings.JobSettingsDeserializer.class) public class JobSettings { /** * The id of the user specified budget policy to use for this job. If not specified, a default * budget policy may be applied when creating or modifying the job. See * `effective_budget_policy_id` for the budget policy used by this workload. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** * An optional continuous property for this job. The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. */ - @JsonProperty("continuous") private Continuous continuous; /** Deployment information for jobs managed by external sources. */ - @JsonProperty("deployment") private JobDeployment deployment; /** * An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. */ - @JsonProperty("description") private String description; /** @@ -42,14 +49,12 @@ public class JobSettings { *

* `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job * is in an editable state and can be modified. */ - @JsonProperty("edit_mode") private JobEditMode editMode; /** * An optional set of email addresses that is notified when runs of this job begin or complete as * well as when this job is deleted. */ - @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; /** @@ -59,14 +64,12 @@ public class JobSettings { * serverless tasks, the task environment is required to be specified using environment_key in the * task settings. */ - @JsonProperty("environments") private Collection environments; /** * Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. * When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. */ - @JsonProperty("format") private Format format; /** @@ -80,11 +83,9 @@ public class JobSettings { *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** An optional set of health rules that can be defined for this job. */ - @JsonProperty("health") private JobsHealthRules health; /** @@ -92,7 +93,6 @@ public class JobSettings { * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in * task settings. */ - @JsonProperty("job_clusters") private Collection jobClusters; /** @@ -105,22 +105,18 @@ public class JobSettings { * runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ - @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; /** An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding. */ - @JsonProperty("name") private String name; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this job. */ - @JsonProperty("notification_settings") private JobNotificationSettings notificationSettings; /** Job-level parameter definitions */ - @JsonProperty("parameters") private Collection parameters; /** @@ -131,11 +127,9 @@ public class JobSettings { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("performance_target") private PerformanceTarget performanceTarget; /** The queue settings of the job. */ - @JsonProperty("queue") private QueueSettings queue; /** @@ -145,14 +139,12 @@ public class JobSettings { *

Either `user_name` or `service_principal_name` should be specified. If not, an error is * thrown. */ - @JsonProperty("run_as") private JobRunAs runAs; /** * An optional periodic schedule for this job. The default behavior is that the job only runs when * triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. */ - @JsonProperty("schedule") private CronSchedule schedule; /** @@ -160,7 +152,6 @@ public class JobSettings { * jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags * can be added to the job. */ - @JsonProperty("tags") private Map tags; /** @@ -170,11 +161,9 @@ public class JobSettings { * available, you can paginate through them using :method:jobs/get. Use the `next_page_token` * field at the object root to determine if more results are available. */ - @JsonProperty("tasks") private Collection tasks; /** An optional timeout applied to each run of this job. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** @@ -182,11 +171,9 @@ public class JobSettings { * the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request * to `runNow`. */ - @JsonProperty("trigger") private TriggerSettings trigger; /** A collection of system notification IDs to notify when runs of this job begin or complete. 
*/ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; public JobSettings setBudgetPolicyId(String budgetPolicyId) { @@ -494,4 +481,83 @@ public String toString() { .add("webhookNotifications", webhookNotifications) .toString(); } + + JobSettingsPb toPb() { + JobSettingsPb pb = new JobSettingsPb(); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setContinuous(continuous); + pb.setDeployment(deployment); + pb.setDescription(description); + pb.setEditMode(editMode); + pb.setEmailNotifications(emailNotifications); + pb.setEnvironments(environments); + pb.setFormat(format); + pb.setGitSource(gitSource); + pb.setHealth(health); + pb.setJobClusters(jobClusters); + pb.setMaxConcurrentRuns(maxConcurrentRuns); + pb.setName(name); + pb.setNotificationSettings(notificationSettings); + pb.setParameters(parameters); + pb.setPerformanceTarget(performanceTarget); + pb.setQueue(queue); + pb.setRunAs(runAs); + pb.setSchedule(schedule); + pb.setTags(tags); + pb.setTasks(tasks); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setTrigger(trigger); + pb.setWebhookNotifications(webhookNotifications); + + return pb; + } + + static JobSettings fromPb(JobSettingsPb pb) { + JobSettings model = new JobSettings(); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setContinuous(pb.getContinuous()); + model.setDeployment(pb.getDeployment()); + model.setDescription(pb.getDescription()); + model.setEditMode(pb.getEditMode()); + model.setEmailNotifications(pb.getEmailNotifications()); + model.setEnvironments(pb.getEnvironments()); + model.setFormat(pb.getFormat()); + model.setGitSource(pb.getGitSource()); + model.setHealth(pb.getHealth()); + model.setJobClusters(pb.getJobClusters()); + model.setMaxConcurrentRuns(pb.getMaxConcurrentRuns()); + model.setName(pb.getName()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setParameters(pb.getParameters()); + model.setPerformanceTarget(pb.getPerformanceTarget()); + 
model.setQueue(pb.getQueue()); + model.setRunAs(pb.getRunAs()); + model.setSchedule(pb.getSchedule()); + model.setTags(pb.getTags()); + model.setTasks(pb.getTasks()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setTrigger(pb.getTrigger()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + + return model; + } + + public static class JobSettingsSerializer extends JsonSerializer { + @Override + public void serialize(JobSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobSettingsDeserializer extends JsonDeserializer { + @Override + public JobSettings deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobSettingsPb pb = mapper.readValue(p, JobSettingsPb.class); + return JobSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsPb.java new file mode 100755 index 000000000..2c3a40c47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsPb.java @@ -0,0 +1,391 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class JobSettingsPb { + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("continuous") + private Continuous continuous; + + @JsonProperty("deployment") + private JobDeployment deployment; + + @JsonProperty("description") + private String description; + + @JsonProperty("edit_mode") + private JobEditMode editMode; + + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + + @JsonProperty("environments") + private Collection environments; + + @JsonProperty("format") + private Format format; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("health") + private JobsHealthRules health; + + @JsonProperty("job_clusters") + private Collection jobClusters; + + @JsonProperty("max_concurrent_runs") + private Long maxConcurrentRuns; + + @JsonProperty("name") + private String name; + + @JsonProperty("notification_settings") + private JobNotificationSettings notificationSettings; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("performance_target") + private PerformanceTarget performanceTarget; + + @JsonProperty("queue") + private QueueSettings queue; + + @JsonProperty("run_as") + private JobRunAs runAs; + + @JsonProperty("schedule") + private CronSchedule schedule; + + @JsonProperty("tags") + private Map tags; + + @JsonProperty("tasks") + private Collection tasks; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("trigger") + private TriggerSettings trigger; + + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + + public JobSettingsPb setBudgetPolicyId(String budgetPolicyId) { + 
this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public JobSettingsPb setContinuous(Continuous continuous) { + this.continuous = continuous; + return this; + } + + public Continuous getContinuous() { + return continuous; + } + + public JobSettingsPb setDeployment(JobDeployment deployment) { + this.deployment = deployment; + return this; + } + + public JobDeployment getDeployment() { + return deployment; + } + + public JobSettingsPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public JobSettingsPb setEditMode(JobEditMode editMode) { + this.editMode = editMode; + return this; + } + + public JobEditMode getEditMode() { + return editMode; + } + + public JobSettingsPb setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public JobSettingsPb setEnvironments(Collection environments) { + this.environments = environments; + return this; + } + + public Collection getEnvironments() { + return environments; + } + + public JobSettingsPb setFormat(Format format) { + this.format = format; + return this; + } + + public Format getFormat() { + return format; + } + + public JobSettingsPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public JobSettingsPb setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + + public JobSettingsPb setJobClusters(Collection jobClusters) { + this.jobClusters = jobClusters; + return this; + } + + public Collection getJobClusters() { + return jobClusters; + } + + public JobSettingsPb 
setMaxConcurrentRuns(Long maxConcurrentRuns) { + this.maxConcurrentRuns = maxConcurrentRuns; + return this; + } + + public Long getMaxConcurrentRuns() { + return maxConcurrentRuns; + } + + public JobSettingsPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public JobSettingsPb setNotificationSettings(JobNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public JobNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public JobSettingsPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public JobSettingsPb setPerformanceTarget(PerformanceTarget performanceTarget) { + this.performanceTarget = performanceTarget; + return this; + } + + public PerformanceTarget getPerformanceTarget() { + return performanceTarget; + } + + public JobSettingsPb setQueue(QueueSettings queue) { + this.queue = queue; + return this; + } + + public QueueSettings getQueue() { + return queue; + } + + public JobSettingsPb setRunAs(JobRunAs runAs) { + this.runAs = runAs; + return this; + } + + public JobRunAs getRunAs() { + return runAs; + } + + public JobSettingsPb setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public JobSettingsPb setTags(Map tags) { + this.tags = tags; + return this; + } + + public Map getTags() { + return tags; + } + + public JobSettingsPb setTasks(Collection tasks) { + this.tasks = tasks; + return this; + } + + public Collection getTasks() { + return tasks; + } + + public JobSettingsPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public JobSettingsPb 
setTrigger(TriggerSettings trigger) { + this.trigger = trigger; + return this; + } + + public TriggerSettings getTrigger() { + return trigger; + } + + public JobSettingsPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobSettingsPb that = (JobSettingsPb) o; + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) + && Objects.equals(description, that.description) + && Objects.equals(editMode, that.editMode) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environments, that.environments) + && Objects.equals(format, that.format) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) + && Objects.equals(jobClusters, that.jobClusters) + && Objects.equals(maxConcurrentRuns, that.maxConcurrentRuns) + && Objects.equals(name, that.name) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(parameters, that.parameters) + && Objects.equals(performanceTarget, that.performanceTarget) + && Objects.equals(queue, that.queue) + && Objects.equals(runAs, that.runAs) + && Objects.equals(schedule, that.schedule) + && Objects.equals(tags, that.tags) + && Objects.equals(tasks, that.tasks) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(trigger, that.trigger) + && Objects.equals(webhookNotifications, that.webhookNotifications); + } + + @Override + public int hashCode() { + return Objects.hash( + budgetPolicyId, + continuous, + deployment, + description, + editMode, + emailNotifications, + environments, + format, + gitSource, + 
health, + jobClusters, + maxConcurrentRuns, + name, + notificationSettings, + parameters, + performanceTarget, + queue, + runAs, + schedule, + tags, + tasks, + timeoutSeconds, + trigger, + webhookNotifications); + } + + @Override + public String toString() { + return new ToStringer(JobSettingsPb.class) + .add("budgetPolicyId", budgetPolicyId) + .add("continuous", continuous) + .add("deployment", deployment) + .add("description", description) + .add("editMode", editMode) + .add("emailNotifications", emailNotifications) + .add("environments", environments) + .add("format", format) + .add("gitSource", gitSource) + .add("health", health) + .add("jobClusters", jobClusters) + .add("maxConcurrentRuns", maxConcurrentRuns) + .add("name", name) + .add("notificationSettings", notificationSettings) + .add("parameters", parameters) + .add("performanceTarget", performanceTarget) + .add("queue", queue) + .add("runAs", runAs) + .add("schedule", schedule) + .add("tags", tags) + .add("tasks", tasks) + .add("timeoutSeconds", timeoutSeconds) + .add("trigger", trigger) + .add("webhookNotifications", webhookNotifications) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java index d1f4ac1e8..e2b1a9954 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** * The source of the job specification in the remote repository when the job is source controlled. */ @Generated +@JsonSerialize(using = JobSource.JobSourceSerializer.class) +@JsonDeserialize(using = JobSource.JobSourceDeserializer.class) public class JobSource { /** * Dirty state indicates the job is not fully synced with the job specification in the remote @@ -22,15 +33,12 @@ public class JobSource { * allowed for live edit. Import the remote job specification again from UI to make the job fully * synced. */ - @JsonProperty("dirty_state") private JobSourceDirtyState dirtyState; /** Name of the branch which the job is imported from. */ - @JsonProperty("import_from_git_branch") private String importFromGitBranch; /** Path of the job YAML file that contains the job specification. 
*/ - @JsonProperty("job_config_path") private String jobConfigPath; public JobSource setDirtyState(JobSourceDirtyState dirtyState) { @@ -83,4 +91,41 @@ public String toString() { .add("jobConfigPath", jobConfigPath) .toString(); } + + JobSourcePb toPb() { + JobSourcePb pb = new JobSourcePb(); + pb.setDirtyState(dirtyState); + pb.setImportFromGitBranch(importFromGitBranch); + pb.setJobConfigPath(jobConfigPath); + + return pb; + } + + static JobSource fromPb(JobSourcePb pb) { + JobSource model = new JobSource(); + model.setDirtyState(pb.getDirtyState()); + model.setImportFromGitBranch(pb.getImportFromGitBranch()); + model.setJobConfigPath(pb.getJobConfigPath()); + + return model; + } + + public static class JobSourceSerializer extends JsonSerializer { + @Override + public void serialize(JobSource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobSourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobSourceDeserializer extends JsonDeserializer { + @Override + public JobSource deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobSourcePb pb = mapper.readValue(p, JobSourcePb.class); + return JobSource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourcePb.java new file mode 100755 index 000000000..3a2e1722f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourcePb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The source of the job specification in the remote repository when the job is source controlled. + */ +@Generated +class JobSourcePb { + @JsonProperty("dirty_state") + private JobSourceDirtyState dirtyState; + + @JsonProperty("import_from_git_branch") + private String importFromGitBranch; + + @JsonProperty("job_config_path") + private String jobConfigPath; + + public JobSourcePb setDirtyState(JobSourceDirtyState dirtyState) { + this.dirtyState = dirtyState; + return this; + } + + public JobSourceDirtyState getDirtyState() { + return dirtyState; + } + + public JobSourcePb setImportFromGitBranch(String importFromGitBranch) { + this.importFromGitBranch = importFromGitBranch; + return this; + } + + public String getImportFromGitBranch() { + return importFromGitBranch; + } + + public JobSourcePb setJobConfigPath(String jobConfigPath) { + this.jobConfigPath = jobConfigPath; + return this; + } + + public String getJobConfigPath() { + return jobConfigPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobSourcePb that = (JobSourcePb) o; + return Objects.equals(dirtyState, that.dirtyState) + && Objects.equals(importFromGitBranch, that.importFromGitBranch) + && Objects.equals(jobConfigPath, that.jobConfigPath); + } + + @Override + public int hashCode() { + return Objects.hash(dirtyState, importFromGitBranch, jobConfigPath); + } + + @Override + public String toString() { + return new ToStringer(JobSourcePb.class) + .add("dirtyState", dirtyState) + .add("importFromGitBranch", importFromGitBranch) + .add("jobConfigPath", jobConfigPath) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java index c76e051bb..361fb2cb6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobsHealthRule.JobsHealthRuleSerializer.class) +@JsonDeserialize(using = JobsHealthRule.JobsHealthRuleDeserializer.class) public class JobsHealthRule { /** * Specifies the health metric that is being evaluated for a particular health rule. @@ -20,19 +31,16 @@ public class JobsHealthRule { * metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of * outstanding files across all streams. This metric is in Public Preview. */ - @JsonProperty("metric") private JobsHealthMetric metric; /** * Specifies the operator used to compare the health metric value with the specified threshold. */ - @JsonProperty("op") private JobsHealthOperator op; /** * Specifies the threshold value that the health metric should obey to satisfy the health rule. 
*/ - @JsonProperty("value") private Long value; public JobsHealthRule setMetric(JobsHealthMetric metric) { @@ -85,4 +93,42 @@ public String toString() { .add("value", value) .toString(); } + + JobsHealthRulePb toPb() { + JobsHealthRulePb pb = new JobsHealthRulePb(); + pb.setMetric(metric); + pb.setOp(op); + pb.setValue(value); + + return pb; + } + + static JobsHealthRule fromPb(JobsHealthRulePb pb) { + JobsHealthRule model = new JobsHealthRule(); + model.setMetric(pb.getMetric()); + model.setOp(pb.getOp()); + model.setValue(pb.getValue()); + + return model; + } + + public static class JobsHealthRuleSerializer extends JsonSerializer { + @Override + public void serialize(JobsHealthRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobsHealthRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobsHealthRuleDeserializer extends JsonDeserializer { + @Override + public JobsHealthRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobsHealthRulePb pb = mapper.readValue(p, JobsHealthRulePb.class); + return JobsHealthRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulePb.java new file mode 100755 index 000000000..be667a0db --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobsHealthRulePb { + @JsonProperty("metric") + private JobsHealthMetric metric; + + @JsonProperty("op") + private JobsHealthOperator op; + + @JsonProperty("value") + private Long value; + + public JobsHealthRulePb setMetric(JobsHealthMetric metric) { + this.metric = metric; + return this; + } + + public JobsHealthMetric getMetric() { + return metric; + } + + public JobsHealthRulePb setOp(JobsHealthOperator op) { + this.op = op; + return this; + } + + public JobsHealthOperator getOp() { + return op; + } + + public JobsHealthRulePb setValue(Long value) { + this.value = value; + return this; + } + + public Long getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobsHealthRulePb that = (JobsHealthRulePb) o; + return Objects.equals(metric, that.metric) + && Objects.equals(op, that.op) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(metric, op, value); + } + + @Override + public String toString() { + return new ToStringer(JobsHealthRulePb.class) + .add("metric", metric) + .add("op", op) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java index 45f3e3015..1f1bae743 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** An optional set of health rules that can be defined for this job. */ @Generated +@JsonSerialize(using = JobsHealthRules.JobsHealthRulesSerializer.class) +@JsonDeserialize(using = JobsHealthRules.JobsHealthRulesDeserializer.class) public class JobsHealthRules { /** */ - @JsonProperty("rules") private Collection rules; public JobsHealthRules setRules(Collection rules) { @@ -41,4 +51,38 @@ public int hashCode() { public String toString() { return new ToStringer(JobsHealthRules.class).add("rules", rules).toString(); } + + JobsHealthRulesPb toPb() { + JobsHealthRulesPb pb = new JobsHealthRulesPb(); + pb.setRules(rules); + + return pb; + } + + static JobsHealthRules fromPb(JobsHealthRulesPb pb) { + JobsHealthRules model = new JobsHealthRules(); + model.setRules(pb.getRules()); + + return model; + } + + public static class JobsHealthRulesSerializer extends JsonSerializer { + @Override + public void serialize(JobsHealthRules value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobsHealthRulesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobsHealthRulesDeserializer extends JsonDeserializer { + @Override + public JobsHealthRules deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and 
it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobsHealthRulesPb pb = mapper.readValue(p, JobsHealthRulesPb.class); + return JobsHealthRules.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulesPb.java new file mode 100755 index 000000000..47e91b14d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRulesPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** An optional set of health rules that can be defined for this job. */ +@Generated +class JobsHealthRulesPb { + @JsonProperty("rules") + private Collection rules; + + public JobsHealthRulesPb setRules(Collection rules) { + this.rules = rules; + return this; + } + + public Collection getRules() { + return rules; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobsHealthRulesPb that = (JobsHealthRulesPb) o; + return Objects.equals(rules, that.rules); + } + + @Override + public int hashCode() { + return Objects.hash(rules); + } + + @Override + public String toString() { + return new ToStringer(JobsHealthRulesPb.class).add("rules", rules).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java index 0902a4b5f..48d138c89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java @@ -21,7 +21,7 @@ public void cancelAllRuns(CancelAllRuns request) { String path = "/api/2.2/jobs/runs/cancel-all"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CancelAllRunsResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public void cancelRun(CancelRun request) { String path = "/api/2.2/jobs/runs/cancel"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CancelRunResponse.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public CreateResponse create(CreateJob request) { String path = "/api/2.2/jobs/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateResponse.class); @@ -61,7 +61,7 @@ public void delete(DeleteJob request) { String path = "/api/2.2/jobs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public void deleteRun(DeleteRun request) { String path = "/api/2.2/jobs/runs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, 
DeleteRunResponse.class); } catch (IOException e) { @@ -87,7 +87,7 @@ public ExportRunOutput exportRun(ExportRunRequest request) { String path = "/api/2.2/jobs/runs/export"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ExportRunOutput.class); } catch (IOException e) { @@ -100,7 +100,7 @@ public Job get(GetJobRequest request) { String path = "/api/2.2/jobs/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Job.class); } catch (IOException e) { @@ -114,7 +114,7 @@ public GetJobPermissionLevelsResponse getPermissionLevels(GetJobPermissionLevels String.format("/api/2.0/permissions/jobs/%s/permissionLevels", request.getJobId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetJobPermissionLevelsResponse.class); } catch (IOException e) { @@ -127,7 +127,7 @@ public JobPermissions getPermissions(GetJobPermissionsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, JobPermissions.class); } catch (IOException e) { @@ -140,7 +140,7 @@ public Run getRun(GetRunRequest request) { String path = "/api/2.2/jobs/runs/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Run.class); } catch (IOException e) { @@ -153,7 +153,7 @@ 
public RunOutput getRunOutput(GetRunOutputRequest request) { String path = "/api/2.2/jobs/runs/get-output"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RunOutput.class); } catch (IOException e) { @@ -166,7 +166,7 @@ public ListJobsResponse list(ListJobsRequest request) { String path = "/api/2.2/jobs/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListJobsResponse.class); } catch (IOException e) { @@ -179,7 +179,7 @@ public ListRunsResponse listRuns(ListRunsRequest request) { String path = "/api/2.2/jobs/runs/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListRunsResponse.class); } catch (IOException e) { @@ -192,7 +192,7 @@ public RepairRunResponse repairRun(RepairRun request) { String path = "/api/2.2/jobs/runs/repair"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RepairRunResponse.class); @@ -206,7 +206,7 @@ public void reset(ResetJob request) { String path = "/api/2.2/jobs/reset"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ResetResponse.class); } catch (IOException e) { @@ -219,7 +219,7 @@ public RunNowResponse runNow(RunNow request) { String path = "/api/2.2/jobs/run-now"; 
try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RunNowResponse.class); @@ -233,7 +233,7 @@ public JobPermissions setPermissions(JobPermissionsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, JobPermissions.class); @@ -247,7 +247,7 @@ public SubmitRunResponse submit(SubmitRun request) { String path = "/api/2.2/jobs/runs/submit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SubmitRunResponse.class); @@ -261,7 +261,7 @@ public void update(UpdateJob request) { String path = "/api/2.2/jobs/update"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { @@ -274,7 +274,7 @@ public JobPermissions updatePermissions(JobPermissionsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, JobPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponse.java index 46647f1f1..a5b89b3cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponse.java @@ -4,28 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListJobComplianceForPolicyResponse.ListJobComplianceForPolicyResponseSerializer.class) +@JsonDeserialize( + using = ListJobComplianceForPolicyResponse.ListJobComplianceForPolicyResponseDeserializer.class) public class ListJobComplianceForPolicyResponse { /** A list of jobs and their policy compliance statuses. */ - @JsonProperty("jobs") private Collection jobs; /** * This field represents the pagination token to retrieve the next page of results. If this field * is not in the response, it means no further results for the request. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** * This field represents the pagination token to retrieve the previous page of results. If this * field is not in the response, it means no further results for the request. */ - @JsonProperty("prev_page_token") private String prevPageToken; public ListJobComplianceForPolicyResponse setJobs(Collection jobs) { @@ -78,4 +88,46 @@ public String toString() { .add("prevPageToken", prevPageToken) .toString(); } + + ListJobComplianceForPolicyResponsePb toPb() { + ListJobComplianceForPolicyResponsePb pb = new ListJobComplianceForPolicyResponsePb(); + pb.setJobs(jobs); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + + return pb; + } + + static ListJobComplianceForPolicyResponse fromPb(ListJobComplianceForPolicyResponsePb pb) { + ListJobComplianceForPolicyResponse model = new ListJobComplianceForPolicyResponse(); + model.setJobs(pb.getJobs()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + + return model; + } + + public static class ListJobComplianceForPolicyResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListJobComplianceForPolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListJobComplianceForPolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListJobComplianceForPolicyResponseDeserializer + extends JsonDeserializer { + @Override + public ListJobComplianceForPolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListJobComplianceForPolicyResponsePb pb = + mapper.readValue(p, ListJobComplianceForPolicyResponsePb.class); + return ListJobComplianceForPolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponsePb.java new file mode 100755 index 000000000..1d1962217 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceForPolicyResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListJobComplianceForPolicyResponsePb { + @JsonProperty("jobs") + private Collection jobs; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + public ListJobComplianceForPolicyResponsePb setJobs(Collection jobs) { + this.jobs = jobs; + return this; + } + + public Collection getJobs() { + return jobs; + } + + public ListJobComplianceForPolicyResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListJobComplianceForPolicyResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ListJobComplianceForPolicyResponsePb that = (ListJobComplianceForPolicyResponsePb) o; + return Objects.equals(jobs, that.jobs) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(jobs, nextPageToken, prevPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListJobComplianceForPolicyResponsePb.class) + .add("jobs", jobs) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequest.java index 0aa6b6b42..58e720220 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequest.java @@ -3,33 +3,37 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List job policy compliance */ @Generated +@JsonSerialize(using = ListJobComplianceRequest.ListJobComplianceRequestSerializer.class) 
+@JsonDeserialize(using = ListJobComplianceRequest.ListJobComplianceRequestDeserializer.class) public class ListJobComplianceRequest { /** * Use this field to specify the maximum number of results to be returned by the server. The * server may further constrain the maximum number of results returned in a single page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** * A page token that can be used to navigate to the next page or previous page as returned by * `next_page_token` or `prev_page_token`. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Canonical unique identifier for the cluster policy. */ - @JsonIgnore - @QueryParam("policy_id") private String policyId; public ListJobComplianceRequest setPageSize(Long pageSize) { @@ -82,4 +86,45 @@ public String toString() { .add("policyId", policyId) .toString(); } + + ListJobComplianceRequestPb toPb() { + ListJobComplianceRequestPb pb = new ListJobComplianceRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setPolicyId(policyId); + + return pb; + } + + static ListJobComplianceRequest fromPb(ListJobComplianceRequestPb pb) { + ListJobComplianceRequest model = new ListJobComplianceRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class ListJobComplianceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListJobComplianceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListJobComplianceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListJobComplianceRequestDeserializer + extends JsonDeserializer { + @Override + public ListJobComplianceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListJobComplianceRequestPb pb = mapper.readValue(p, ListJobComplianceRequestPb.class); + return ListJobComplianceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequestPb.java new file mode 100755 index 000000000..919661369 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobComplianceRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List job policy compliance */ +@Generated +class ListJobComplianceRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("policy_id") + private String policyId; + + public ListJobComplianceRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListJobComplianceRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListJobComplianceRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListJobComplianceRequestPb that = (ListJobComplianceRequestPb) o; + return Objects.equals(pageSize, 
that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, policyId); + } + + @Override + public String toString() { + return new ToStringer(ListJobComplianceRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java index cae63d9e8..502046fe4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java @@ -3,49 +3,49 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List jobs */ @Generated +@JsonSerialize(using = ListJobsRequest.ListJobsRequestSerializer.class) +@JsonDeserialize(using = ListJobsRequest.ListJobsRequestDeserializer.class) public class ListJobsRequest { /** * Whether to include task and cluster details in the response. Note that only the first 100 * elements will be shown. 
Use :method:jobs/get to paginate through all tasks and clusters. */ - @JsonIgnore - @QueryParam("expand_tasks") private Boolean expandTasks; /** * The number of jobs to return. This value must be greater than 0 and less or equal to 100. The * default value is 20. */ - @JsonIgnore - @QueryParam("limit") private Long limit; /** A filter on the list based on the exact (case insensitive) job name. */ - @JsonIgnore - @QueryParam("name") private String name; /** * The offset of the first job to return, relative to the most recently created job. Deprecated * since June 2023. Use `page_token` to iterate through the pages instead. */ - @JsonIgnore - @QueryParam("offset") private Long offset; /** * Use `next_page_token` or `prev_page_token` returned from the previous request to list the next * or previous page of jobs respectively. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListJobsRequest setExpandTasks(Boolean expandTasks) { @@ -120,4 +120,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListJobsRequestPb toPb() { + ListJobsRequestPb pb = new ListJobsRequestPb(); + pb.setExpandTasks(expandTasks); + pb.setLimit(limit); + pb.setName(name); + pb.setOffset(offset); + pb.setPageToken(pageToken); + + return pb; + } + + static ListJobsRequest fromPb(ListJobsRequestPb pb) { + ListJobsRequest model = new ListJobsRequest(); + model.setExpandTasks(pb.getExpandTasks()); + model.setLimit(pb.getLimit()); + model.setName(pb.getName()); + model.setOffset(pb.getOffset()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListJobsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListJobsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListJobsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListJobsRequestDeserializer extends JsonDeserializer { + @Override + public 
ListJobsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListJobsRequestPb pb = mapper.readValue(p, ListJobsRequestPb.class); + return ListJobsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequestPb.java new file mode 100755 index 000000000..35f18a5df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List jobs */ +@Generated +class ListJobsRequestPb { + @JsonIgnore + @QueryParam("expand_tasks") + private Boolean expandTasks; + + @JsonIgnore + @QueryParam("limit") + private Long limit; + + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("offset") + private Long offset; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListJobsRequestPb setExpandTasks(Boolean expandTasks) { + this.expandTasks = expandTasks; + return this; + } + + public Boolean getExpandTasks() { + return expandTasks; + } + + public ListJobsRequestPb setLimit(Long limit) { + this.limit = limit; + return this; + } + + public Long getLimit() { + return limit; + } + + public ListJobsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ListJobsRequestPb setOffset(Long offset) { + this.offset = offset; + 
return this; + } + + public Long getOffset() { + return offset; + } + + public ListJobsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListJobsRequestPb that = (ListJobsRequestPb) o; + return Objects.equals(expandTasks, that.expandTasks) + && Objects.equals(limit, that.limit) + && Objects.equals(name, that.name) + && Objects.equals(offset, that.offset) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(expandTasks, limit, name, offset, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListJobsRequestPb.class) + .add("expandTasks", expandTasks) + .add("limit", limit) + .add("name", name) + .add("offset", offset) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java index b45e2ca24..29ad85916 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java @@ -4,27 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List of jobs was retrieved successfully. */ @Generated +@JsonSerialize(using = ListJobsResponse.ListJobsResponseSerializer.class) +@JsonDeserialize(using = ListJobsResponse.ListJobsResponseDeserializer.class) public class ListJobsResponse { /** If true, additional jobs matching the provided filter are available for listing. */ - @JsonProperty("has_more") private Boolean hasMore; /** The list of jobs. Only included in the response if there are jobs to list. */ - @JsonProperty("jobs") private Collection jobs; /** A token that can be used to list the next page of jobs (if applicable). */ - @JsonProperty("next_page_token") private String nextPageToken; /** A token that can be used to list the previous page of jobs (if applicable). */ - @JsonProperty("prev_page_token") private String prevPageToken; public ListJobsResponse setHasMore(Boolean hasMore) { @@ -88,4 +95,44 @@ public String toString() { .add("prevPageToken", prevPageToken) .toString(); } + + ListJobsResponsePb toPb() { + ListJobsResponsePb pb = new ListJobsResponsePb(); + pb.setHasMore(hasMore); + pb.setJobs(jobs); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + + return pb; + } + + static ListJobsResponse fromPb(ListJobsResponsePb pb) { + ListJobsResponse model = new ListJobsResponse(); + model.setHasMore(pb.getHasMore()); + model.setJobs(pb.getJobs()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + + return model; + } + + public static class ListJobsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListJobsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListJobsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class ListJobsResponseDeserializer extends JsonDeserializer { + @Override + public ListJobsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListJobsResponsePb pb = mapper.readValue(p, ListJobsResponsePb.class); + return ListJobsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponsePb.java new file mode 100755 index 000000000..b5a5e5afc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponsePb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** List of jobs was retrieved successfully. 
*/ +@Generated +class ListJobsResponsePb { + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("jobs") + private Collection jobs; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + public ListJobsResponsePb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public ListJobsResponsePb setJobs(Collection jobs) { + this.jobs = jobs; + return this; + } + + public Collection getJobs() { + return jobs; + } + + public ListJobsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListJobsResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListJobsResponsePb that = (ListJobsResponsePb) o; + return Objects.equals(hasMore, that.hasMore) + && Objects.equals(jobs, that.jobs) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(hasMore, jobs, nextPageToken, prevPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListJobsResponsePb.class) + .add("hasMore", hasMore) + .add("jobs", jobs) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java index 2f28cf7ef..3e2c650a6 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java @@ -3,87 +3,77 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List job runs */ @Generated +@JsonSerialize(using = ListRunsRequest.ListRunsRequestSerializer.class) +@JsonDeserialize(using = ListRunsRequest.ListRunsRequestDeserializer.class) public class ListRunsRequest { /** * If active_only is `true`, only active runs are included in the results; otherwise, lists both * active and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or * `TERMINATING`. This field cannot be `true` when completed_only is `true`. */ - @JsonIgnore - @QueryParam("active_only") private Boolean activeOnly; /** * If completed_only is `true`, only completed runs are included in the results; otherwise, lists * both active and completed runs. This field cannot be `true` when active_only is `true`. */ - @JsonIgnore - @QueryParam("completed_only") private Boolean completedOnly; /** * Whether to include task and cluster details in the response. Note that only the first 100 * elements will be shown. 
Use :method:jobs/getrun to paginate through all tasks and clusters. */ - @JsonIgnore - @QueryParam("expand_tasks") private Boolean expandTasks; /** The job for which to list runs. If omitted, the Jobs service lists runs from all jobs. */ - @JsonIgnore - @QueryParam("job_id") private Long jobId; /** * The number of runs to return. This value must be greater than 0 and less than 25. The default * value is 20. If a request specifies a limit of 0, the service instead uses the maximum limit. */ - @JsonIgnore - @QueryParam("limit") private Long limit; /** * The offset of the first run to return, relative to the most recent run. Deprecated since June * 2023. Use `page_token` to iterate through the pages instead. */ - @JsonIgnore - @QueryParam("offset") private Long offset; /** * Use `next_page_token` or `prev_page_token` returned from the previous request to list the next * or previous page of runs respectively. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The type of runs to return. For a description of run types, see :method:jobs/getRun. */ - @JsonIgnore - @QueryParam("run_type") private RunType runType; /** * Show runs that started _at or after_ this value. The value must be a UTC timestamp in * milliseconds. Can be combined with _start_time_to_ to filter by a time range. */ - @JsonIgnore - @QueryParam("start_time_from") private Long startTimeFrom; /** * Show runs that started _at or before_ this value. The value must be a UTC timestamp in * milliseconds. Can be combined with _start_time_from_ to filter by a time range. 
*/ - @JsonIgnore - @QueryParam("start_time_to") private Long startTimeTo; public ListRunsRequest setActiveOnly(Boolean activeOnly) { @@ -223,4 +213,56 @@ public String toString() { .add("startTimeTo", startTimeTo) .toString(); } + + ListRunsRequestPb toPb() { + ListRunsRequestPb pb = new ListRunsRequestPb(); + pb.setActiveOnly(activeOnly); + pb.setCompletedOnly(completedOnly); + pb.setExpandTasks(expandTasks); + pb.setJobId(jobId); + pb.setLimit(limit); + pb.setOffset(offset); + pb.setPageToken(pageToken); + pb.setRunType(runType); + pb.setStartTimeFrom(startTimeFrom); + pb.setStartTimeTo(startTimeTo); + + return pb; + } + + static ListRunsRequest fromPb(ListRunsRequestPb pb) { + ListRunsRequest model = new ListRunsRequest(); + model.setActiveOnly(pb.getActiveOnly()); + model.setCompletedOnly(pb.getCompletedOnly()); + model.setExpandTasks(pb.getExpandTasks()); + model.setJobId(pb.getJobId()); + model.setLimit(pb.getLimit()); + model.setOffset(pb.getOffset()); + model.setPageToken(pb.getPageToken()); + model.setRunType(pb.getRunType()); + model.setStartTimeFrom(pb.getStartTimeFrom()); + model.setStartTimeTo(pb.getStartTimeTo()); + + return model; + } + + public static class ListRunsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListRunsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRunsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRunsRequestDeserializer extends JsonDeserializer { + @Override + public ListRunsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRunsRequestPb pb = mapper.readValue(p, ListRunsRequestPb.class); + return ListRunsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequestPb.java new file mode 100755 index 000000000..756099f75 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequestPb.java @@ -0,0 +1,191 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List job runs */ +@Generated +class ListRunsRequestPb { + @JsonIgnore + @QueryParam("active_only") + private Boolean activeOnly; + + @JsonIgnore + @QueryParam("completed_only") + private Boolean completedOnly; + + @JsonIgnore + @QueryParam("expand_tasks") + private Boolean expandTasks; + + @JsonIgnore + @QueryParam("job_id") + private Long jobId; + + @JsonIgnore + @QueryParam("limit") + private Long limit; + + @JsonIgnore + @QueryParam("offset") + private Long offset; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("run_type") + private RunType runType; + + @JsonIgnore + @QueryParam("start_time_from") + private Long startTimeFrom; + + @JsonIgnore + @QueryParam("start_time_to") + private Long startTimeTo; + + public ListRunsRequestPb setActiveOnly(Boolean activeOnly) { + this.activeOnly = activeOnly; + return this; + } + + public Boolean getActiveOnly() { + return activeOnly; + } + + public ListRunsRequestPb setCompletedOnly(Boolean completedOnly) { + this.completedOnly = completedOnly; + return this; + } + + public Boolean 
getCompletedOnly() { + return completedOnly; + } + + public ListRunsRequestPb setExpandTasks(Boolean expandTasks) { + this.expandTasks = expandTasks; + return this; + } + + public Boolean getExpandTasks() { + return expandTasks; + } + + public ListRunsRequestPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public ListRunsRequestPb setLimit(Long limit) { + this.limit = limit; + return this; + } + + public Long getLimit() { + return limit; + } + + public ListRunsRequestPb setOffset(Long offset) { + this.offset = offset; + return this; + } + + public Long getOffset() { + return offset; + } + + public ListRunsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListRunsRequestPb setRunType(RunType runType) { + this.runType = runType; + return this; + } + + public RunType getRunType() { + return runType; + } + + public ListRunsRequestPb setStartTimeFrom(Long startTimeFrom) { + this.startTimeFrom = startTimeFrom; + return this; + } + + public Long getStartTimeFrom() { + return startTimeFrom; + } + + public ListRunsRequestPb setStartTimeTo(Long startTimeTo) { + this.startTimeTo = startTimeTo; + return this; + } + + public Long getStartTimeTo() { + return startTimeTo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRunsRequestPb that = (ListRunsRequestPb) o; + return Objects.equals(activeOnly, that.activeOnly) + && Objects.equals(completedOnly, that.completedOnly) + && Objects.equals(expandTasks, that.expandTasks) + && Objects.equals(jobId, that.jobId) + && Objects.equals(limit, that.limit) + && Objects.equals(offset, that.offset) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(runType, that.runType) + && Objects.equals(startTimeFrom, that.startTimeFrom) + && 
Objects.equals(startTimeTo, that.startTimeTo); + } + + @Override + public int hashCode() { + return Objects.hash( + activeOnly, + completedOnly, + expandTasks, + jobId, + limit, + offset, + pageToken, + runType, + startTimeFrom, + startTimeTo); + } + + @Override + public String toString() { + return new ToStringer(ListRunsRequestPb.class) + .add("activeOnly", activeOnly) + .add("completedOnly", completedOnly) + .add("expandTasks", expandTasks) + .add("jobId", jobId) + .add("limit", limit) + .add("offset", offset) + .add("pageToken", pageToken) + .add("runType", runType) + .add("startTimeFrom", startTimeFrom) + .add("startTimeTo", startTimeTo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java index 8e6ecea17..56df5b5d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List of runs was retrieved successfully. 
*/ @Generated +@JsonSerialize(using = ListRunsResponse.ListRunsResponseSerializer.class) +@JsonDeserialize(using = ListRunsResponse.ListRunsResponseDeserializer.class) public class ListRunsResponse { /** If true, additional runs matching the provided filter are available for listing. */ - @JsonProperty("has_more") private Boolean hasMore; /** A token that can be used to list the next page of runs (if applicable). */ - @JsonProperty("next_page_token") private String nextPageToken; /** A token that can be used to list the previous page of runs (if applicable). */ - @JsonProperty("prev_page_token") private String prevPageToken; /** * A list of runs, from most recently started to least. Only included in the response if there are * runs to list. */ - @JsonProperty("runs") private Collection runs; public ListRunsResponse setHasMore(Boolean hasMore) { @@ -91,4 +98,44 @@ public String toString() { .add("runs", runs) .toString(); } + + ListRunsResponsePb toPb() { + ListRunsResponsePb pb = new ListRunsResponsePb(); + pb.setHasMore(hasMore); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + pb.setRuns(runs); + + return pb; + } + + static ListRunsResponse fromPb(ListRunsResponsePb pb) { + ListRunsResponse model = new ListRunsResponse(); + model.setHasMore(pb.getHasMore()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + model.setRuns(pb.getRuns()); + + return model; + } + + public static class ListRunsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRunsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRunsResponseDeserializer extends JsonDeserializer { + @Override + public ListRunsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRunsResponsePb pb = mapper.readValue(p, ListRunsResponsePb.class); + return ListRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponsePb.java new file mode 100755 index 000000000..2dc8e6ea4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponsePb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** List of runs was retrieved successfully. */ +@Generated +class ListRunsResponsePb { + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + @JsonProperty("runs") + private Collection runs; + + public ListRunsResponsePb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public ListRunsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListRunsResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + public ListRunsResponsePb setRuns(Collection runs) { + this.runs = runs; + return this; + } + + public Collection getRuns() { + return runs; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRunsResponsePb that = (ListRunsResponsePb) o; + return Objects.equals(hasMore, that.hasMore) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken) + && Objects.equals(runs, that.runs); + } + + @Override + public int hashCode() { + return Objects.hash(hasMore, nextPageToken, prevPageToken, runs); + } + + @Override + public String toString() { + return new ToStringer(ListRunsResponsePb.class) + .add("hasMore", hasMore) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .add("runs", runs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java index ced9c3ecb..bb804c0e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NotebookOutput.NotebookOutputSerializer.class) +@JsonDeserialize(using = 
NotebookOutput.NotebookOutputDeserializer.class) public class NotebookOutput { /** * The value passed to @@ -16,11 +27,9 @@ public class NotebookOutput { * job can store the results in a cloud storage service. This field is absent if * `dbutils.notebook.exit()` was never called. */ - @JsonProperty("result") private String result; /** Whether or not the result was truncated. */ - @JsonProperty("truncated") private Boolean truncated; public NotebookOutput setResult(String result) { @@ -61,4 +70,40 @@ public String toString() { .add("truncated", truncated) .toString(); } + + NotebookOutputPb toPb() { + NotebookOutputPb pb = new NotebookOutputPb(); + pb.setResult(result); + pb.setTruncated(truncated); + + return pb; + } + + static NotebookOutput fromPb(NotebookOutputPb pb) { + NotebookOutput model = new NotebookOutput(); + model.setResult(pb.getResult()); + model.setTruncated(pb.getTruncated()); + + return model; + } + + public static class NotebookOutputSerializer extends JsonSerializer { + @Override + public void serialize(NotebookOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotebookOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotebookOutputDeserializer extends JsonDeserializer { + @Override + public NotebookOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotebookOutputPb pb = mapper.readValue(p, NotebookOutputPb.class); + return NotebookOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutputPb.java new file mode 100755 index 000000000..af1968d9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutputPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NotebookOutputPb { + @JsonProperty("result") + private String result; + + @JsonProperty("truncated") + private Boolean truncated; + + public NotebookOutputPb setResult(String result) { + this.result = result; + return this; + } + + public String getResult() { + return result; + } + + public NotebookOutputPb setTruncated(Boolean truncated) { + this.truncated = truncated; + return this; + } + + public Boolean getTruncated() { + return truncated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotebookOutputPb that = (NotebookOutputPb) o; + return Objects.equals(result, that.result) && Objects.equals(truncated, that.truncated); + } + + @Override + public int hashCode() { + return Objects.hash(result, truncated); + } + + @Override + public String toString() { + return new ToStringer(NotebookOutputPb.class) + .add("result", result) + .add("truncated", truncated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java index 4251a2493..0922d2d3d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = NotebookTask.NotebookTaskSerializer.class) +@JsonDeserialize(using = NotebookTask.NotebookTaskDeserializer.class) public class NotebookTask { /** * Base parameters to be used for each run of this job. If the run is initiated by a call to @@ -27,7 +38,6 @@ public class NotebookTask { * [dbutils.widgets.get]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets */ - @JsonProperty("base_parameters") private Map baseParameters; /** @@ -35,7 +45,6 @@ public class NotebookTask { * notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. * For notebooks stored in a remote repository, the path must be relative. This field is required. */ - @JsonProperty("notebook_path") private String notebookPath; /** @@ -45,7 +54,6 @@ public class NotebookTask { * `git_source` is defined and `WORKSPACE` otherwise. 
* `WORKSPACE`: Notebook is located in * Databricks workspace. * `GIT`: Notebook is located in cloud Git provider. */ - @JsonProperty("source") private Source source; /** @@ -55,7 +63,6 @@ public class NotebookTask { *

Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the * run will fail. */ - @JsonProperty("warehouse_id") private String warehouseId; public NotebookTask setBaseParameters(Map baseParameters) { @@ -119,4 +126,43 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + NotebookTaskPb toPb() { + NotebookTaskPb pb = new NotebookTaskPb(); + pb.setBaseParameters(baseParameters); + pb.setNotebookPath(notebookPath); + pb.setSource(source); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static NotebookTask fromPb(NotebookTaskPb pb) { + NotebookTask model = new NotebookTask(); + model.setBaseParameters(pb.getBaseParameters()); + model.setNotebookPath(pb.getNotebookPath()); + model.setSource(pb.getSource()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class NotebookTaskSerializer extends JsonSerializer { + @Override + public void serialize(NotebookTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotebookTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotebookTaskDeserializer extends JsonDeserializer { + @Override + public NotebookTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotebookTaskPb pb = mapper.readValue(p, NotebookTaskPb.class); + return NotebookTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTaskPb.java new file mode 100755 index 000000000..67713a529 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTaskPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class NotebookTaskPb { + @JsonProperty("base_parameters") + private Map baseParameters; + + @JsonProperty("notebook_path") + private String notebookPath; + + @JsonProperty("source") + private Source source; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public NotebookTaskPb setBaseParameters(Map baseParameters) { + this.baseParameters = baseParameters; + return this; + } + + public Map getBaseParameters() { + return baseParameters; + } + + public NotebookTaskPb setNotebookPath(String notebookPath) { + this.notebookPath = notebookPath; + return this; + } + + public String getNotebookPath() { + return notebookPath; + } + + public NotebookTaskPb setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + public NotebookTaskPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotebookTaskPb that = (NotebookTaskPb) o; + return Objects.equals(baseParameters, that.baseParameters) + && Objects.equals(notebookPath, that.notebookPath) + && Objects.equals(source, that.source) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(baseParameters, notebookPath, source, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(NotebookTaskPb.class) + .add("baseParameters", baseParameters) + .add("notebookPath", notebookPath) + .add("source", source) + .add("warehouseId", warehouseId) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java index 83d2cfa9a..d2d37ac09 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,17 +21,16 @@ * run. */ @Generated +@JsonSerialize(using = OutputSchemaInfo.OutputSchemaInfoSerializer.class) +@JsonDeserialize(using = OutputSchemaInfo.OutputSchemaInfoDeserializer.class) public class OutputSchemaInfo { /** */ - @JsonProperty("catalog_name") private String catalogName; /** The expiration time for the output schema as a Unix timestamp in milliseconds. 
*/ - @JsonProperty("expiration_time") private Long expirationTime; /** */ - @JsonProperty("schema_name") private String schemaName; public OutputSchemaInfo setCatalogName(String catalogName) { @@ -75,4 +83,42 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + OutputSchemaInfoPb toPb() { + OutputSchemaInfoPb pb = new OutputSchemaInfoPb(); + pb.setCatalogName(catalogName); + pb.setExpirationTime(expirationTime); + pb.setSchemaName(schemaName); + + return pb; + } + + static OutputSchemaInfo fromPb(OutputSchemaInfoPb pb) { + OutputSchemaInfo model = new OutputSchemaInfo(); + model.setCatalogName(pb.getCatalogName()); + model.setExpirationTime(pb.getExpirationTime()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class OutputSchemaInfoSerializer extends JsonSerializer { + @Override + public void serialize(OutputSchemaInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OutputSchemaInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OutputSchemaInfoDeserializer extends JsonDeserializer { + @Override + public OutputSchemaInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OutputSchemaInfoPb pb = mapper.readValue(p, OutputSchemaInfoPb.class); + return OutputSchemaInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfoPb.java new file mode 100755 index 000000000..3ff73563c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfoPb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Stores the catalog name, schema name, and the output schema expiration time for the clean room + * run. + */ +@Generated +class OutputSchemaInfoPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("schema_name") + private String schemaName; + + public OutputSchemaInfoPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public OutputSchemaInfoPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public OutputSchemaInfoPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OutputSchemaInfoPb that = (OutputSchemaInfoPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, expirationTime, schemaName); + } + + @Override + public String toString() { + return new ToStringer(OutputSchemaInfoPb.class) + .add("catalogName", catalogName) + .add("expirationTime", expirationTime) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfiguration.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfiguration.java index 46e62b48e..933a46416 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfiguration.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PeriodicTriggerConfiguration.PeriodicTriggerConfigurationSerializer.class) +@JsonDeserialize( + using = PeriodicTriggerConfiguration.PeriodicTriggerConfigurationDeserializer.class) public class PeriodicTriggerConfiguration { /** The interval at which the trigger should run. */ - @JsonProperty("interval") private Long interval; /** The unit of time for the interval. 
*/ - @JsonProperty("unit") private PeriodicTriggerConfigurationTimeUnit unit; public PeriodicTriggerConfiguration setInterval(Long interval) { @@ -55,4 +65,43 @@ public String toString() { .add("unit", unit) .toString(); } + + PeriodicTriggerConfigurationPb toPb() { + PeriodicTriggerConfigurationPb pb = new PeriodicTriggerConfigurationPb(); + pb.setInterval(interval); + pb.setUnit(unit); + + return pb; + } + + static PeriodicTriggerConfiguration fromPb(PeriodicTriggerConfigurationPb pb) { + PeriodicTriggerConfiguration model = new PeriodicTriggerConfiguration(); + model.setInterval(pb.getInterval()); + model.setUnit(pb.getUnit()); + + return model; + } + + public static class PeriodicTriggerConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + PeriodicTriggerConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PeriodicTriggerConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PeriodicTriggerConfigurationDeserializer + extends JsonDeserializer { + @Override + public PeriodicTriggerConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PeriodicTriggerConfigurationPb pb = mapper.readValue(p, PeriodicTriggerConfigurationPb.class); + return PeriodicTriggerConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfigurationPb.java new file mode 100755 index 000000000..2e695aba7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PeriodicTriggerConfigurationPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PeriodicTriggerConfigurationPb { + @JsonProperty("interval") + private Long interval; + + @JsonProperty("unit") + private PeriodicTriggerConfigurationTimeUnit unit; + + public PeriodicTriggerConfigurationPb setInterval(Long interval) { + this.interval = interval; + return this; + } + + public Long getInterval() { + return interval; + } + + public PeriodicTriggerConfigurationPb setUnit(PeriodicTriggerConfigurationTimeUnit unit) { + this.unit = unit; + return this; + } + + public PeriodicTriggerConfigurationTimeUnit getUnit() { + return unit; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PeriodicTriggerConfigurationPb that = (PeriodicTriggerConfigurationPb) o; + return Objects.equals(interval, that.interval) && Objects.equals(unit, that.unit); + } + + @Override + public int hashCode() { + return Objects.hash(interval, unit); + } + + @Override + public String toString() { + return new ToStringer(PeriodicTriggerConfigurationPb.class) + .add("interval", interval) + .add("unit", unit) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParams.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParams.java index 9ec53d7e6..d810a0b40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParams.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParams.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineParams.PipelineParamsSerializer.class) +@JsonDeserialize(using = PipelineParams.PipelineParamsDeserializer.class) public class PipelineParams { /** If true, triggers a full refresh on the delta live table. */ - @JsonProperty("full_refresh") private Boolean fullRefresh; public PipelineParams setFullRefresh(Boolean fullRefresh) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(PipelineParams.class).add("fullRefresh", fullRefresh).toString(); } + + PipelineParamsPb toPb() { + PipelineParamsPb pb = new PipelineParamsPb(); + pb.setFullRefresh(fullRefresh); + + return pb; + } + + static PipelineParams fromPb(PipelineParamsPb pb) { + PipelineParams model = new PipelineParams(); + model.setFullRefresh(pb.getFullRefresh()); + + return model; + } + + public static class PipelineParamsSerializer extends JsonSerializer { + @Override + public void serialize(PipelineParams value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineParamsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineParamsDeserializer extends JsonDeserializer { + @Override + public PipelineParams deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineParamsPb pb = mapper.readValue(p, PipelineParamsPb.class); + return PipelineParams.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParamsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParamsPb.java new file mode 100755 index 000000000..e1fa750d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineParamsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineParamsPb { + @JsonProperty("full_refresh") + private Boolean fullRefresh; + + public PipelineParamsPb setFullRefresh(Boolean fullRefresh) { + this.fullRefresh = fullRefresh; + return this; + } + + public Boolean getFullRefresh() { + return fullRefresh; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineParamsPb that = (PipelineParamsPb) o; + return Objects.equals(fullRefresh, that.fullRefresh); + } + + @Override + public int hashCode() { + return Objects.hash(fullRefresh); + } + + @Override + public String toString() { + return new ToStringer(PipelineParamsPb.class).add("fullRefresh", fullRefresh).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java index d7b279dd6..66d4f1fd4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java @@ -4,17 
+4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineTask.PipelineTaskSerializer.class) +@JsonDeserialize(using = PipelineTask.PipelineTaskDeserializer.class) public class PipelineTask { /** If true, triggers a full refresh on the delta live table. */ - @JsonProperty("full_refresh") private Boolean fullRefresh; /** The full name of the pipeline task to execute. 
*/ - @JsonProperty("pipeline_id") private String pipelineId; public PipelineTask setFullRefresh(Boolean fullRefresh) { @@ -56,4 +65,39 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + PipelineTaskPb toPb() { + PipelineTaskPb pb = new PipelineTaskPb(); + pb.setFullRefresh(fullRefresh); + pb.setPipelineId(pipelineId); + + return pb; + } + + static PipelineTask fromPb(PipelineTaskPb pb) { + PipelineTask model = new PipelineTask(); + model.setFullRefresh(pb.getFullRefresh()); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class PipelineTaskSerializer extends JsonSerializer { + @Override + public void serialize(PipelineTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineTaskDeserializer extends JsonDeserializer { + @Override + public PipelineTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineTaskPb pb = mapper.readValue(p, PipelineTaskPb.class); + return PipelineTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTaskPb.java new file mode 100755 index 000000000..d0c72c0f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTaskPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineTaskPb { + @JsonProperty("full_refresh") + private Boolean fullRefresh; + + @JsonProperty("pipeline_id") + private String pipelineId; + + public PipelineTaskPb setFullRefresh(Boolean fullRefresh) { + this.fullRefresh = fullRefresh; + return this; + } + + public Boolean getFullRefresh() { + return fullRefresh; + } + + public PipelineTaskPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineTaskPb that = (PipelineTaskPb) o; + return Objects.equals(fullRefresh, that.fullRefresh) + && Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(fullRefresh, pipelineId); + } + + @Override + public String toString() { + return new ToStringer(PipelineTaskPb.class) + .add("fullRefresh", fullRefresh) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java index 6319d399d..6221b502c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java @@ -21,7 +21,7 @@ public EnforcePolicyComplianceResponse enforceCompliance(EnforcePolicyCompliance String path = "/api/2.0/policies/jobs/enforce-compliance"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnforcePolicyComplianceResponse.class); @@ -35,7 +35,7 @@ public GetPolicyComplianceResponse getCompliance(GetPolicyComplianceRequest requ String path = "/api/2.0/policies/jobs/get-compliance"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPolicyComplianceResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public ListJobComplianceForPolicyResponse listCompliance(ListJobComplianceReques String path = "/api/2.0/policies/jobs/list-compliance"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListJobComplianceForPolicyResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java index db64f341b..5866f6c54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PowerBiModel.PowerBiModelSerializer.class) +@JsonDeserialize(using = PowerBiModel.PowerBiModelDeserializer.class) public class PowerBiModel { /** How the published Power BI model authenticates to Databricks */ - @JsonProperty("authentication_method") private AuthenticationMethod authenticationMethod; /** The name of the Power BI model */ - @JsonProperty("model_name") private String modelName; /** Whether to overwrite existing Power BI models */ - @JsonProperty("overwrite_existing") private Boolean overwriteExisting; /** The default storage mode of the Power BI model */ - @JsonProperty("storage_mode") private StorageMode storageMode; /** The name of the Power BI workspace of the model */ - @JsonProperty("workspace_name") private String workspaceName; public PowerBiModel setAuthenticationMethod(AuthenticationMethod authenticationMethod) { @@ -102,4 +108,45 @@ public String toString() { .add("workspaceName", workspaceName) .toString(); } + + PowerBiModelPb toPb() { + PowerBiModelPb pb = new PowerBiModelPb(); + pb.setAuthenticationMethod(authenticationMethod); + pb.setModelName(modelName); + pb.setOverwriteExisting(overwriteExisting); + pb.setStorageMode(storageMode); + pb.setWorkspaceName(workspaceName); + + return pb; + } + + static PowerBiModel fromPb(PowerBiModelPb pb) { + PowerBiModel model = new PowerBiModel(); + model.setAuthenticationMethod(pb.getAuthenticationMethod()); + model.setModelName(pb.getModelName()); + model.setOverwriteExisting(pb.getOverwriteExisting()); + model.setStorageMode(pb.getStorageMode()); + model.setWorkspaceName(pb.getWorkspaceName()); + + return model; + } + + public static class PowerBiModelSerializer extends JsonSerializer { + @Override + public void 
serialize(PowerBiModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PowerBiModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PowerBiModelDeserializer extends JsonDeserializer { + @Override + public PowerBiModel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PowerBiModelPb pb = mapper.readValue(p, PowerBiModelPb.class); + return PowerBiModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModelPb.java new file mode 100755 index 000000000..765cf7477 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModelPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PowerBiModelPb { + @JsonProperty("authentication_method") + private AuthenticationMethod authenticationMethod; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("overwrite_existing") + private Boolean overwriteExisting; + + @JsonProperty("storage_mode") + private StorageMode storageMode; + + @JsonProperty("workspace_name") + private String workspaceName; + + public PowerBiModelPb setAuthenticationMethod(AuthenticationMethod authenticationMethod) { + this.authenticationMethod = authenticationMethod; + return this; + } + + public AuthenticationMethod getAuthenticationMethod() { + return authenticationMethod; + } + + public PowerBiModelPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public PowerBiModelPb setOverwriteExisting(Boolean overwriteExisting) { + this.overwriteExisting = overwriteExisting; + return this; + } + + public Boolean getOverwriteExisting() { + return overwriteExisting; + } + + public PowerBiModelPb setStorageMode(StorageMode storageMode) { + this.storageMode = storageMode; + return this; + } + + public StorageMode getStorageMode() { + return storageMode; + } + + public PowerBiModelPb setWorkspaceName(String workspaceName) { + this.workspaceName = workspaceName; + return this; + } + + public String getWorkspaceName() { + return workspaceName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiModelPb that = (PowerBiModelPb) o; + return Objects.equals(authenticationMethod, that.authenticationMethod) + && Objects.equals(modelName, that.modelName) + && Objects.equals(overwriteExisting, 
that.overwriteExisting) + && Objects.equals(storageMode, that.storageMode) + && Objects.equals(workspaceName, that.workspaceName); + } + + @Override + public int hashCode() { + return Objects.hash( + authenticationMethod, modelName, overwriteExisting, storageMode, workspaceName); + } + + @Override + public String toString() { + return new ToStringer(PowerBiModelPb.class) + .add("authenticationMethod", authenticationMethod) + .add("modelName", modelName) + .add("overwriteExisting", overwriteExisting) + .add("storageMode", storageMode) + .add("workspaceName", workspaceName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java index 1de4cb25d..5907aa0c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PowerBiTable.PowerBiTableSerializer.class) +@JsonDeserialize(using = PowerBiTable.PowerBiTableDeserializer.class) public class PowerBiTable { /** The catalog name in Databricks */ - @JsonProperty("catalog") private String catalog; 
/** The table name in Databricks */ - @JsonProperty("name") private String name; /** The schema name in Databricks */ - @JsonProperty("schema") private String schema; /** The Power BI storage mode of the table */ - @JsonProperty("storage_mode") private StorageMode storageMode; public PowerBiTable setCatalog(String catalog) { @@ -86,4 +93,43 @@ public String toString() { .add("storageMode", storageMode) .toString(); } + + PowerBiTablePb toPb() { + PowerBiTablePb pb = new PowerBiTablePb(); + pb.setCatalog(catalog); + pb.setName(name); + pb.setSchema(schema); + pb.setStorageMode(storageMode); + + return pb; + } + + static PowerBiTable fromPb(PowerBiTablePb pb) { + PowerBiTable model = new PowerBiTable(); + model.setCatalog(pb.getCatalog()); + model.setName(pb.getName()); + model.setSchema(pb.getSchema()); + model.setStorageMode(pb.getStorageMode()); + + return model; + } + + public static class PowerBiTableSerializer extends JsonSerializer { + @Override + public void serialize(PowerBiTable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PowerBiTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PowerBiTableDeserializer extends JsonDeserializer { + @Override + public PowerBiTable deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PowerBiTablePb pb = mapper.readValue(p, PowerBiTablePb.class); + return PowerBiTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTablePb.java new file mode 100755 index 000000000..27ebafde9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTablePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PowerBiTablePb { + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("storage_mode") + private StorageMode storageMode; + + public PowerBiTablePb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public PowerBiTablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PowerBiTablePb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public PowerBiTablePb setStorageMode(StorageMode storageMode) { + this.storageMode = storageMode; + return this; + } + + public StorageMode getStorageMode() { + return storageMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiTablePb that = (PowerBiTablePb) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(name, that.name) + && 
Objects.equals(schema, that.schema) + && Objects.equals(storageMode, that.storageMode); + } + + @Override + public int hashCode() { + return Objects.hash(catalog, name, schema, storageMode); + } + + @Override + public String toString() { + return new ToStringer(PowerBiTablePb.class) + .add("catalog", catalog) + .add("name", name) + .add("schema", schema) + .add("storageMode", storageMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java index 53e0de86d..4e9a48d7b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PowerBiTask.PowerBiTaskSerializer.class) +@JsonDeserialize(using = PowerBiTask.PowerBiTaskDeserializer.class) public class PowerBiTask { /** The resource name of the UC connection to authenticate from Databricks to Power BI */ - @JsonProperty("connection_resource_name") private String connectionResourceName; /** The semantic model to update */ - @JsonProperty("power_bi_model") private 
PowerBiModel powerBiModel; /** Whether the model should be refreshed after the update */ - @JsonProperty("refresh_after_update") private Boolean refreshAfterUpdate; /** The tables to be exported to Power BI */ - @JsonProperty("tables") private Collection tables; /** The SQL warehouse ID to use as the Power BI data source */ - @JsonProperty("warehouse_id") private String warehouseId; public PowerBiTask setConnectionResourceName(String connectionResourceName) { @@ -103,4 +109,45 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + PowerBiTaskPb toPb() { + PowerBiTaskPb pb = new PowerBiTaskPb(); + pb.setConnectionResourceName(connectionResourceName); + pb.setPowerBiModel(powerBiModel); + pb.setRefreshAfterUpdate(refreshAfterUpdate); + pb.setTables(tables); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static PowerBiTask fromPb(PowerBiTaskPb pb) { + PowerBiTask model = new PowerBiTask(); + model.setConnectionResourceName(pb.getConnectionResourceName()); + model.setPowerBiModel(pb.getPowerBiModel()); + model.setRefreshAfterUpdate(pb.getRefreshAfterUpdate()); + model.setTables(pb.getTables()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class PowerBiTaskSerializer extends JsonSerializer { + @Override + public void serialize(PowerBiTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PowerBiTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PowerBiTaskDeserializer extends JsonDeserializer { + @Override + public PowerBiTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PowerBiTaskPb pb = mapper.readValue(p, PowerBiTaskPb.class); + return PowerBiTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTaskPb.java new file mode 100755 index 000000000..7b175344a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTaskPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PowerBiTaskPb { + @JsonProperty("connection_resource_name") + private String connectionResourceName; + + @JsonProperty("power_bi_model") + private PowerBiModel powerBiModel; + + @JsonProperty("refresh_after_update") + private Boolean refreshAfterUpdate; + + @JsonProperty("tables") + private Collection tables; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public PowerBiTaskPb setConnectionResourceName(String connectionResourceName) { + this.connectionResourceName = connectionResourceName; + return this; + } + + public String getConnectionResourceName() { + return connectionResourceName; + } + + public PowerBiTaskPb setPowerBiModel(PowerBiModel powerBiModel) { + this.powerBiModel = powerBiModel; + return this; + } + + public PowerBiModel getPowerBiModel() { + return powerBiModel; + } + + public PowerBiTaskPb setRefreshAfterUpdate(Boolean refreshAfterUpdate) { + this.refreshAfterUpdate = refreshAfterUpdate; + return this; + } + + public Boolean getRefreshAfterUpdate() { + return refreshAfterUpdate; + } + + public PowerBiTaskPb setTables(Collection tables) { + this.tables = tables; + return this; 
+ } + + public Collection getTables() { + return tables; + } + + public PowerBiTaskPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiTaskPb that = (PowerBiTaskPb) o; + return Objects.equals(connectionResourceName, that.connectionResourceName) + && Objects.equals(powerBiModel, that.powerBiModel) + && Objects.equals(refreshAfterUpdate, that.refreshAfterUpdate) + && Objects.equals(tables, that.tables) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + connectionResourceName, powerBiModel, refreshAfterUpdate, tables, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(PowerBiTaskPb.class) + .add("connectionResourceName", connectionResourceName) + .add("powerBiModel", powerBiModel) + .add("refreshAfterUpdate", refreshAfterUpdate) + .add("tables", tables) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTask.java index 578345106..1cd77008a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTask.java @@ -4,36 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = PythonWheelTask.PythonWheelTaskSerializer.class) +@JsonDeserialize(using = PythonWheelTask.PythonWheelTaskDeserializer.class) public class PythonWheelTask { /** * Named entry point to use, if it does not exist in the metadata of the package it executes the * function from the package directly using `$packageName.$entryPoint()` */ - @JsonProperty("entry_point") private String entryPoint; /** * Command-line parameters passed to Python wheel task in the form of `["--name=task", * "--data=dbfs:/path/to/data.json"]`. Leave it empty if `parameters` is not null. */ - @JsonProperty("named_parameters") private Map namedParameters; /** Name of the package to execute */ - @JsonProperty("package_name") private String packageName; /** * Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is * not null. 
*/ - @JsonProperty("parameters") private Collection parameters; public PythonWheelTask setEntryPoint(String entryPoint) { @@ -97,4 +104,44 @@ public String toString() { .add("parameters", parameters) .toString(); } + + PythonWheelTaskPb toPb() { + PythonWheelTaskPb pb = new PythonWheelTaskPb(); + pb.setEntryPoint(entryPoint); + pb.setNamedParameters(namedParameters); + pb.setPackageName(packageName); + pb.setParameters(parameters); + + return pb; + } + + static PythonWheelTask fromPb(PythonWheelTaskPb pb) { + PythonWheelTask model = new PythonWheelTask(); + model.setEntryPoint(pb.getEntryPoint()); + model.setNamedParameters(pb.getNamedParameters()); + model.setPackageName(pb.getPackageName()); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class PythonWheelTaskSerializer extends JsonSerializer { + @Override + public void serialize(PythonWheelTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PythonWheelTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PythonWheelTaskDeserializer extends JsonDeserializer { + @Override + public PythonWheelTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PythonWheelTaskPb pb = mapper.readValue(p, PythonWheelTaskPb.class); + return PythonWheelTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTaskPb.java new file mode 100755 index 000000000..85adcd18f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTaskPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class PythonWheelTaskPb { + @JsonProperty("entry_point") + private String entryPoint; + + @JsonProperty("named_parameters") + private Map namedParameters; + + @JsonProperty("package_name") + private String packageName; + + @JsonProperty("parameters") + private Collection parameters; + + public PythonWheelTaskPb setEntryPoint(String entryPoint) { + this.entryPoint = entryPoint; + return this; + } + + public String getEntryPoint() { + return entryPoint; + } + + public PythonWheelTaskPb setNamedParameters(Map namedParameters) { + this.namedParameters = namedParameters; + return this; + } + + public Map getNamedParameters() { + return namedParameters; + } + + public PythonWheelTaskPb setPackageName(String packageName) { + this.packageName = packageName; + return this; + } + + public String getPackageName() { + return packageName; + } + + public PythonWheelTaskPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PythonWheelTaskPb that = (PythonWheelTaskPb) o; + return Objects.equals(entryPoint, that.entryPoint) + && Objects.equals(namedParameters, that.namedParameters) + && Objects.equals(packageName, that.packageName) + && Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(entryPoint, namedParameters, packageName, parameters); + } + + @Override + public String toString() { + return new ToStringer(PythonWheelTaskPb.class) + .add("entryPoint", entryPoint) + .add("namedParameters", 
namedParameters) + .add("packageName", packageName) + .add("parameters", parameters) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java index f89c9be21..d776be9ae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = QueueDetails.QueueDetailsSerializer.class) +@JsonDeserialize(using = QueueDetails.QueueDetailsDeserializer.class) public class QueueDetails { /** * The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to @@ -16,14 +27,12 @@ public class QueueDetails { * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of * active run job tasks. */ - @JsonProperty("code") private QueueDetailsCodeCode code; /** * A descriptive message with the queuing details. This field is unstructured, and its exact * format is subject to change. 
*/ - @JsonProperty("message") private String message; public QueueDetails setCode(QueueDetailsCodeCode code) { @@ -61,4 +70,39 @@ public int hashCode() { public String toString() { return new ToStringer(QueueDetails.class).add("code", code).add("message", message).toString(); } + + QueueDetailsPb toPb() { + QueueDetailsPb pb = new QueueDetailsPb(); + pb.setCode(code); + pb.setMessage(message); + + return pb; + } + + static QueueDetails fromPb(QueueDetailsPb pb) { + QueueDetails model = new QueueDetails(); + model.setCode(pb.getCode()); + model.setMessage(pb.getMessage()); + + return model; + } + + public static class QueueDetailsSerializer extends JsonSerializer { + @Override + public void serialize(QueueDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueueDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueueDetailsDeserializer extends JsonDeserializer { + @Override + public QueueDetails deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueueDetailsPb pb = mapper.readValue(p, QueueDetailsPb.class); + return QueueDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetailsPb.java new file mode 100755 index 000000000..4d4acd920 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetailsPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QueueDetailsPb { + @JsonProperty("code") + private QueueDetailsCodeCode code; + + @JsonProperty("message") + private String message; + + public QueueDetailsPb setCode(QueueDetailsCodeCode code) { + this.code = code; + return this; + } + + public QueueDetailsCodeCode getCode() { + return code; + } + + public QueueDetailsPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueueDetailsPb that = (QueueDetailsPb) o; + return Objects.equals(code, that.code) && Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(code, message); + } + + @Override + public String toString() { + return new ToStringer(QueueDetailsPb.class) + .add("code", code) + .add("message", message) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettings.java index bed657edc..62a6919c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettings.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = QueueSettings.QueueSettingsSerializer.class) +@JsonDeserialize(using = QueueSettings.QueueSettingsDeserializer.class) public class QueueSettings { /** If true, enable queueing for the job. This is a required field. */ - @JsonProperty("enabled") private Boolean enabled; public QueueSettings setEnabled(Boolean enabled) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(QueueSettings.class).add("enabled", enabled).toString(); } + + QueueSettingsPb toPb() { + QueueSettingsPb pb = new QueueSettingsPb(); + pb.setEnabled(enabled); + + return pb; + } + + static QueueSettings fromPb(QueueSettingsPb pb) { + QueueSettings model = new QueueSettings(); + model.setEnabled(pb.getEnabled()); + + return model; + } + + public static class QueueSettingsSerializer extends JsonSerializer { + @Override + public void serialize(QueueSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueueSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueueSettingsDeserializer extends JsonDeserializer { + @Override + public QueueSettings deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueueSettingsPb pb = mapper.readValue(p, QueueSettingsPb.class); + return QueueSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettingsPb.java new file mode 100755 index 000000000..362f186aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueSettingsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QueueSettingsPb { + @JsonProperty("enabled") + private Boolean enabled; + + public QueueSettingsPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueueSettingsPb that = (QueueSettingsPb) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(QueueSettingsPb.class).add("enabled", enabled).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java index 24a8b911d..c89451abd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java @@ -4,11 +4,22 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepairHistoryItem.RepairHistoryItemSerializer.class) +@JsonDeserialize(using = RepairHistoryItem.RepairHistoryItemDeserializer.class) public class RepairHistoryItem { /** * The actual performance target used by the serverless run during execution. This can differ from @@ -19,37 +30,29 @@ public class RepairHistoryItem { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; /** The end time of the (repaired) run. */ - @JsonProperty("end_time") private Long endTime; /** * The ID of the repair. Only returned for the items that represent a repair in `repair_history`. */ - @JsonProperty("id") private Long id; /** The start time of the (repaired) run. */ - @JsonProperty("start_time") private Long startTime; /** Deprecated. Please use the `status` field instead. */ - @JsonProperty("state") private RunState state; /** The current status of the run */ - @JsonProperty("status") private RunStatus status; /** The run IDs of the task runs that ran as part of this repair history item. 
*/ - @JsonProperty("task_run_ids") private Collection taskRunIds; /** The repair history item type. Indicates whether a run is the original run or a repair run. */ - @JsonProperty("type") private RepairHistoryItemType typeValue; public RepairHistoryItem setEffectivePerformanceTarget( @@ -159,4 +162,52 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + RepairHistoryItemPb toPb() { + RepairHistoryItemPb pb = new RepairHistoryItemPb(); + pb.setEffectivePerformanceTarget(effectivePerformanceTarget); + pb.setEndTime(endTime); + pb.setId(id); + pb.setStartTime(startTime); + pb.setState(state); + pb.setStatus(status); + pb.setTaskRunIds(taskRunIds); + pb.setType(typeValue); + + return pb; + } + + static RepairHistoryItem fromPb(RepairHistoryItemPb pb) { + RepairHistoryItem model = new RepairHistoryItem(); + model.setEffectivePerformanceTarget(pb.getEffectivePerformanceTarget()); + model.setEndTime(pb.getEndTime()); + model.setId(pb.getId()); + model.setStartTime(pb.getStartTime()); + model.setState(pb.getState()); + model.setStatus(pb.getStatus()); + model.setTaskRunIds(pb.getTaskRunIds()); + model.setType(pb.getType()); + + return model; + } + + public static class RepairHistoryItemSerializer extends JsonSerializer { + @Override + public void serialize(RepairHistoryItem value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepairHistoryItemPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepairHistoryItemDeserializer extends JsonDeserializer { + @Override + public RepairHistoryItem deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepairHistoryItemPb pb = mapper.readValue(p, RepairHistoryItemPb.class); + return RepairHistoryItem.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItemPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItemPb.java new file mode 100755 index 000000000..c534fae66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItemPb.java @@ -0,0 +1,144 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepairHistoryItemPb { + @JsonProperty("effective_performance_target") + private PerformanceTarget effectivePerformanceTarget; + + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("id") + private Long id; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("state") + private RunState state; + + @JsonProperty("status") + private RunStatus status; + + @JsonProperty("task_run_ids") + private Collection taskRunIds; + + @JsonProperty("type") + private RepairHistoryItemType typeValue; + + public RepairHistoryItemPb setEffectivePerformanceTarget( + PerformanceTarget effectivePerformanceTarget) { + this.effectivePerformanceTarget = effectivePerformanceTarget; + return this; + } + + public PerformanceTarget getEffectivePerformanceTarget() { + return effectivePerformanceTarget; + } + + public RepairHistoryItemPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public RepairHistoryItemPb setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + 
return id; + } + + public RepairHistoryItemPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public RepairHistoryItemPb setState(RunState state) { + this.state = state; + return this; + } + + public RunState getState() { + return state; + } + + public RepairHistoryItemPb setStatus(RunStatus status) { + this.status = status; + return this; + } + + public RunStatus getStatus() { + return status; + } + + public RepairHistoryItemPb setTaskRunIds(Collection taskRunIds) { + this.taskRunIds = taskRunIds; + return this; + } + + public Collection getTaskRunIds() { + return taskRunIds; + } + + public RepairHistoryItemPb setType(RepairHistoryItemType typeValue) { + this.typeValue = typeValue; + return this; + } + + public RepairHistoryItemType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepairHistoryItemPb that = (RepairHistoryItemPb) o; + return Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget) + && Objects.equals(endTime, that.endTime) + && Objects.equals(id, that.id) + && Objects.equals(startTime, that.startTime) + && Objects.equals(state, that.state) + && Objects.equals(status, that.status) + && Objects.equals(taskRunIds, that.taskRunIds) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash( + effectivePerformanceTarget, endTime, id, startTime, state, status, taskRunIds, typeValue); + } + + @Override + public String toString() { + return new ToStringer(RepairHistoryItemPb.class) + .add("effectivePerformanceTarget", effectivePerformanceTarget) + .add("endTime", endTime) + .add("id", id) + .add("startTime", startTime) + .add("state", state) + .add("status", status) + .add("taskRunIds", taskRunIds) + .add("typeValue", typeValue) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java index 450831d23..64ad8dfbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = RepairRun.RepairRunSerializer.class) +@JsonDeserialize(using = RepairRun.RepairRunDeserializer.class) public class RepairRun { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ - @JsonProperty("dbt_commands") private Collection dbtCommands; /** @@ -29,18 +39,15 @@ public class RepairRun { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("jar_params") private Collection jarParams; /** Job-level parameters used in the run. for example `"param": "overriding_val"` */ - @JsonProperty("job_parameters") private Map jobParameters; /** * The ID of the latest repair. This parameter is not required when repairing a run for the first * time, but must be provided on subsequent requests to repair the same run. */ - @JsonProperty("latest_repair_id") private Long latestRepairId; /** @@ -60,7 +67,6 @@ public class RepairRun { *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ - @JsonProperty("notebook_params") private Map notebookParams; /** @@ -72,15 +78,12 @@ public class RepairRun { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("performance_target") private PerformanceTarget performanceTarget; /** Controls whether the pipeline should perform a full refresh */ - @JsonProperty("pipeline_params") private PipelineParams pipelineParams; /** */ - @JsonProperty("python_named_params") private Map pythonNamedParams; /** @@ -99,29 +102,24 @@ public class RepairRun { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("python_params") private Collection pythonParams; /** * If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be * used. */ - @JsonProperty("rerun_all_failed_tasks") private Boolean rerunAllFailedTasks; /** * If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were * previously successful. Can be also used in combination with `rerun_all_failed_tasks`. */ - @JsonProperty("rerun_dependent_tasks") private Boolean rerunDependentTasks; /** The task keys of the task runs to repair. */ - @JsonProperty("rerun_tasks") private Collection rerunTasks; /** The job run ID of the run to repair. The run must not be in progress. */ - @JsonProperty("run_id") private Long runId; /** @@ -141,14 +139,12 @@ public class RepairRun { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; /** * A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john * doe", "age": "35"}`. The SQL alert task does not support custom parameters. */ - @JsonProperty("sql_params") private Map sqlParams; public RepairRun setDbtCommands(Collection dbtCommands) { @@ -348,4 +344,65 @@ public String toString() { .add("sqlParams", sqlParams) .toString(); } + + RepairRunPb toPb() { + RepairRunPb pb = new RepairRunPb(); + pb.setDbtCommands(dbtCommands); + pb.setJarParams(jarParams); + pb.setJobParameters(jobParameters); + pb.setLatestRepairId(latestRepairId); + pb.setNotebookParams(notebookParams); + pb.setPerformanceTarget(performanceTarget); + pb.setPipelineParams(pipelineParams); + pb.setPythonNamedParams(pythonNamedParams); + pb.setPythonParams(pythonParams); + pb.setRerunAllFailedTasks(rerunAllFailedTasks); + pb.setRerunDependentTasks(rerunDependentTasks); + pb.setRerunTasks(rerunTasks); + pb.setRunId(runId); + pb.setSparkSubmitParams(sparkSubmitParams); + pb.setSqlParams(sqlParams); + + return pb; + } + + static RepairRun fromPb(RepairRunPb pb) { + RepairRun model = new RepairRun(); + model.setDbtCommands(pb.getDbtCommands()); + model.setJarParams(pb.getJarParams()); + model.setJobParameters(pb.getJobParameters()); + model.setLatestRepairId(pb.getLatestRepairId()); + model.setNotebookParams(pb.getNotebookParams()); + model.setPerformanceTarget(pb.getPerformanceTarget()); + model.setPipelineParams(pb.getPipelineParams()); + model.setPythonNamedParams(pb.getPythonNamedParams()); + model.setPythonParams(pb.getPythonParams()); + model.setRerunAllFailedTasks(pb.getRerunAllFailedTasks()); + model.setRerunDependentTasks(pb.getRerunDependentTasks()); + model.setRerunTasks(pb.getRerunTasks()); + model.setRunId(pb.getRunId()); + model.setSparkSubmitParams(pb.getSparkSubmitParams()); + 
model.setSqlParams(pb.getSqlParams()); + + return model; + } + + public static class RepairRunSerializer extends JsonSerializer { + @Override + public void serialize(RepairRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepairRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepairRunDeserializer extends JsonDeserializer { + @Override + public RepairRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepairRunPb pb = mapper.readValue(p, RepairRunPb.class); + return RepairRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunPb.java new file mode 100755 index 000000000..b774193e5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunPb.java @@ -0,0 +1,256 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class RepairRunPb { + @JsonProperty("dbt_commands") + private Collection dbtCommands; + + @JsonProperty("jar_params") + private Collection jarParams; + + @JsonProperty("job_parameters") + private Map jobParameters; + + @JsonProperty("latest_repair_id") + private Long latestRepairId; + + @JsonProperty("notebook_params") + private Map notebookParams; + + @JsonProperty("performance_target") + private PerformanceTarget performanceTarget; + + @JsonProperty("pipeline_params") + private PipelineParams pipelineParams; + + @JsonProperty("python_named_params") + private Map pythonNamedParams; + + @JsonProperty("python_params") + private Collection pythonParams; + + @JsonProperty("rerun_all_failed_tasks") + private Boolean rerunAllFailedTasks; + + @JsonProperty("rerun_dependent_tasks") + private Boolean rerunDependentTasks; + + @JsonProperty("rerun_tasks") + private Collection rerunTasks; + + @JsonProperty("run_id") + private Long runId; + + @JsonProperty("spark_submit_params") + private Collection sparkSubmitParams; + + @JsonProperty("sql_params") + private Map sqlParams; + + public RepairRunPb setDbtCommands(Collection dbtCommands) { + this.dbtCommands = dbtCommands; + return this; + } + + public Collection getDbtCommands() { + return dbtCommands; + } + + public RepairRunPb setJarParams(Collection jarParams) { + this.jarParams = jarParams; + return this; + } + + public Collection getJarParams() { + return jarParams; + } + + public RepairRunPb setJobParameters(Map jobParameters) { + this.jobParameters = jobParameters; + return this; + } + + public Map getJobParameters() { + return jobParameters; + } + + public RepairRunPb setLatestRepairId(Long latestRepairId) { + this.latestRepairId = 
latestRepairId; + return this; + } + + public Long getLatestRepairId() { + return latestRepairId; + } + + public RepairRunPb setNotebookParams(Map notebookParams) { + this.notebookParams = notebookParams; + return this; + } + + public Map getNotebookParams() { + return notebookParams; + } + + public RepairRunPb setPerformanceTarget(PerformanceTarget performanceTarget) { + this.performanceTarget = performanceTarget; + return this; + } + + public PerformanceTarget getPerformanceTarget() { + return performanceTarget; + } + + public RepairRunPb setPipelineParams(PipelineParams pipelineParams) { + this.pipelineParams = pipelineParams; + return this; + } + + public PipelineParams getPipelineParams() { + return pipelineParams; + } + + public RepairRunPb setPythonNamedParams(Map pythonNamedParams) { + this.pythonNamedParams = pythonNamedParams; + return this; + } + + public Map getPythonNamedParams() { + return pythonNamedParams; + } + + public RepairRunPb setPythonParams(Collection pythonParams) { + this.pythonParams = pythonParams; + return this; + } + + public Collection getPythonParams() { + return pythonParams; + } + + public RepairRunPb setRerunAllFailedTasks(Boolean rerunAllFailedTasks) { + this.rerunAllFailedTasks = rerunAllFailedTasks; + return this; + } + + public Boolean getRerunAllFailedTasks() { + return rerunAllFailedTasks; + } + + public RepairRunPb setRerunDependentTasks(Boolean rerunDependentTasks) { + this.rerunDependentTasks = rerunDependentTasks; + return this; + } + + public Boolean getRerunDependentTasks() { + return rerunDependentTasks; + } + + public RepairRunPb setRerunTasks(Collection rerunTasks) { + this.rerunTasks = rerunTasks; + return this; + } + + public Collection getRerunTasks() { + return rerunTasks; + } + + public RepairRunPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + public RepairRunPb setSparkSubmitParams(Collection sparkSubmitParams) { + this.sparkSubmitParams = 
sparkSubmitParams; + return this; + } + + public Collection getSparkSubmitParams() { + return sparkSubmitParams; + } + + public RepairRunPb setSqlParams(Map sqlParams) { + this.sqlParams = sqlParams; + return this; + } + + public Map getSqlParams() { + return sqlParams; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepairRunPb that = (RepairRunPb) o; + return Objects.equals(dbtCommands, that.dbtCommands) + && Objects.equals(jarParams, that.jarParams) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(latestRepairId, that.latestRepairId) + && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(performanceTarget, that.performanceTarget) + && Objects.equals(pipelineParams, that.pipelineParams) + && Objects.equals(pythonNamedParams, that.pythonNamedParams) + && Objects.equals(pythonParams, that.pythonParams) + && Objects.equals(rerunAllFailedTasks, that.rerunAllFailedTasks) + && Objects.equals(rerunDependentTasks, that.rerunDependentTasks) + && Objects.equals(rerunTasks, that.rerunTasks) + && Objects.equals(runId, that.runId) + && Objects.equals(sparkSubmitParams, that.sparkSubmitParams) + && Objects.equals(sqlParams, that.sqlParams); + } + + @Override + public int hashCode() { + return Objects.hash( + dbtCommands, + jarParams, + jobParameters, + latestRepairId, + notebookParams, + performanceTarget, + pipelineParams, + pythonNamedParams, + pythonParams, + rerunAllFailedTasks, + rerunDependentTasks, + rerunTasks, + runId, + sparkSubmitParams, + sqlParams); + } + + @Override + public String toString() { + return new ToStringer(RepairRunPb.class) + .add("dbtCommands", dbtCommands) + .add("jarParams", jarParams) + .add("jobParameters", jobParameters) + .add("latestRepairId", latestRepairId) + .add("notebookParams", notebookParams) + .add("performanceTarget", performanceTarget) + .add("pipelineParams", pipelineParams) + 
.add("pythonNamedParams", pythonNamedParams) + .add("pythonParams", pythonParams) + .add("rerunAllFailedTasks", rerunAllFailedTasks) + .add("rerunDependentTasks", rerunDependentTasks) + .add("rerunTasks", rerunTasks) + .add("runId", runId) + .add("sparkSubmitParams", sparkSubmitParams) + .add("sqlParams", sqlParams) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java index abfe23597..8e4b12fdf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Run repair was initiated. */ @Generated +@JsonSerialize(using = RepairRunResponse.RepairRunResponseSerializer.class) +@JsonDeserialize(using = RepairRunResponse.RepairRunResponseDeserializer.class) public class RepairRunResponse { /** * The ID of the repair. Must be provided in subsequent repairs using the `latest_repair_id` field * to ensure sequential repairs. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Run repair was initiated.
 *
 * <p>Wire-format twin of {@code RepairRunResponse}: this class carries the Jackson field mapping
 * and is what actually gets (de)serialized by the custom serializer on the model class.
 */
@Generated
class RepairRunResponsePb {
  // The repair ID, transported on the wire as "repair_id".
  @JsonProperty("repair_id")
  private Long repairId;

  public RepairRunResponsePb setRepairId(Long repairId) {
    this.repairId = repairId;
    return this;
  }

  public Long getRepairId() {
    return repairId;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    RepairRunResponsePb other = (RepairRunResponsePb) o;
    return Objects.equals(repairId, other.repairId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(repairId);
  }

  @Override
  public String toString() {
    return new ToStringer(RepairRunResponsePb.class).add("repairId", repairId).toString();
  }
}
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ResetJob.ResetJobSerializer.class) +@JsonDeserialize(using = ResetJob.ResetJobDeserializer.class) public class ResetJob { /** The canonical identifier of the job to reset. This field is required. */ - @JsonProperty("job_id") private Long jobId; /** @@ -19,7 +29,6 @@ public class ResetJob { *

// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResetJob}; carries the Jackson field mapping used by the model's
 * custom serializer and deserializer.
 */
@Generated
class ResetJobPb {
  // Canonical identifier of the job to reset, serialized as "job_id".
  @JsonProperty("job_id")
  private Long jobId;

  // The replacement settings for the job, serialized as "new_settings".
  @JsonProperty("new_settings")
  private JobSettings newSettings;

  public ResetJobPb setJobId(Long jobId) {
    this.jobId = jobId;
    return this;
  }

  public Long getJobId() {
    return jobId;
  }

  public ResetJobPb setNewSettings(JobSettings newSettings) {
    this.newSettings = newSettings;
    return this;
  }

  public JobSettings getNewSettings() {
    return newSettings;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ResetJobPb other = (ResetJobPb) o;
    return Objects.equals(jobId, other.jobId) && Objects.equals(newSettings, other.newSettings);
  }

  @Override
  public int hashCode() {
    return Objects.hash(jobId, newSettings);
  }

  @Override
  public String toString() {
    return new ToStringer(ResetJobPb.class)
        .add("jobId", jobId)
        .add("newSettings", newSettings)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResetResponse}. The response carries no fields, so this class only
 * provides value-object plumbing (equality, hashing, string form).
 */
@Generated
class ResetResponsePb {

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    // With no state, any two instances of the exact same runtime class are equal.
    return o != null && getClass() == o.getClass();
  }

  @Override
  public int hashCode() {
    return Objects.hash();
  }

  @Override
  public String toString() {
    return new ToStringer(ResetResponsePb.class).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResolvedConditionTaskValues}; holds the resolved left/right operands
 * of a condition task as they appear on the wire.
 */
@Generated
class ResolvedConditionTaskValuesPb {
  // Resolved left-hand operand, serialized as "left".
  @JsonProperty("left")
  private String left;

  // Resolved right-hand operand, serialized as "right".
  @JsonProperty("right")
  private String right;

  public ResolvedConditionTaskValuesPb setLeft(String left) {
    this.left = left;
    return this;
  }

  public String getLeft() {
    return left;
  }

  public ResolvedConditionTaskValuesPb setRight(String right) {
    this.right = right;
    return this;
  }

  public String getRight() {
    return right;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ResolvedConditionTaskValuesPb other = (ResolvedConditionTaskValuesPb) o;
    return Objects.equals(left, other.left) && Objects.equals(right, other.right);
  }

  @Override
  public int hashCode() {
    return Objects.hash(left, right);
  }

  @Override
  public String toString() {
    return new ToStringer(ResolvedConditionTaskValuesPb.class)
        .add("left", left)
        .add("right", right)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResolvedDbtTaskValues}; holds the resolved dbt commands as they
 * appear on the wire.
 */
@Generated
class ResolvedDbtTaskValuesPb {
  // Resolved dbt command lines, serialized as "commands".
  // NOTE(review): the extracted source used the raw type `Collection`; the element type is
  // restored to String per the Jobs API spec (dbt commands are an array of strings) — confirm
  // against the generator output.
  @JsonProperty("commands")
  private Collection<String> commands;

  public ResolvedDbtTaskValuesPb setCommands(Collection<String> commands) {
    this.commands = commands;
    return this;
  }

  public Collection<String> getCommands() {
    return commands;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ResolvedDbtTaskValuesPb that = (ResolvedDbtTaskValuesPb) o;
    return Objects.equals(commands, that.commands);
  }

  @Override
  public int hashCode() {
    return Objects.hash(commands);
  }

  @Override
  public String toString() {
    return new ToStringer(ResolvedDbtTaskValuesPb.class).add("commands", commands).toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedNotebookTaskValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedNotebookTaskValues.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ResolvedNotebookTaskValues.ResolvedNotebookTaskValuesSerializer.class) +@JsonDeserialize(using = ResolvedNotebookTaskValues.ResolvedNotebookTaskValuesDeserializer.class) public class ResolvedNotebookTaskValues { /** */ - @JsonProperty("base_parameters") private Map baseParameters; public ResolvedNotebookTaskValues setBaseParameters(Map baseParameters) { @@ -42,4 +52,41 @@ public String toString() { .add("baseParameters", baseParameters) .toString(); } + + ResolvedNotebookTaskValuesPb toPb() { + ResolvedNotebookTaskValuesPb pb = new ResolvedNotebookTaskValuesPb(); + pb.setBaseParameters(baseParameters); + + return pb; + } + + static ResolvedNotebookTaskValues fromPb(ResolvedNotebookTaskValuesPb pb) { + ResolvedNotebookTaskValues model = new ResolvedNotebookTaskValues(); + model.setBaseParameters(pb.getBaseParameters()); + + return model; + } + + public static class ResolvedNotebookTaskValuesSerializer + extends JsonSerializer { + @Override + public void serialize( + 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResolvedNotebookTaskValues}; holds the resolved notebook base
 * parameters as they appear on the wire.
 */
@Generated
class ResolvedNotebookTaskValuesPb {
  // Resolved notebook parameters, serialized as "base_parameters".
  // NOTE(review): the extracted source used the raw type `Map`; key/value types are restored to
  // String per the Jobs API spec (base_parameters is a map of string to string) — confirm
  // against the generator output.
  @JsonProperty("base_parameters")
  private Map<String, String> baseParameters;

  public ResolvedNotebookTaskValuesPb setBaseParameters(Map<String, String> baseParameters) {
    this.baseParameters = baseParameters;
    return this;
  }

  public Map<String, String> getBaseParameters() {
    return baseParameters;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ResolvedNotebookTaskValuesPb that = (ResolvedNotebookTaskValuesPb) o;
    return Objects.equals(baseParameters, that.baseParameters);
  }

  @Override
  public int hashCode() {
    return Objects.hash(baseParameters);
  }

  @Override
  public String toString() {
    return new ToStringer(ResolvedNotebookTaskValuesPb.class)
        .add("baseParameters", baseParameters)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.jobs;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format twin of {@code ResolvedParamPairValues}; holds resolved key/value parameters as
 * they appear on the wire.
 */
@Generated
class ResolvedParamPairValuesPb {
  // Resolved key/value parameters, serialized as "parameters".
  // NOTE(review): the extracted source used the raw type `Map`; key/value types are restored to
  // String per the Jobs API spec (a map of string to string) — confirm against the generator
  // output.
  @JsonProperty("parameters")
  private Map<String, String> parameters;

  public ResolvedParamPairValuesPb setParameters(Map<String, String> parameters) {
    this.parameters = parameters;
    return this;
  }

  public Map<String, String> getParameters() {
    return parameters;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ResolvedParamPairValuesPb that = (ResolvedParamPairValuesPb) o;
    return Objects.equals(parameters, that.parameters);
  }

  @Override
  public int hashCode() {
    return Objects.hash(parameters);
  }

  @Override
  public String toString() {
    return new ToStringer(ResolvedParamPairValuesPb.class).add("parameters", parameters).toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedPythonWheelTaskValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedPythonWheelTaskValues.java @@ -4,19 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ResolvedPythonWheelTaskValues.ResolvedPythonWheelTaskValuesSerializer.class) +@JsonDeserialize( + using = ResolvedPythonWheelTaskValues.ResolvedPythonWheelTaskValuesDeserializer.class) public class ResolvedPythonWheelTaskValues { /** */ - @JsonProperty("named_parameters") private Map namedParameters; /** */ - @JsonProperty("parameters") private Collection parameters; public ResolvedPythonWheelTaskValues setNamedParameters(Map namedParameters) { @@ -58,4 +68,44 @@ public String toString() { .add("parameters", parameters) .toString(); } + + ResolvedPythonWheelTaskValuesPb toPb() { + ResolvedPythonWheelTaskValuesPb pb = new ResolvedPythonWheelTaskValuesPb(); + pb.setNamedParameters(namedParameters); + pb.setParameters(parameters); + + return pb; + } + + static ResolvedPythonWheelTaskValues fromPb(ResolvedPythonWheelTaskValuesPb pb) { + ResolvedPythonWheelTaskValues model = new ResolvedPythonWheelTaskValues(); + 
model.setNamedParameters(pb.getNamedParameters()); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class ResolvedPythonWheelTaskValuesSerializer + extends JsonSerializer { + @Override + public void serialize( + ResolvedPythonWheelTaskValues value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResolvedPythonWheelTaskValuesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResolvedPythonWheelTaskValuesDeserializer + extends JsonDeserializer { + @Override + public ResolvedPythonWheelTaskValues deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResolvedPythonWheelTaskValuesPb pb = + mapper.readValue(p, ResolvedPythonWheelTaskValuesPb.class); + return ResolvedPythonWheelTaskValues.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedPythonWheelTaskValuesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedPythonWheelTaskValuesPb.java new file mode 100755 index 000000000..71bdcebba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedPythonWheelTaskValuesPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class ResolvedPythonWheelTaskValuesPb { + @JsonProperty("named_parameters") + private Map namedParameters; + + @JsonProperty("parameters") + private Collection parameters; + + public ResolvedPythonWheelTaskValuesPb setNamedParameters(Map namedParameters) { + this.namedParameters = namedParameters; + return this; + } + + public Map getNamedParameters() { + return namedParameters; + } + + public ResolvedPythonWheelTaskValuesPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResolvedPythonWheelTaskValuesPb that = (ResolvedPythonWheelTaskValuesPb) o; + return Objects.equals(namedParameters, that.namedParameters) + && Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(namedParameters, parameters); + } + + @Override + public String toString() { + return new ToStringer(ResolvedPythonWheelTaskValuesPb.class) + .add("namedParameters", namedParameters) + .add("parameters", parameters) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java index 508176877..7061e9077 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java @@ -4,18 +4,27 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ResolvedRunJobTaskValues.ResolvedRunJobTaskValuesSerializer.class) +@JsonDeserialize(using = ResolvedRunJobTaskValues.ResolvedRunJobTaskValuesDeserializer.class) public class ResolvedRunJobTaskValues { /** */ - @JsonProperty("job_parameters") private Map jobParameters; /** */ - @JsonProperty("parameters") private Map parameters; public ResolvedRunJobTaskValues setJobParameters(Map jobParameters) { @@ -57,4 +66,43 @@ public String toString() { .add("parameters", parameters) .toString(); } + + ResolvedRunJobTaskValuesPb toPb() { + ResolvedRunJobTaskValuesPb pb = new ResolvedRunJobTaskValuesPb(); + pb.setJobParameters(jobParameters); + pb.setParameters(parameters); + + return pb; + } + + static ResolvedRunJobTaskValues fromPb(ResolvedRunJobTaskValuesPb pb) { + ResolvedRunJobTaskValues model = new ResolvedRunJobTaskValues(); + model.setJobParameters(pb.getJobParameters()); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class ResolvedRunJobTaskValuesSerializer + extends JsonSerializer { + @Override + public void serialize( + ResolvedRunJobTaskValues value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResolvedRunJobTaskValuesPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResolvedRunJobTaskValuesDeserializer + extends JsonDeserializer { + @Override + public ResolvedRunJobTaskValues deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResolvedRunJobTaskValuesPb pb = mapper.readValue(p, ResolvedRunJobTaskValuesPb.class); + return ResolvedRunJobTaskValues.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValuesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValuesPb.java new file mode 100755 index 000000000..46445a09c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValuesPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ResolvedRunJobTaskValuesPb { + @JsonProperty("job_parameters") + private Map jobParameters; + + @JsonProperty("parameters") + private Map parameters; + + public ResolvedRunJobTaskValuesPb setJobParameters(Map jobParameters) { + this.jobParameters = jobParameters; + return this; + } + + public Map getJobParameters() { + return jobParameters; + } + + public ResolvedRunJobTaskValuesPb setParameters(Map parameters) { + this.parameters = parameters; + return this; + } + + public Map getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResolvedRunJobTaskValuesPb that = (ResolvedRunJobTaskValuesPb) o; + return Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(jobParameters, parameters); + } + + @Override + public String toString() { + return new ToStringer(ResolvedRunJobTaskValuesPb.class) + .add("jobParameters", jobParameters) + .add("parameters", parameters) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValues.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValues.java index d4c45e29d..c2a0fc931 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValues.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ResolvedStringParamsValues.ResolvedStringParamsValuesSerializer.class) +@JsonDeserialize(using = ResolvedStringParamsValues.ResolvedStringParamsValuesDeserializer.class) public class ResolvedStringParamsValues { /** */ - @JsonProperty("parameters") private Collection parameters; public ResolvedStringParamsValues setParameters(Collection parameters) { @@ -42,4 +52,41 @@ public String toString() { .add("parameters", parameters) .toString(); } + + ResolvedStringParamsValuesPb toPb() { + ResolvedStringParamsValuesPb pb = new ResolvedStringParamsValuesPb(); + pb.setParameters(parameters); + + return pb; + } + + static ResolvedStringParamsValues fromPb(ResolvedStringParamsValuesPb pb) { + ResolvedStringParamsValues model = new ResolvedStringParamsValues(); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class ResolvedStringParamsValuesSerializer + extends JsonSerializer { + @Override + public void serialize( + ResolvedStringParamsValues value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResolvedStringParamsValuesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResolvedStringParamsValuesDeserializer + extends JsonDeserializer { + @Override + public ResolvedStringParamsValues 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResolvedStringParamsValuesPb pb = mapper.readValue(p, ResolvedStringParamsValuesPb.class); + return ResolvedStringParamsValues.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValuesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValuesPb.java new file mode 100755 index 000000000..ed4a92bab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedStringParamsValuesPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ResolvedStringParamsValuesPb { + @JsonProperty("parameters") + private Collection parameters; + + public ResolvedStringParamsValuesPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResolvedStringParamsValuesPb that = (ResolvedStringParamsValuesPb) o; + return Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(parameters); + } + + @Override + public String toString() { + return new ToStringer(ResolvedStringParamsValuesPb.class) + .add("parameters", parameters) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValues.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValues.java index f0c2f8db0..d21fabd61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValues.java @@ -4,49 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ResolvedValues.ResolvedValuesSerializer.class) +@JsonDeserialize(using = ResolvedValues.ResolvedValuesDeserializer.class) public class ResolvedValues { /** */ - @JsonProperty("condition_task") private ResolvedConditionTaskValues conditionTask; /** */ - @JsonProperty("dbt_task") private ResolvedDbtTaskValues dbtTask; /** */ - @JsonProperty("notebook_task") private ResolvedNotebookTaskValues notebookTask; /** */ - @JsonProperty("python_wheel_task") private ResolvedPythonWheelTaskValues pythonWheelTask; /** */ - @JsonProperty("run_job_task") private ResolvedRunJobTaskValues runJobTask; /** */ - @JsonProperty("simulation_task") private ResolvedParamPairValues simulationTask; /** */ - @JsonProperty("spark_jar_task") private ResolvedStringParamsValues sparkJarTask; /** */ - @JsonProperty("spark_python_task") private 
ResolvedStringParamsValues sparkPythonTask; /** */ - @JsonProperty("spark_submit_task") private ResolvedStringParamsValues sparkSubmitTask; /** */ - @JsonProperty("sql_task") private ResolvedParamPairValues sqlTask; public ResolvedValues setConditionTask(ResolvedConditionTaskValues conditionTask) { @@ -186,4 +187,56 @@ public String toString() { .add("sqlTask", sqlTask) .toString(); } + + ResolvedValuesPb toPb() { + ResolvedValuesPb pb = new ResolvedValuesPb(); + pb.setConditionTask(conditionTask); + pb.setDbtTask(dbtTask); + pb.setNotebookTask(notebookTask); + pb.setPythonWheelTask(pythonWheelTask); + pb.setRunJobTask(runJobTask); + pb.setSimulationTask(simulationTask); + pb.setSparkJarTask(sparkJarTask); + pb.setSparkPythonTask(sparkPythonTask); + pb.setSparkSubmitTask(sparkSubmitTask); + pb.setSqlTask(sqlTask); + + return pb; + } + + static ResolvedValues fromPb(ResolvedValuesPb pb) { + ResolvedValues model = new ResolvedValues(); + model.setConditionTask(pb.getConditionTask()); + model.setDbtTask(pb.getDbtTask()); + model.setNotebookTask(pb.getNotebookTask()); + model.setPythonWheelTask(pb.getPythonWheelTask()); + model.setRunJobTask(pb.getRunJobTask()); + model.setSimulationTask(pb.getSimulationTask()); + model.setSparkJarTask(pb.getSparkJarTask()); + model.setSparkPythonTask(pb.getSparkPythonTask()); + model.setSparkSubmitTask(pb.getSparkSubmitTask()); + model.setSqlTask(pb.getSqlTask()); + + return model; + } + + public static class ResolvedValuesSerializer extends JsonSerializer { + @Override + public void serialize(ResolvedValues value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResolvedValuesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResolvedValuesDeserializer extends JsonDeserializer { + @Override + public ResolvedValues deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResolvedValuesPb pb = mapper.readValue(p, ResolvedValuesPb.class); + return ResolvedValues.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValuesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValuesPb.java new file mode 100755 index 000000000..7f3ca9f8f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValuesPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ResolvedValuesPb { + @JsonProperty("condition_task") + private ResolvedConditionTaskValues conditionTask; + + @JsonProperty("dbt_task") + private ResolvedDbtTaskValues dbtTask; + + @JsonProperty("notebook_task") + private ResolvedNotebookTaskValues notebookTask; + + @JsonProperty("python_wheel_task") + private ResolvedPythonWheelTaskValues pythonWheelTask; + + @JsonProperty("run_job_task") + private ResolvedRunJobTaskValues runJobTask; + + @JsonProperty("simulation_task") + private ResolvedParamPairValues simulationTask; + + @JsonProperty("spark_jar_task") + private ResolvedStringParamsValues sparkJarTask; + + @JsonProperty("spark_python_task") + private ResolvedStringParamsValues sparkPythonTask; + + @JsonProperty("spark_submit_task") + private ResolvedStringParamsValues sparkSubmitTask; + + @JsonProperty("sql_task") + private ResolvedParamPairValues sqlTask; + + public ResolvedValuesPb setConditionTask(ResolvedConditionTaskValues conditionTask) { + this.conditionTask = conditionTask; + return this; + } + + public ResolvedConditionTaskValues getConditionTask() { + return conditionTask; + } + + public 
ResolvedValuesPb setDbtTask(ResolvedDbtTaskValues dbtTask) { + this.dbtTask = dbtTask; + return this; + } + + public ResolvedDbtTaskValues getDbtTask() { + return dbtTask; + } + + public ResolvedValuesPb setNotebookTask(ResolvedNotebookTaskValues notebookTask) { + this.notebookTask = notebookTask; + return this; + } + + public ResolvedNotebookTaskValues getNotebookTask() { + return notebookTask; + } + + public ResolvedValuesPb setPythonWheelTask(ResolvedPythonWheelTaskValues pythonWheelTask) { + this.pythonWheelTask = pythonWheelTask; + return this; + } + + public ResolvedPythonWheelTaskValues getPythonWheelTask() { + return pythonWheelTask; + } + + public ResolvedValuesPb setRunJobTask(ResolvedRunJobTaskValues runJobTask) { + this.runJobTask = runJobTask; + return this; + } + + public ResolvedRunJobTaskValues getRunJobTask() { + return runJobTask; + } + + public ResolvedValuesPb setSimulationTask(ResolvedParamPairValues simulationTask) { + this.simulationTask = simulationTask; + return this; + } + + public ResolvedParamPairValues getSimulationTask() { + return simulationTask; + } + + public ResolvedValuesPb setSparkJarTask(ResolvedStringParamsValues sparkJarTask) { + this.sparkJarTask = sparkJarTask; + return this; + } + + public ResolvedStringParamsValues getSparkJarTask() { + return sparkJarTask; + } + + public ResolvedValuesPb setSparkPythonTask(ResolvedStringParamsValues sparkPythonTask) { + this.sparkPythonTask = sparkPythonTask; + return this; + } + + public ResolvedStringParamsValues getSparkPythonTask() { + return sparkPythonTask; + } + + public ResolvedValuesPb setSparkSubmitTask(ResolvedStringParamsValues sparkSubmitTask) { + this.sparkSubmitTask = sparkSubmitTask; + return this; + } + + public ResolvedStringParamsValues getSparkSubmitTask() { + return sparkSubmitTask; + } + + public ResolvedValuesPb setSqlTask(ResolvedParamPairValues sqlTask) { + this.sqlTask = sqlTask; + return this; + } + + public ResolvedParamPairValues getSqlTask() { + return 
sqlTask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResolvedValuesPb that = (ResolvedValuesPb) o; + return Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dbtTask, that.dbtTask) + && Objects.equals(notebookTask, that.notebookTask) + && Objects.equals(pythonWheelTask, that.pythonWheelTask) + && Objects.equals(runJobTask, that.runJobTask) + && Objects.equals(simulationTask, that.simulationTask) + && Objects.equals(sparkJarTask, that.sparkJarTask) + && Objects.equals(sparkPythonTask, that.sparkPythonTask) + && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) + && Objects.equals(sqlTask, that.sqlTask); + } + + @Override + public int hashCode() { + return Objects.hash( + conditionTask, + dbtTask, + notebookTask, + pythonWheelTask, + runJobTask, + simulationTask, + sparkJarTask, + sparkPythonTask, + sparkSubmitTask, + sqlTask); + } + + @Override + public String toString() { + return new ToStringer(ResolvedValuesPb.class) + .add("conditionTask", conditionTask) + .add("dbtTask", dbtTask) + .add("notebookTask", notebookTask) + .add("pythonWheelTask", pythonWheelTask) + .add("runJobTask", runJobTask) + .add("simulationTask", simulationTask) + .add("sparkJarTask", sparkJarTask) + .add("sparkPythonTask", sparkPythonTask) + .add("sparkSubmitTask", sparkSubmitTask) + .add("sqlTask", sqlTask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 2e090dcd9..73dd77755 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Run was retrieved successfully */ @Generated +@JsonSerialize(using = Run.RunSerializer.class) +@JsonDeserialize(using = Run.RunDeserializer.class) public class Run { /** * The sequence number of this run attempt for a triggered job run. The initial attempt of a run @@ -18,7 +29,6 @@ public class Run { * original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they * succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job. */ - @JsonProperty("attempt_number") private Long attemptNumber; /** @@ -27,29 +37,24 @@ public class Run { * and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. * The total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("cleanup_duration") private Long cleanupDuration; /** * The cluster used for this run. If the run is specified to use a new cluster, this field is set * once the Jobs service has requested a cluster for the run. */ - @JsonProperty("cluster_instance") private ClusterInstance clusterInstance; /** A snapshot of the job’s cluster specification when this run was created. */ - @JsonProperty("cluster_spec") private ClusterSpec clusterSpec; /** * The creator user name. This field won’t be included in the response if the user has already * been deleted. 
*/ - @JsonProperty("creator_user_name") private String creatorUserName; /** Description of the run */ - @JsonProperty("description") private String description; /** @@ -61,14 +66,12 @@ public class Run { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. */ - @JsonProperty("end_time") private Long endTime; /** @@ -78,7 +81,6 @@ public class Run { * `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("execution_duration") private Long executionDuration; /** @@ -92,7 +94,6 @@ public class Run { *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** @@ -100,11 +101,9 @@ public class Run { * They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 * :method:jobs/listruns requests with `expand_tasks=true`. */ - @JsonProperty("has_more") private Boolean hasMore; /** Only populated by for-each iterations. The parent for-each task is located in tasks array. */ - @JsonProperty("iterations") private Collection iterations; /** @@ -113,15 +112,12 @@ public class Run { * task settings. If more than 100 job clusters are available, you can paginate through them using * :method:jobs/getrun. */ - @JsonProperty("job_clusters") private Collection jobClusters; /** The canonical identifier of the job that contains this run. */ - @JsonProperty("job_id") private Long jobId; /** Job-level parameters used in the run */ - @JsonProperty("job_parameters") private Collection jobParameters; /** @@ -129,50 +125,39 @@ public class Run { * populated with the job run ID. For task runs, the field is populated with the ID of the job run * that the task run belongs to. */ - @JsonProperty("job_run_id") private Long jobRunId; /** A token that can be used to list the next page of array properties. */ - @JsonProperty("next_page_token") private String nextPageToken; /** A unique identifier for this job run. This is set to the same value as `run_id`. */ - @JsonProperty("number_in_job") private Long numberInJob; /** * If this run is a retry of a prior run attempt, this field contains the run_id of the original * attempt; otherwise, it is the same as the run_id. */ - @JsonProperty("original_attempt_run_id") private Long originalAttemptRunId; /** The parameters used for this run. 
*/ - @JsonProperty("overriding_parameters") private RunParameters overridingParameters; /** The time in milliseconds that the run has spent in the queue. */ - @JsonProperty("queue_duration") private Long queueDuration; /** The repair history of the run. */ - @JsonProperty("repair_history") private Collection repairHistory; /** The time in milliseconds it took the job run and all of its repairs to finish. */ - @JsonProperty("run_duration") private Long runDuration; /** The canonical identifier of the run. This ID is unique across all runs of all jobs. */ - @JsonProperty("run_id") private Long runId; /** An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding. */ - @JsonProperty("run_name") private String runName; /** The URL to the detail page of the run. */ - @JsonProperty("run_page_url") private String runPageUrl; /** @@ -183,11 +168,9 @@ public class Run { *

[dbutils.notebook.run]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow */ - @JsonProperty("run_type") private RunType runType; /** The cron schedule that triggered this run if it was triggered by the periodic scheduler. */ - @JsonProperty("schedule") private CronSchedule schedule; /** @@ -197,7 +180,6 @@ public class Run { * the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("setup_duration") private Long setupDuration; /** @@ -205,15 +187,12 @@ public class Run { * This may not be the time when the job task starts executing, for example, if the job is * scheduled to run on a new cluster, this is the time the cluster creation call is issued. */ - @JsonProperty("start_time") private Long startTime; /** Deprecated. Please use the `status` field instead. */ - @JsonProperty("state") private RunState state; /** The current status of the run */ - @JsonProperty("status") private RunStatus status; /** @@ -222,7 +201,6 @@ public class Run { * paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object * root to determine if more results are available. */ - @JsonProperty("tasks") private Collection tasks; /** @@ -237,11 +215,9 @@ public class Run { * triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to * manually restart a continuous job run. 
*/ - @JsonProperty("trigger") private TriggerType trigger; /** Additional details about what triggered the run */ - @JsonProperty("trigger_info") private TriggerInfo triggerInfo; public Run setAttemptNumber(Long attemptNumber) { @@ -681,4 +657,105 @@ public String toString() { .add("triggerInfo", triggerInfo) .toString(); } + + RunPb toPb() { + RunPb pb = new RunPb(); + pb.setAttemptNumber(attemptNumber); + pb.setCleanupDuration(cleanupDuration); + pb.setClusterInstance(clusterInstance); + pb.setClusterSpec(clusterSpec); + pb.setCreatorUserName(creatorUserName); + pb.setDescription(description); + pb.setEffectivePerformanceTarget(effectivePerformanceTarget); + pb.setEndTime(endTime); + pb.setExecutionDuration(executionDuration); + pb.setGitSource(gitSource); + pb.setHasMore(hasMore); + pb.setIterations(iterations); + pb.setJobClusters(jobClusters); + pb.setJobId(jobId); + pb.setJobParameters(jobParameters); + pb.setJobRunId(jobRunId); + pb.setNextPageToken(nextPageToken); + pb.setNumberInJob(numberInJob); + pb.setOriginalAttemptRunId(originalAttemptRunId); + pb.setOverridingParameters(overridingParameters); + pb.setQueueDuration(queueDuration); + pb.setRepairHistory(repairHistory); + pb.setRunDuration(runDuration); + pb.setRunId(runId); + pb.setRunName(runName); + pb.setRunPageUrl(runPageUrl); + pb.setRunType(runType); + pb.setSchedule(schedule); + pb.setSetupDuration(setupDuration); + pb.setStartTime(startTime); + pb.setState(state); + pb.setStatus(status); + pb.setTasks(tasks); + pb.setTrigger(trigger); + pb.setTriggerInfo(triggerInfo); + + return pb; + } + + static Run fromPb(RunPb pb) { + Run model = new Run(); + model.setAttemptNumber(pb.getAttemptNumber()); + model.setCleanupDuration(pb.getCleanupDuration()); + model.setClusterInstance(pb.getClusterInstance()); + model.setClusterSpec(pb.getClusterSpec()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setDescription(pb.getDescription()); + 
model.setEffectivePerformanceTarget(pb.getEffectivePerformanceTarget()); + model.setEndTime(pb.getEndTime()); + model.setExecutionDuration(pb.getExecutionDuration()); + model.setGitSource(pb.getGitSource()); + model.setHasMore(pb.getHasMore()); + model.setIterations(pb.getIterations()); + model.setJobClusters(pb.getJobClusters()); + model.setJobId(pb.getJobId()); + model.setJobParameters(pb.getJobParameters()); + model.setJobRunId(pb.getJobRunId()); + model.setNextPageToken(pb.getNextPageToken()); + model.setNumberInJob(pb.getNumberInJob()); + model.setOriginalAttemptRunId(pb.getOriginalAttemptRunId()); + model.setOverridingParameters(pb.getOverridingParameters()); + model.setQueueDuration(pb.getQueueDuration()); + model.setRepairHistory(pb.getRepairHistory()); + model.setRunDuration(pb.getRunDuration()); + model.setRunId(pb.getRunId()); + model.setRunName(pb.getRunName()); + model.setRunPageUrl(pb.getRunPageUrl()); + model.setRunType(pb.getRunType()); + model.setSchedule(pb.getSchedule()); + model.setSetupDuration(pb.getSetupDuration()); + model.setStartTime(pb.getStartTime()); + model.setState(pb.getState()); + model.setStatus(pb.getStatus()); + model.setTasks(pb.getTasks()); + model.setTrigger(pb.getTrigger()); + model.setTriggerInfo(pb.getTriggerInfo()); + + return model; + } + + public static class RunSerializer extends JsonSerializer { + @Override + public void serialize(Run value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunDeserializer extends JsonDeserializer { + @Override + public Run deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunPb pb = mapper.readValue(p, RunPb.class); + return Run.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java index 8a48eb92d..7c62b00c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RunConditionTask.RunConditionTaskSerializer.class) +@JsonDeserialize(using = RunConditionTask.RunConditionTaskDeserializer.class) public class RunConditionTask { /** * The left operand of the condition task. Can be either a string value or a job state or * parameter reference. */ - @JsonProperty("left") private String left; /** @@ -26,21 +36,18 @@ public class RunConditionTask { * `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or * `“false”` for the comparison. */ - @JsonProperty("op") private ConditionTaskOp op; /** * The condition expression evaluation result. Filled in if the task was successfully completed. 
* Can be `"true"` or `"false"` */ - @JsonProperty("outcome") private String outcome; /** * The right operand of the condition task. Can be either a string value or a job state or * parameter reference. */ - @JsonProperty("right") private String right; public RunConditionTask setLeft(String left) { @@ -104,4 +111,44 @@ public String toString() { .add("right", right) .toString(); } + + RunConditionTaskPb toPb() { + RunConditionTaskPb pb = new RunConditionTaskPb(); + pb.setLeft(left); + pb.setOp(op); + pb.setOutcome(outcome); + pb.setRight(right); + + return pb; + } + + static RunConditionTask fromPb(RunConditionTaskPb pb) { + RunConditionTask model = new RunConditionTask(); + model.setLeft(pb.getLeft()); + model.setOp(pb.getOp()); + model.setOutcome(pb.getOutcome()); + model.setRight(pb.getRight()); + + return model; + } + + public static class RunConditionTaskSerializer extends JsonSerializer { + @Override + public void serialize(RunConditionTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunConditionTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunConditionTaskDeserializer extends JsonDeserializer { + @Override + public RunConditionTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunConditionTaskPb pb = mapper.readValue(p, RunConditionTaskPb.class); + return RunConditionTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskPb.java new file mode 100755 index 000000000..78c167d99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RunConditionTaskPb { + @JsonProperty("left") + private String left; + + @JsonProperty("op") + private ConditionTaskOp op; + + @JsonProperty("outcome") + private String outcome; + + @JsonProperty("right") + private String right; + + public RunConditionTaskPb setLeft(String left) { + this.left = left; + return this; + } + + public String getLeft() { + return left; + } + + public RunConditionTaskPb setOp(ConditionTaskOp op) { + this.op = op; + return this; + } + + public ConditionTaskOp getOp() { + return op; + } + + public RunConditionTaskPb setOutcome(String outcome) { + this.outcome = outcome; + return this; + } + + public String getOutcome() { + return outcome; + } + + public RunConditionTaskPb setRight(String right) { + this.right = right; + return this; + } + + public String getRight() { + return right; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunConditionTaskPb that = (RunConditionTaskPb) o; + return Objects.equals(left, that.left) + && Objects.equals(op, that.op) + && Objects.equals(outcome, that.outcome) + 
&& Objects.equals(right, that.right); + } + + @Override + public int hashCode() { + return Objects.hash(left, op, outcome, right); + } + + @Override + public String toString() { + return new ToStringer(RunConditionTaskPb.class) + .add("left", left) + .add("op", op) + .add("outcome", outcome) + .add("right", right) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java index c146ae3cb..ee4d9539e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java @@ -4,33 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RunForEachTask.RunForEachTaskSerializer.class) +@JsonDeserialize(using = RunForEachTask.RunForEachTaskDeserializer.class) public class RunForEachTask { /** * An optional maximum allowed number of concurrent runs of the task. Set this value if you want * to be able to execute multiple runs of the task concurrently. */ - @JsonProperty("concurrency") private Long concurrency; /** * Array for task to iterate on. This can be a JSON string or a reference to an array parameter. 
*/ - @JsonProperty("inputs") private String inputs; /** * Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of * an For each task */ - @JsonProperty("stats") private ForEachStats stats; /** Configuration for the task that will be run for each element in the array */ - @JsonProperty("task") private Task task; public RunForEachTask setConcurrency(Long concurrency) { @@ -94,4 +101,44 @@ public String toString() { .add("task", task) .toString(); } + + RunForEachTaskPb toPb() { + RunForEachTaskPb pb = new RunForEachTaskPb(); + pb.setConcurrency(concurrency); + pb.setInputs(inputs); + pb.setStats(stats); + pb.setTask(task); + + return pb; + } + + static RunForEachTask fromPb(RunForEachTaskPb pb) { + RunForEachTask model = new RunForEachTask(); + model.setConcurrency(pb.getConcurrency()); + model.setInputs(pb.getInputs()); + model.setStats(pb.getStats()); + model.setTask(pb.getTask()); + + return model; + } + + public static class RunForEachTaskSerializer extends JsonSerializer { + @Override + public void serialize(RunForEachTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunForEachTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunForEachTaskDeserializer extends JsonDeserializer { + @Override + public RunForEachTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunForEachTaskPb pb = mapper.readValue(p, RunForEachTaskPb.class); + return RunForEachTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTaskPb.java new file mode 100755 index 000000000..322af0968 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTaskPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RunForEachTaskPb { + @JsonProperty("concurrency") + private Long concurrency; + + @JsonProperty("inputs") + private String inputs; + + @JsonProperty("stats") + private ForEachStats stats; + + @JsonProperty("task") + private Task task; + + public RunForEachTaskPb setConcurrency(Long concurrency) { + this.concurrency = concurrency; + return this; + } + + public Long getConcurrency() { + return concurrency; + } + + public RunForEachTaskPb setInputs(String inputs) { + this.inputs = inputs; + return this; + } + + public String getInputs() { + return inputs; + } + + public RunForEachTaskPb setStats(ForEachStats stats) { + this.stats = stats; + return this; + } + + public ForEachStats getStats() { + return stats; + } + + public RunForEachTaskPb setTask(Task task) { + this.task = task; + return this; + } + + public Task getTask() { + return task; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunForEachTaskPb that = (RunForEachTaskPb) o; + return Objects.equals(concurrency, that.concurrency) + && Objects.equals(inputs, that.inputs) + && 
Objects.equals(stats, that.stats) + && Objects.equals(task, that.task); + } + + @Override + public int hashCode() { + return Objects.hash(concurrency, inputs, stats, task); + } + + @Override + public String toString() { + return new ToStringer(RunForEachTaskPb.class) + .add("concurrency", concurrency) + .add("inputs", inputs) + .add("stats", stats) + .add("task", task) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutput.java index a96e44381..994c7b229 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutput.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RunJobOutput.RunJobOutputSerializer.class) +@JsonDeserialize(using = RunJobOutput.RunJobOutputDeserializer.class) public class RunJobOutput { /** The run id of the triggered job run */ - @JsonProperty("run_id") private Long runId; public RunJobOutput setRunId(Long runId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(RunJobOutput.class).add("runId", runId).toString(); } + + RunJobOutputPb 
toPb() { + RunJobOutputPb pb = new RunJobOutputPb(); + pb.setRunId(runId); + + return pb; + } + + static RunJobOutput fromPb(RunJobOutputPb pb) { + RunJobOutput model = new RunJobOutput(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class RunJobOutputSerializer extends JsonSerializer { + @Override + public void serialize(RunJobOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunJobOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunJobOutputDeserializer extends JsonDeserializer { + @Override + public RunJobOutput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunJobOutputPb pb = mapper.readValue(p, RunJobOutputPb.class); + return RunJobOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutputPb.java new file mode 100755 index 000000000..d68d9c936 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutputPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RunJobOutputPb { + @JsonProperty("run_id") + private Long runId; + + public RunJobOutputPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunJobOutputPb that = (RunJobOutputPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(RunJobOutputPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java index 593a3dc97..5042fade5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = RunJobTask.RunJobTaskSerializer.class) +@JsonDeserialize(using = RunJobTask.RunJobTaskDeserializer.class) public class RunJobTask { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ - @JsonProperty("dbt_commands") private Collection dbtCommands; /** @@ -29,15 +39,12 @@ public class RunJobTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("jar_params") private Collection jarParams; /** ID of the job to trigger. */ - @JsonProperty("job_id") private Long jobId; /** Job-level parameters used to trigger the job. */ - @JsonProperty("job_parameters") private Map jobParameters; /** @@ -57,15 +64,12 @@ public class RunJobTask { *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ - @JsonProperty("notebook_params") private Map notebookParams; /** Controls whether the pipeline should perform a full refresh */ - @JsonProperty("pipeline_params") private PipelineParams pipelineParams; /** */ - @JsonProperty("python_named_params") private Map pythonNamedParams; /** @@ -84,7 +88,6 @@ public class RunJobTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("python_params") private Collection pythonParams; /** @@ -104,14 +107,12 @@ public class RunJobTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; /** * A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john * doe", "age": "35"}`. The SQL alert task does not support custom parameters. */ - @JsonProperty("sql_params") private Map sqlParams; public RunJobTask setDbtCommands(Collection dbtCommands) { @@ -251,4 +252,55 @@ public String toString() { .add("sqlParams", sqlParams) .toString(); } + + RunJobTaskPb toPb() { + RunJobTaskPb pb = new RunJobTaskPb(); + pb.setDbtCommands(dbtCommands); + pb.setJarParams(jarParams); + pb.setJobId(jobId); + pb.setJobParameters(jobParameters); + pb.setNotebookParams(notebookParams); + pb.setPipelineParams(pipelineParams); + pb.setPythonNamedParams(pythonNamedParams); + pb.setPythonParams(pythonParams); + pb.setSparkSubmitParams(sparkSubmitParams); + pb.setSqlParams(sqlParams); + + return pb; + } + + static RunJobTask fromPb(RunJobTaskPb pb) { + RunJobTask model = new RunJobTask(); + model.setDbtCommands(pb.getDbtCommands()); + model.setJarParams(pb.getJarParams()); + model.setJobId(pb.getJobId()); + model.setJobParameters(pb.getJobParameters()); + model.setNotebookParams(pb.getNotebookParams()); + model.setPipelineParams(pb.getPipelineParams()); + model.setPythonNamedParams(pb.getPythonNamedParams()); + model.setPythonParams(pb.getPythonParams()); + model.setSparkSubmitParams(pb.getSparkSubmitParams()); + model.setSqlParams(pb.getSqlParams()); + + return model; + } + + public static class RunJobTaskSerializer extends JsonSerializer { + @Override + public void serialize(RunJobTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunJobTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunJobTaskDeserializer extends JsonDeserializer { + @Override + public RunJobTask deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunJobTaskPb pb = mapper.readValue(p, RunJobTaskPb.class); + return RunJobTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTaskPb.java new file mode 100755 index 000000000..d00a17654 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTaskPb.java @@ -0,0 +1,181 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class RunJobTaskPb { + @JsonProperty("dbt_commands") + private Collection dbtCommands; + + @JsonProperty("jar_params") + private Collection jarParams; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("job_parameters") + private Map jobParameters; + + @JsonProperty("notebook_params") + private Map notebookParams; + + @JsonProperty("pipeline_params") + private PipelineParams pipelineParams; + + @JsonProperty("python_named_params") + private Map pythonNamedParams; + + @JsonProperty("python_params") + private Collection pythonParams; + + @JsonProperty("spark_submit_params") + private Collection sparkSubmitParams; + + @JsonProperty("sql_params") + private Map sqlParams; + + public RunJobTaskPb setDbtCommands(Collection dbtCommands) { + this.dbtCommands = dbtCommands; + return this; + } + + public Collection getDbtCommands() { + return dbtCommands; + } + + public RunJobTaskPb setJarParams(Collection jarParams) { + this.jarParams = jarParams; + return this; + } + + 
public Collection getJarParams() { + return jarParams; + } + + public RunJobTaskPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public RunJobTaskPb setJobParameters(Map jobParameters) { + this.jobParameters = jobParameters; + return this; + } + + public Map getJobParameters() { + return jobParameters; + } + + public RunJobTaskPb setNotebookParams(Map notebookParams) { + this.notebookParams = notebookParams; + return this; + } + + public Map getNotebookParams() { + return notebookParams; + } + + public RunJobTaskPb setPipelineParams(PipelineParams pipelineParams) { + this.pipelineParams = pipelineParams; + return this; + } + + public PipelineParams getPipelineParams() { + return pipelineParams; + } + + public RunJobTaskPb setPythonNamedParams(Map pythonNamedParams) { + this.pythonNamedParams = pythonNamedParams; + return this; + } + + public Map getPythonNamedParams() { + return pythonNamedParams; + } + + public RunJobTaskPb setPythonParams(Collection pythonParams) { + this.pythonParams = pythonParams; + return this; + } + + public Collection getPythonParams() { + return pythonParams; + } + + public RunJobTaskPb setSparkSubmitParams(Collection sparkSubmitParams) { + this.sparkSubmitParams = sparkSubmitParams; + return this; + } + + public Collection getSparkSubmitParams() { + return sparkSubmitParams; + } + + public RunJobTaskPb setSqlParams(Map sqlParams) { + this.sqlParams = sqlParams; + return this; + } + + public Map getSqlParams() { + return sqlParams; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunJobTaskPb that = (RunJobTaskPb) o; + return Objects.equals(dbtCommands, that.dbtCommands) + && Objects.equals(jarParams, that.jarParams) + && Objects.equals(jobId, that.jobId) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(notebookParams, that.notebookParams) + && 
Objects.equals(pipelineParams, that.pipelineParams) + && Objects.equals(pythonNamedParams, that.pythonNamedParams) + && Objects.equals(pythonParams, that.pythonParams) + && Objects.equals(sparkSubmitParams, that.sparkSubmitParams) + && Objects.equals(sqlParams, that.sqlParams); + } + + @Override + public int hashCode() { + return Objects.hash( + dbtCommands, + jarParams, + jobId, + jobParameters, + notebookParams, + pipelineParams, + pythonNamedParams, + pythonParams, + sparkSubmitParams, + sqlParams); + } + + @Override + public String toString() { + return new ToStringer(RunJobTaskPb.class) + .add("dbtCommands", dbtCommands) + .add("jarParams", jarParams) + .add("jobId", jobId) + .add("jobParameters", jobParameters) + .add("notebookParams", notebookParams) + .add("pipelineParams", pipelineParams) + .add("pythonNamedParams", pythonNamedParams) + .add("pythonParams", pythonParams) + .add("sparkSubmitParams", sparkSubmitParams) + .add("sqlParams", sqlParams) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index 4b2a28f7e..7c2899fe5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = RunNow.RunNowSerializer.class) +@JsonDeserialize(using = RunNow.RunNowDeserializer.class) public class RunNow { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ - @JsonProperty("dbt_commands") private Collection dbtCommands; /** @@ -32,7 +42,6 @@ public class RunNow { * *

[How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html */ - @JsonProperty("idempotency_token") private String idempotencyToken; /** @@ -46,15 +55,12 @@ public class RunNow { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("jar_params") private Collection jarParams; /** The ID of the job to be executed */ - @JsonProperty("job_id") private Long jobId; /** Job-level parameters used in the run. for example `"param": "overriding_val"` */ - @JsonProperty("job_parameters") private Map jobParameters; /** @@ -74,14 +80,12 @@ public class RunNow { *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ - @JsonProperty("notebook_params") private Map notebookParams; /** * A list of task keys to run inside of the job. If this field is not provided, all tasks in the * job will be run. */ - @JsonProperty("only") private Collection only; /** @@ -93,15 +97,12 @@ public class RunNow { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("performance_target") private PerformanceTarget performanceTarget; /** Controls whether the pipeline should perform a full refresh */ - @JsonProperty("pipeline_params") private PipelineParams pipelineParams; /** */ - @JsonProperty("python_named_params") private Map pythonNamedParams; /** @@ -120,11 +121,9 @@ public class RunNow { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("python_params") private Collection pythonParams; /** The queue settings of the run. */ - @JsonProperty("queue") private QueueSettings queue; /** @@ -144,14 +143,12 @@ public class RunNow { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; /** * A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john * doe", "age": "35"}`. The SQL alert task does not support custom parameters. */ - @JsonProperty("sql_params") private Map sqlParams; public RunNow setDbtCommands(Collection dbtCommands) { @@ -339,4 +336,63 @@ public String toString() { .add("sqlParams", sqlParams) .toString(); } + + RunNowPb toPb() { + RunNowPb pb = new RunNowPb(); + pb.setDbtCommands(dbtCommands); + pb.setIdempotencyToken(idempotencyToken); + pb.setJarParams(jarParams); + pb.setJobId(jobId); + pb.setJobParameters(jobParameters); + pb.setNotebookParams(notebookParams); + pb.setOnly(only); + pb.setPerformanceTarget(performanceTarget); + pb.setPipelineParams(pipelineParams); + pb.setPythonNamedParams(pythonNamedParams); + pb.setPythonParams(pythonParams); + pb.setQueue(queue); + pb.setSparkSubmitParams(sparkSubmitParams); + pb.setSqlParams(sqlParams); + + return pb; + } + + static RunNow fromPb(RunNowPb pb) { + RunNow model = new RunNow(); + model.setDbtCommands(pb.getDbtCommands()); + model.setIdempotencyToken(pb.getIdempotencyToken()); + model.setJarParams(pb.getJarParams()); + model.setJobId(pb.getJobId()); + model.setJobParameters(pb.getJobParameters()); + model.setNotebookParams(pb.getNotebookParams()); + model.setOnly(pb.getOnly()); + model.setPerformanceTarget(pb.getPerformanceTarget()); + model.setPipelineParams(pb.getPipelineParams()); + model.setPythonNamedParams(pb.getPythonNamedParams()); + model.setPythonParams(pb.getPythonParams()); + model.setQueue(pb.getQueue()); + model.setSparkSubmitParams(pb.getSparkSubmitParams()); + model.setSqlParams(pb.getSqlParams()); + + return model; + } + + public static class RunNowSerializer extends JsonSerializer { + @Override + public void serialize(RunNow value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + RunNowPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunNowDeserializer extends JsonDeserializer { + @Override + public RunNow deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunNowPb pb = mapper.readValue(p, RunNowPb.class); + return RunNow.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowPb.java new file mode 100755 index 000000000..42db53f28 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowPb.java @@ -0,0 +1,241 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class RunNowPb { + @JsonProperty("dbt_commands") + private Collection dbtCommands; + + @JsonProperty("idempotency_token") + private String idempotencyToken; + + @JsonProperty("jar_params") + private Collection jarParams; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("job_parameters") + private Map jobParameters; + + @JsonProperty("notebook_params") + private Map notebookParams; + + @JsonProperty("only") + private Collection only; + + @JsonProperty("performance_target") + private PerformanceTarget performanceTarget; + + @JsonProperty("pipeline_params") + private PipelineParams pipelineParams; + + @JsonProperty("python_named_params") + private Map pythonNamedParams; + + @JsonProperty("python_params") + private Collection 
pythonParams; + + @JsonProperty("queue") + private QueueSettings queue; + + @JsonProperty("spark_submit_params") + private Collection sparkSubmitParams; + + @JsonProperty("sql_params") + private Map sqlParams; + + public RunNowPb setDbtCommands(Collection dbtCommands) { + this.dbtCommands = dbtCommands; + return this; + } + + public Collection getDbtCommands() { + return dbtCommands; + } + + public RunNowPb setIdempotencyToken(String idempotencyToken) { + this.idempotencyToken = idempotencyToken; + return this; + } + + public String getIdempotencyToken() { + return idempotencyToken; + } + + public RunNowPb setJarParams(Collection jarParams) { + this.jarParams = jarParams; + return this; + } + + public Collection getJarParams() { + return jarParams; + } + + public RunNowPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public RunNowPb setJobParameters(Map jobParameters) { + this.jobParameters = jobParameters; + return this; + } + + public Map getJobParameters() { + return jobParameters; + } + + public RunNowPb setNotebookParams(Map notebookParams) { + this.notebookParams = notebookParams; + return this; + } + + public Map getNotebookParams() { + return notebookParams; + } + + public RunNowPb setOnly(Collection only) { + this.only = only; + return this; + } + + public Collection getOnly() { + return only; + } + + public RunNowPb setPerformanceTarget(PerformanceTarget performanceTarget) { + this.performanceTarget = performanceTarget; + return this; + } + + public PerformanceTarget getPerformanceTarget() { + return performanceTarget; + } + + public RunNowPb setPipelineParams(PipelineParams pipelineParams) { + this.pipelineParams = pipelineParams; + return this; + } + + public PipelineParams getPipelineParams() { + return pipelineParams; + } + + public RunNowPb setPythonNamedParams(Map pythonNamedParams) { + this.pythonNamedParams = pythonNamedParams; + return this; + } + + public Map 
getPythonNamedParams() { + return pythonNamedParams; + } + + public RunNowPb setPythonParams(Collection pythonParams) { + this.pythonParams = pythonParams; + return this; + } + + public Collection getPythonParams() { + return pythonParams; + } + + public RunNowPb setQueue(QueueSettings queue) { + this.queue = queue; + return this; + } + + public QueueSettings getQueue() { + return queue; + } + + public RunNowPb setSparkSubmitParams(Collection sparkSubmitParams) { + this.sparkSubmitParams = sparkSubmitParams; + return this; + } + + public Collection getSparkSubmitParams() { + return sparkSubmitParams; + } + + public RunNowPb setSqlParams(Map sqlParams) { + this.sqlParams = sqlParams; + return this; + } + + public Map getSqlParams() { + return sqlParams; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunNowPb that = (RunNowPb) o; + return Objects.equals(dbtCommands, that.dbtCommands) + && Objects.equals(idempotencyToken, that.idempotencyToken) + && Objects.equals(jarParams, that.jarParams) + && Objects.equals(jobId, that.jobId) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(only, that.only) + && Objects.equals(performanceTarget, that.performanceTarget) + && Objects.equals(pipelineParams, that.pipelineParams) + && Objects.equals(pythonNamedParams, that.pythonNamedParams) + && Objects.equals(pythonParams, that.pythonParams) + && Objects.equals(queue, that.queue) + && Objects.equals(sparkSubmitParams, that.sparkSubmitParams) + && Objects.equals(sqlParams, that.sqlParams); + } + + @Override + public int hashCode() { + return Objects.hash( + dbtCommands, + idempotencyToken, + jarParams, + jobId, + jobParameters, + notebookParams, + only, + performanceTarget, + pipelineParams, + pythonNamedParams, + pythonParams, + queue, + sparkSubmitParams, + sqlParams); + } + + @Override + public String 
toString() { + return new ToStringer(RunNowPb.class) + .add("dbtCommands", dbtCommands) + .add("idempotencyToken", idempotencyToken) + .add("jarParams", jarParams) + .add("jobId", jobId) + .add("jobParameters", jobParameters) + .add("notebookParams", notebookParams) + .add("only", only) + .add("performanceTarget", performanceTarget) + .add("pipelineParams", pipelineParams) + .add("pythonNamedParams", pythonNamedParams) + .add("pythonParams", pythonParams) + .add("queue", queue) + .add("sparkSubmitParams", sparkSubmitParams) + .add("sqlParams", sqlParams) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java index 41460356d..5203ce0cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Run was started successfully. */ @Generated +@JsonSerialize(using = RunNowResponse.RunNowResponseSerializer.class) +@JsonDeserialize(using = RunNowResponse.RunNowResponseDeserializer.class) public class RunNowResponse { /** A unique identifier for this job run. 
This is set to the same value as `run_id`. */ - @JsonProperty("number_in_job") private Long numberInJob; /** The globally unique ID of the newly triggered run. */ - @JsonProperty("run_id") private Long runId; public RunNowResponse setNumberInJob(Long numberInJob) { @@ -56,4 +65,40 @@ public String toString() { .add("runId", runId) .toString(); } + + RunNowResponsePb toPb() { + RunNowResponsePb pb = new RunNowResponsePb(); + pb.setNumberInJob(numberInJob); + pb.setRunId(runId); + + return pb; + } + + static RunNowResponse fromPb(RunNowResponsePb pb) { + RunNowResponse model = new RunNowResponse(); + model.setNumberInJob(pb.getNumberInJob()); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class RunNowResponseSerializer extends JsonSerializer { + @Override + public void serialize(RunNowResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunNowResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunNowResponseDeserializer extends JsonDeserializer { + @Override + public RunNowResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunNowResponsePb pb = mapper.readValue(p, RunNowResponsePb.class); + return RunNowResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponsePb.java new file mode 100755 index 000000000..503a5d4bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Run was started successfully. */ +@Generated +class RunNowResponsePb { + @JsonProperty("number_in_job") + private Long numberInJob; + + @JsonProperty("run_id") + private Long runId; + + public RunNowResponsePb setNumberInJob(Long numberInJob) { + this.numberInJob = numberInJob; + return this; + } + + public Long getNumberInJob() { + return numberInJob; + } + + public RunNowResponsePb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunNowResponsePb that = (RunNowResponsePb) o; + return Objects.equals(numberInJob, that.numberInJob) && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(numberInJob, runId); + } + + @Override + public String toString() { + return new ToStringer(RunNowResponsePb.class) + .add("numberInJob", numberInJob) + .add("runId", runId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java index b635850c7..565bca206 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java @@ -4,37 +4,45 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Run output was retrieved successfully. */ @Generated +@JsonSerialize(using = RunOutput.RunOutputSerializer.class) +@JsonDeserialize(using = RunOutput.RunOutputDeserializer.class) public class RunOutput { /** The output of a clean rooms notebook task, if available */ - @JsonProperty("clean_rooms_notebook_output") private CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput; /** The output of a dashboard task, if available */ - @JsonProperty("dashboard_output") private DashboardTaskOutput dashboardOutput; + /** */ + private DbtCloudTaskOutput dbtCloudOutput; + /** The output of a dbt task, if available. */ - @JsonProperty("dbt_output") private DbtOutput dbtOutput; /** * An error message indicating why a task failed or why output is not available. The message is * unstructured, and its exact format is subject to change. */ - @JsonProperty("error") private String error; /** If there was an error executing the run, this field contains any available stack traces. */ - @JsonProperty("error_trace") private String errorTrace; /** */ - @JsonProperty("info") private String info; /** @@ -45,15 +53,12 @@ public class RunOutput { * *

Databricks restricts this API to return the last 5 MB of these logs. */ - @JsonProperty("logs") private String logs; /** Whether the logs are truncated. */ - @JsonProperty("logs_truncated") private Boolean logsTruncated; /** All details of the run except for its output. */ - @JsonProperty("metadata") private Run metadata; /** @@ -66,15 +71,12 @@ public class RunOutput { *

[ClusterLogConf]: * https://docs.databricks.com/dev-tools/api/latest/clusters.html#clusterlogconf */ - @JsonProperty("notebook_output") private NotebookOutput notebookOutput; /** The output of a run job task, if available */ - @JsonProperty("run_job_output") private RunJobOutput runJobOutput; /** The output of a SQL task, if available. */ - @JsonProperty("sql_output") private SqlOutput sqlOutput; public RunOutput setCleanRoomsNotebookOutput( @@ -96,6 +98,15 @@ public DashboardTaskOutput getDashboardOutput() { return dashboardOutput; } + public RunOutput setDbtCloudOutput(DbtCloudTaskOutput dbtCloudOutput) { + this.dbtCloudOutput = dbtCloudOutput; + return this; + } + + public DbtCloudTaskOutput getDbtCloudOutput() { + return dbtCloudOutput; + } + public RunOutput setDbtOutput(DbtOutput dbtOutput) { this.dbtOutput = dbtOutput; return this; @@ -193,6 +204,7 @@ public boolean equals(Object o) { RunOutput that = (RunOutput) o; return Objects.equals(cleanRoomsNotebookOutput, that.cleanRoomsNotebookOutput) && Objects.equals(dashboardOutput, that.dashboardOutput) + && Objects.equals(dbtCloudOutput, that.dbtCloudOutput) && Objects.equals(dbtOutput, that.dbtOutput) && Objects.equals(error, that.error) && Objects.equals(errorTrace, that.errorTrace) @@ -210,6 +222,7 @@ public int hashCode() { return Objects.hash( cleanRoomsNotebookOutput, dashboardOutput, + dbtCloudOutput, dbtOutput, error, errorTrace, @@ -227,6 +240,7 @@ public String toString() { return new ToStringer(RunOutput.class) .add("cleanRoomsNotebookOutput", cleanRoomsNotebookOutput) .add("dashboardOutput", dashboardOutput) + .add("dbtCloudOutput", dbtCloudOutput) .add("dbtOutput", dbtOutput) .add("error", error) .add("errorTrace", errorTrace) @@ -239,4 +253,61 @@ public String toString() { .add("sqlOutput", sqlOutput) .toString(); } + + RunOutputPb toPb() { + RunOutputPb pb = new RunOutputPb(); + pb.setCleanRoomsNotebookOutput(cleanRoomsNotebookOutput); + pb.setDashboardOutput(dashboardOutput); + 
pb.setDbtCloudOutput(dbtCloudOutput); + pb.setDbtOutput(dbtOutput); + pb.setError(error); + pb.setErrorTrace(errorTrace); + pb.setInfo(info); + pb.setLogs(logs); + pb.setLogsTruncated(logsTruncated); + pb.setMetadata(metadata); + pb.setNotebookOutput(notebookOutput); + pb.setRunJobOutput(runJobOutput); + pb.setSqlOutput(sqlOutput); + + return pb; + } + + static RunOutput fromPb(RunOutputPb pb) { + RunOutput model = new RunOutput(); + model.setCleanRoomsNotebookOutput(pb.getCleanRoomsNotebookOutput()); + model.setDashboardOutput(pb.getDashboardOutput()); + model.setDbtCloudOutput(pb.getDbtCloudOutput()); + model.setDbtOutput(pb.getDbtOutput()); + model.setError(pb.getError()); + model.setErrorTrace(pb.getErrorTrace()); + model.setInfo(pb.getInfo()); + model.setLogs(pb.getLogs()); + model.setLogsTruncated(pb.getLogsTruncated()); + model.setMetadata(pb.getMetadata()); + model.setNotebookOutput(pb.getNotebookOutput()); + model.setRunJobOutput(pb.getRunJobOutput()); + model.setSqlOutput(pb.getSqlOutput()); + + return model; + } + + public static class RunOutputSerializer extends JsonSerializer { + @Override + public void serialize(RunOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunOutputDeserializer extends JsonDeserializer { + @Override + public RunOutput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunOutputPb pb = mapper.readValue(p, RunOutputPb.class); + return RunOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutputPb.java new file mode 100755 index 000000000..33b96ee3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutputPb.java @@ -0,0 +1,226 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Run output was retrieved successfully. */ +@Generated +class RunOutputPb { + @JsonProperty("clean_rooms_notebook_output") + private CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput; + + @JsonProperty("dashboard_output") + private DashboardTaskOutput dashboardOutput; + + @JsonProperty("dbt_cloud_output") + private DbtCloudTaskOutput dbtCloudOutput; + + @JsonProperty("dbt_output") + private DbtOutput dbtOutput; + + @JsonProperty("error") + private String error; + + @JsonProperty("error_trace") + private String errorTrace; + + @JsonProperty("info") + private String info; + + @JsonProperty("logs") + private String logs; + + @JsonProperty("logs_truncated") + private Boolean logsTruncated; + + @JsonProperty("metadata") + private Run metadata; + + @JsonProperty("notebook_output") + private NotebookOutput notebookOutput; + + @JsonProperty("run_job_output") + private RunJobOutput runJobOutput; + + @JsonProperty("sql_output") + private SqlOutput sqlOutput; + + public RunOutputPb setCleanRoomsNotebookOutput( + CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput) { + this.cleanRoomsNotebookOutput = cleanRoomsNotebookOutput; + 
return this; + } + + public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput getCleanRoomsNotebookOutput() { + return cleanRoomsNotebookOutput; + } + + public RunOutputPb setDashboardOutput(DashboardTaskOutput dashboardOutput) { + this.dashboardOutput = dashboardOutput; + return this; + } + + public DashboardTaskOutput getDashboardOutput() { + return dashboardOutput; + } + + public RunOutputPb setDbtCloudOutput(DbtCloudTaskOutput dbtCloudOutput) { + this.dbtCloudOutput = dbtCloudOutput; + return this; + } + + public DbtCloudTaskOutput getDbtCloudOutput() { + return dbtCloudOutput; + } + + public RunOutputPb setDbtOutput(DbtOutput dbtOutput) { + this.dbtOutput = dbtOutput; + return this; + } + + public DbtOutput getDbtOutput() { + return dbtOutput; + } + + public RunOutputPb setError(String error) { + this.error = error; + return this; + } + + public String getError() { + return error; + } + + public RunOutputPb setErrorTrace(String errorTrace) { + this.errorTrace = errorTrace; + return this; + } + + public String getErrorTrace() { + return errorTrace; + } + + public RunOutputPb setInfo(String info) { + this.info = info; + return this; + } + + public String getInfo() { + return info; + } + + public RunOutputPb setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + public RunOutputPb setLogsTruncated(Boolean logsTruncated) { + this.logsTruncated = logsTruncated; + return this; + } + + public Boolean getLogsTruncated() { + return logsTruncated; + } + + public RunOutputPb setMetadata(Run metadata) { + this.metadata = metadata; + return this; + } + + public Run getMetadata() { + return metadata; + } + + public RunOutputPb setNotebookOutput(NotebookOutput notebookOutput) { + this.notebookOutput = notebookOutput; + return this; + } + + public NotebookOutput getNotebookOutput() { + return notebookOutput; + } + + public RunOutputPb setRunJobOutput(RunJobOutput runJobOutput) { + this.runJobOutput = runJobOutput; + 
return this; + } + + public RunJobOutput getRunJobOutput() { + return runJobOutput; + } + + public RunOutputPb setSqlOutput(SqlOutput sqlOutput) { + this.sqlOutput = sqlOutput; + return this; + } + + public SqlOutput getSqlOutput() { + return sqlOutput; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunOutputPb that = (RunOutputPb) o; + return Objects.equals(cleanRoomsNotebookOutput, that.cleanRoomsNotebookOutput) + && Objects.equals(dashboardOutput, that.dashboardOutput) + && Objects.equals(dbtCloudOutput, that.dbtCloudOutput) + && Objects.equals(dbtOutput, that.dbtOutput) + && Objects.equals(error, that.error) + && Objects.equals(errorTrace, that.errorTrace) + && Objects.equals(info, that.info) + && Objects.equals(logs, that.logs) + && Objects.equals(logsTruncated, that.logsTruncated) + && Objects.equals(metadata, that.metadata) + && Objects.equals(notebookOutput, that.notebookOutput) + && Objects.equals(runJobOutput, that.runJobOutput) + && Objects.equals(sqlOutput, that.sqlOutput); + } + + @Override + public int hashCode() { + return Objects.hash( + cleanRoomsNotebookOutput, + dashboardOutput, + dbtCloudOutput, + dbtOutput, + error, + errorTrace, + info, + logs, + logsTruncated, + metadata, + notebookOutput, + runJobOutput, + sqlOutput); + } + + @Override + public String toString() { + return new ToStringer(RunOutputPb.class) + .add("cleanRoomsNotebookOutput", cleanRoomsNotebookOutput) + .add("dashboardOutput", dashboardOutput) + .add("dbtCloudOutput", dbtCloudOutput) + .add("dbtOutput", dbtOutput) + .add("error", error) + .add("errorTrace", errorTrace) + .add("info", info) + .add("logs", logs) + .add("logsTruncated", logsTruncated) + .add("metadata", metadata) + .add("notebookOutput", notebookOutput) + .add("runJobOutput", runJobOutput) + .add("sqlOutput", sqlOutput) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java index be39a1c4e..d18a64de3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = RunParameters.RunParametersSerializer.class) +@JsonDeserialize(using = RunParameters.RunParametersDeserializer.class) public class RunParameters { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ - @JsonProperty("dbt_commands") private Collection dbtCommands; /** @@ -29,7 +39,6 @@ public class RunParameters { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("jar_params") private Collection jarParams; /** @@ -49,15 +58,12 @@ public class RunParameters { *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ - @JsonProperty("notebook_params") private Map notebookParams; /** Controls whether the pipeline should perform a full refresh */ - @JsonProperty("pipeline_params") private PipelineParams pipelineParams; /** */ - @JsonProperty("python_named_params") private Map pythonNamedParams; /** @@ -76,7 +82,6 @@ public class RunParameters { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("python_params") private Collection pythonParams; /** @@ -96,14 +101,12 @@ public class RunParameters { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; /** * A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john * doe", "age": "35"}`. The SQL alert task does not support custom parameters. */ - @JsonProperty("sql_params") private Map sqlParams; public RunParameters setDbtCommands(Collection dbtCommands) { @@ -219,4 +222,51 @@ public String toString() { .add("sqlParams", sqlParams) .toString(); } + + RunParametersPb toPb() { + RunParametersPb pb = new RunParametersPb(); + pb.setDbtCommands(dbtCommands); + pb.setJarParams(jarParams); + pb.setNotebookParams(notebookParams); + pb.setPipelineParams(pipelineParams); + pb.setPythonNamedParams(pythonNamedParams); + pb.setPythonParams(pythonParams); + pb.setSparkSubmitParams(sparkSubmitParams); + pb.setSqlParams(sqlParams); + + return pb; + } + + static RunParameters fromPb(RunParametersPb pb) { + RunParameters model = new RunParameters(); + model.setDbtCommands(pb.getDbtCommands()); + model.setJarParams(pb.getJarParams()); + model.setNotebookParams(pb.getNotebookParams()); + model.setPipelineParams(pb.getPipelineParams()); + model.setPythonNamedParams(pb.getPythonNamedParams()); + model.setPythonParams(pb.getPythonParams()); + model.setSparkSubmitParams(pb.getSparkSubmitParams()); + model.setSqlParams(pb.getSqlParams()); + + return model; + } + + public static class RunParametersSerializer extends JsonSerializer { + @Override + public void serialize(RunParameters value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunParametersPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunParametersDeserializer extends JsonDeserializer { + @Override + public RunParameters deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunParametersPb pb = mapper.readValue(p, RunParametersPb.class); + return RunParameters.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParametersPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParametersPb.java new file mode 100755 index 000000000..7ef8c0484 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParametersPb.java @@ -0,0 +1,151 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class RunParametersPb { + @JsonProperty("dbt_commands") + private Collection dbtCommands; + + @JsonProperty("jar_params") + private Collection jarParams; + + @JsonProperty("notebook_params") + private Map notebookParams; + + @JsonProperty("pipeline_params") + private PipelineParams pipelineParams; + + @JsonProperty("python_named_params") + private Map pythonNamedParams; + + @JsonProperty("python_params") + private Collection pythonParams; + + @JsonProperty("spark_submit_params") + private Collection sparkSubmitParams; + + @JsonProperty("sql_params") + private Map sqlParams; + + public RunParametersPb setDbtCommands(Collection dbtCommands) { + this.dbtCommands = dbtCommands; + return this; + } + + public Collection getDbtCommands() { + return dbtCommands; + } + + public RunParametersPb setJarParams(Collection jarParams) { + this.jarParams = jarParams; + return this; + } + + public Collection getJarParams() { + return jarParams; + } + + public RunParametersPb setNotebookParams(Map notebookParams) { + this.notebookParams = notebookParams; + return this; + } + + public Map 
getNotebookParams() { + return notebookParams; + } + + public RunParametersPb setPipelineParams(PipelineParams pipelineParams) { + this.pipelineParams = pipelineParams; + return this; + } + + public PipelineParams getPipelineParams() { + return pipelineParams; + } + + public RunParametersPb setPythonNamedParams(Map pythonNamedParams) { + this.pythonNamedParams = pythonNamedParams; + return this; + } + + public Map getPythonNamedParams() { + return pythonNamedParams; + } + + public RunParametersPb setPythonParams(Collection pythonParams) { + this.pythonParams = pythonParams; + return this; + } + + public Collection getPythonParams() { + return pythonParams; + } + + public RunParametersPb setSparkSubmitParams(Collection sparkSubmitParams) { + this.sparkSubmitParams = sparkSubmitParams; + return this; + } + + public Collection getSparkSubmitParams() { + return sparkSubmitParams; + } + + public RunParametersPb setSqlParams(Map sqlParams) { + this.sqlParams = sqlParams; + return this; + } + + public Map getSqlParams() { + return sqlParams; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunParametersPb that = (RunParametersPb) o; + return Objects.equals(dbtCommands, that.dbtCommands) + && Objects.equals(jarParams, that.jarParams) + && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(pipelineParams, that.pipelineParams) + && Objects.equals(pythonNamedParams, that.pythonNamedParams) + && Objects.equals(pythonParams, that.pythonParams) + && Objects.equals(sparkSubmitParams, that.sparkSubmitParams) + && Objects.equals(sqlParams, that.sqlParams); + } + + @Override + public int hashCode() { + return Objects.hash( + dbtCommands, + jarParams, + notebookParams, + pipelineParams, + pythonNamedParams, + pythonParams, + sparkSubmitParams, + sqlParams); + } + + @Override + public String toString() { + return new ToStringer(RunParametersPb.class) + 
.add("dbtCommands", dbtCommands) + .add("jarParams", jarParams) + .add("notebookParams", notebookParams) + .add("pipelineParams", pipelineParams) + .add("pythonNamedParams", pythonNamedParams) + .add("pythonParams", pythonParams) + .add("sparkSubmitParams", sparkSubmitParams) + .add("sqlParams", sqlParams) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunPb.java new file mode 100755 index 000000000..1fd16595e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunPb.java @@ -0,0 +1,556 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Run was retrieved successfully */ +@Generated +class RunPb { + @JsonProperty("attempt_number") + private Long attemptNumber; + + @JsonProperty("cleanup_duration") + private Long cleanupDuration; + + @JsonProperty("cluster_instance") + private ClusterInstance clusterInstance; + + @JsonProperty("cluster_spec") + private ClusterSpec clusterSpec; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("description") + private String description; + + @JsonProperty("effective_performance_target") + private PerformanceTarget effectivePerformanceTarget; + + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("execution_duration") + private Long executionDuration; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("has_more") + private Boolean hasMore; + + @JsonProperty("iterations") + private Collection iterations; + + @JsonProperty("job_clusters") + private Collection jobClusters; + + @JsonProperty("job_id") + 
private Long jobId; + + @JsonProperty("job_parameters") + private Collection jobParameters; + + @JsonProperty("job_run_id") + private Long jobRunId; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("number_in_job") + private Long numberInJob; + + @JsonProperty("original_attempt_run_id") + private Long originalAttemptRunId; + + @JsonProperty("overriding_parameters") + private RunParameters overridingParameters; + + @JsonProperty("queue_duration") + private Long queueDuration; + + @JsonProperty("repair_history") + private Collection repairHistory; + + @JsonProperty("run_duration") + private Long runDuration; + + @JsonProperty("run_id") + private Long runId; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("run_page_url") + private String runPageUrl; + + @JsonProperty("run_type") + private RunType runType; + + @JsonProperty("schedule") + private CronSchedule schedule; + + @JsonProperty("setup_duration") + private Long setupDuration; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("state") + private RunState state; + + @JsonProperty("status") + private RunStatus status; + + @JsonProperty("tasks") + private Collection tasks; + + @JsonProperty("trigger") + private TriggerType trigger; + + @JsonProperty("trigger_info") + private TriggerInfo triggerInfo; + + public RunPb setAttemptNumber(Long attemptNumber) { + this.attemptNumber = attemptNumber; + return this; + } + + public Long getAttemptNumber() { + return attemptNumber; + } + + public RunPb setCleanupDuration(Long cleanupDuration) { + this.cleanupDuration = cleanupDuration; + return this; + } + + public Long getCleanupDuration() { + return cleanupDuration; + } + + public RunPb setClusterInstance(ClusterInstance clusterInstance) { + this.clusterInstance = clusterInstance; + return this; + } + + public ClusterInstance getClusterInstance() { + return clusterInstance; + } + + public RunPb setClusterSpec(ClusterSpec clusterSpec) { + 
this.clusterSpec = clusterSpec; + return this; + } + + public ClusterSpec getClusterSpec() { + return clusterSpec; + } + + public RunPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public RunPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public RunPb setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) { + this.effectivePerformanceTarget = effectivePerformanceTarget; + return this; + } + + public PerformanceTarget getEffectivePerformanceTarget() { + return effectivePerformanceTarget; + } + + public RunPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public RunPb setExecutionDuration(Long executionDuration) { + this.executionDuration = executionDuration; + return this; + } + + public Long getExecutionDuration() { + return executionDuration; + } + + public RunPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public RunPb setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + + public RunPb setIterations(Collection iterations) { + this.iterations = iterations; + return this; + } + + public Collection getIterations() { + return iterations; + } + + public RunPb setJobClusters(Collection jobClusters) { + this.jobClusters = jobClusters; + return this; + } + + public Collection getJobClusters() { + return jobClusters; + } + + public RunPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public RunPb setJobParameters(Collection jobParameters) { + this.jobParameters = 
jobParameters; + return this; + } + + public Collection getJobParameters() { + return jobParameters; + } + + public RunPb setJobRunId(Long jobRunId) { + this.jobRunId = jobRunId; + return this; + } + + public Long getJobRunId() { + return jobRunId; + } + + public RunPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public RunPb setNumberInJob(Long numberInJob) { + this.numberInJob = numberInJob; + return this; + } + + public Long getNumberInJob() { + return numberInJob; + } + + public RunPb setOriginalAttemptRunId(Long originalAttemptRunId) { + this.originalAttemptRunId = originalAttemptRunId; + return this; + } + + public Long getOriginalAttemptRunId() { + return originalAttemptRunId; + } + + public RunPb setOverridingParameters(RunParameters overridingParameters) { + this.overridingParameters = overridingParameters; + return this; + } + + public RunParameters getOverridingParameters() { + return overridingParameters; + } + + public RunPb setQueueDuration(Long queueDuration) { + this.queueDuration = queueDuration; + return this; + } + + public Long getQueueDuration() { + return queueDuration; + } + + public RunPb setRepairHistory(Collection repairHistory) { + this.repairHistory = repairHistory; + return this; + } + + public Collection getRepairHistory() { + return repairHistory; + } + + public RunPb setRunDuration(Long runDuration) { + this.runDuration = runDuration; + return this; + } + + public Long getRunDuration() { + return runDuration; + } + + public RunPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + public RunPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public RunPb setRunPageUrl(String runPageUrl) { + this.runPageUrl = runPageUrl; + return this; + } + + public String 
getRunPageUrl() { + return runPageUrl; + } + + public RunPb setRunType(RunType runType) { + this.runType = runType; + return this; + } + + public RunType getRunType() { + return runType; + } + + public RunPb setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public RunPb setSetupDuration(Long setupDuration) { + this.setupDuration = setupDuration; + return this; + } + + public Long getSetupDuration() { + return setupDuration; + } + + public RunPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public RunPb setState(RunState state) { + this.state = state; + return this; + } + + public RunState getState() { + return state; + } + + public RunPb setStatus(RunStatus status) { + this.status = status; + return this; + } + + public RunStatus getStatus() { + return status; + } + + public RunPb setTasks(Collection tasks) { + this.tasks = tasks; + return this; + } + + public Collection getTasks() { + return tasks; + } + + public RunPb setTrigger(TriggerType trigger) { + this.trigger = trigger; + return this; + } + + public TriggerType getTrigger() { + return trigger; + } + + public RunPb setTriggerInfo(TriggerInfo triggerInfo) { + this.triggerInfo = triggerInfo; + return this; + } + + public TriggerInfo getTriggerInfo() { + return triggerInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunPb that = (RunPb) o; + return Objects.equals(attemptNumber, that.attemptNumber) + && Objects.equals(cleanupDuration, that.cleanupDuration) + && Objects.equals(clusterInstance, that.clusterInstance) + && Objects.equals(clusterSpec, that.clusterSpec) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(description, that.description) + && 
Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget) + && Objects.equals(endTime, that.endTime) + && Objects.equals(executionDuration, that.executionDuration) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(hasMore, that.hasMore) + && Objects.equals(iterations, that.iterations) + && Objects.equals(jobClusters, that.jobClusters) + && Objects.equals(jobId, that.jobId) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(jobRunId, that.jobRunId) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(numberInJob, that.numberInJob) + && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) + && Objects.equals(overridingParameters, that.overridingParameters) + && Objects.equals(queueDuration, that.queueDuration) + && Objects.equals(repairHistory, that.repairHistory) + && Objects.equals(runDuration, that.runDuration) + && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) + && Objects.equals(runPageUrl, that.runPageUrl) + && Objects.equals(runType, that.runType) + && Objects.equals(schedule, that.schedule) + && Objects.equals(setupDuration, that.setupDuration) + && Objects.equals(startTime, that.startTime) + && Objects.equals(state, that.state) + && Objects.equals(status, that.status) + && Objects.equals(tasks, that.tasks) + && Objects.equals(trigger, that.trigger) + && Objects.equals(triggerInfo, that.triggerInfo); + } + + @Override + public int hashCode() { + return Objects.hash( + attemptNumber, + cleanupDuration, + clusterInstance, + clusterSpec, + creatorUserName, + description, + effectivePerformanceTarget, + endTime, + executionDuration, + gitSource, + hasMore, + iterations, + jobClusters, + jobId, + jobParameters, + jobRunId, + nextPageToken, + numberInJob, + originalAttemptRunId, + overridingParameters, + queueDuration, + repairHistory, + runDuration, + runId, + runName, + runPageUrl, + runType, + schedule, + setupDuration, + startTime, + state, 
+ status, + tasks, + trigger, + triggerInfo); + } + + @Override + public String toString() { + return new ToStringer(RunPb.class) + .add("attemptNumber", attemptNumber) + .add("cleanupDuration", cleanupDuration) + .add("clusterInstance", clusterInstance) + .add("clusterSpec", clusterSpec) + .add("creatorUserName", creatorUserName) + .add("description", description) + .add("effectivePerformanceTarget", effectivePerformanceTarget) + .add("endTime", endTime) + .add("executionDuration", executionDuration) + .add("gitSource", gitSource) + .add("hasMore", hasMore) + .add("iterations", iterations) + .add("jobClusters", jobClusters) + .add("jobId", jobId) + .add("jobParameters", jobParameters) + .add("jobRunId", jobRunId) + .add("nextPageToken", nextPageToken) + .add("numberInJob", numberInJob) + .add("originalAttemptRunId", originalAttemptRunId) + .add("overridingParameters", overridingParameters) + .add("queueDuration", queueDuration) + .add("repairHistory", repairHistory) + .add("runDuration", runDuration) + .add("runId", runId) + .add("runName", runName) + .add("runPageUrl", runPageUrl) + .add("runType", runType) + .add("schedule", schedule) + .add("setupDuration", setupDuration) + .add("startTime", startTime) + .add("state", state) + .add("status", status) + .add("tasks", tasks) + .add("trigger", trigger) + .add("triggerInfo", triggerInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunState.java index 47489db01..d3b859321 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunState.java @@ -4,42 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The current state of the run. */ @Generated +@JsonSerialize(using = RunState.RunStateSerializer.class) +@JsonDeserialize(using = RunState.RunStateDeserializer.class) public class RunState { /** * A value indicating the run's current lifecycle state. This field is always available in the * response. Note: Additional states might be introduced in future releases. */ - @JsonProperty("life_cycle_state") private RunLifeCycleState lifeCycleState; /** The reason indicating why the run was queued. */ - @JsonProperty("queue_reason") private String queueReason; /** * A value indicating the run's result. This field is only available for terminal lifecycle * states. Note: Additional states might be introduced in future releases. */ - @JsonProperty("result_state") private RunResultState resultState; /** * A descriptive message for the current state. This field is unstructured, and its exact format * is subject to change. */ - @JsonProperty("state_message") private String stateMessage; /** * A value indicating whether a run was canceled manually by a user or by the scheduler because * the run timed out. 
*/ - @JsonProperty("user_cancelled_or_timedout") private Boolean userCancelledOrTimedout; public RunState setLifeCycleState(RunLifeCycleState lifeCycleState) { @@ -115,4 +121,45 @@ public String toString() { .add("userCancelledOrTimedout", userCancelledOrTimedout) .toString(); } + + RunStatePb toPb() { + RunStatePb pb = new RunStatePb(); + pb.setLifeCycleState(lifeCycleState); + pb.setQueueReason(queueReason); + pb.setResultState(resultState); + pb.setStateMessage(stateMessage); + pb.setUserCancelledOrTimedout(userCancelledOrTimedout); + + return pb; + } + + static RunState fromPb(RunStatePb pb) { + RunState model = new RunState(); + model.setLifeCycleState(pb.getLifeCycleState()); + model.setQueueReason(pb.getQueueReason()); + model.setResultState(pb.getResultState()); + model.setStateMessage(pb.getStateMessage()); + model.setUserCancelledOrTimedout(pb.getUserCancelledOrTimedout()); + + return model; + } + + public static class RunStateSerializer extends JsonSerializer { + @Override + public void serialize(RunState value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunStatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunStateDeserializer extends JsonDeserializer { + @Override + public RunState deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunStatePb pb = mapper.readValue(p, RunStatePb.class); + return RunState.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatePb.java new file mode 100755 index 000000000..7516aba42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The current state of the run. */ +@Generated +class RunStatePb { + @JsonProperty("life_cycle_state") + private RunLifeCycleState lifeCycleState; + + @JsonProperty("queue_reason") + private String queueReason; + + @JsonProperty("result_state") + private RunResultState resultState; + + @JsonProperty("state_message") + private String stateMessage; + + @JsonProperty("user_cancelled_or_timedout") + private Boolean userCancelledOrTimedout; + + public RunStatePb setLifeCycleState(RunLifeCycleState lifeCycleState) { + this.lifeCycleState = lifeCycleState; + return this; + } + + public RunLifeCycleState getLifeCycleState() { + return lifeCycleState; + } + + public RunStatePb setQueueReason(String queueReason) { + this.queueReason = queueReason; + return this; + } + + public String getQueueReason() { + return queueReason; + } + + public RunStatePb setResultState(RunResultState resultState) { + this.resultState = resultState; + return this; + } + + public RunResultState getResultState() { + return resultState; + } + + public RunStatePb setStateMessage(String stateMessage) { + this.stateMessage = stateMessage; + return this; + } + + public String getStateMessage() { + return 
stateMessage; + } + + public RunStatePb setUserCancelledOrTimedout(Boolean userCancelledOrTimedout) { + this.userCancelledOrTimedout = userCancelledOrTimedout; + return this; + } + + public Boolean getUserCancelledOrTimedout() { + return userCancelledOrTimedout; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunStatePb that = (RunStatePb) o; + return Objects.equals(lifeCycleState, that.lifeCycleState) + && Objects.equals(queueReason, that.queueReason) + && Objects.equals(resultState, that.resultState) + && Objects.equals(stateMessage, that.stateMessage) + && Objects.equals(userCancelledOrTimedout, that.userCancelledOrTimedout); + } + + @Override + public int hashCode() { + return Objects.hash( + lifeCycleState, queueReason, resultState, stateMessage, userCancelledOrTimedout); + } + + @Override + public String toString() { + return new ToStringer(RunStatePb.class) + .add("lifeCycleState", lifeCycleState) + .add("queueReason", queueReason) + .add("resultState", resultState) + .add("stateMessage", stateMessage) + .add("userCancelledOrTimedout", userCancelledOrTimedout) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java index e9219f045..671ec91fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The current status of the run */ @Generated +@JsonSerialize(using = RunStatus.RunStatusSerializer.class) +@JsonDeserialize(using = RunStatus.RunStatusDeserializer.class) public class RunStatus { /** If the run was queued, details about the reason for queuing the run. */ - @JsonProperty("queue_details") private QueueDetails queueDetails; /** The current state of the run. */ - @JsonProperty("state") private RunLifecycleStateV2State state; /** * If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating * the run. */ - @JsonProperty("termination_details") private TerminationDetails terminationDetails; public RunStatus setQueueDetails(QueueDetails queueDetails) { @@ -75,4 +83,41 @@ public String toString() { .add("terminationDetails", terminationDetails) .toString(); } + + RunStatusPb toPb() { + RunStatusPb pb = new RunStatusPb(); + pb.setQueueDetails(queueDetails); + pb.setState(state); + pb.setTerminationDetails(terminationDetails); + + return pb; + } + + static RunStatus fromPb(RunStatusPb pb) { + RunStatus model = new RunStatus(); + model.setQueueDetails(pb.getQueueDetails()); + model.setState(pb.getState()); + model.setTerminationDetails(pb.getTerminationDetails()); + + return model; + } + + public static class RunStatusSerializer extends JsonSerializer { + @Override + public void serialize(RunStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunStatusDeserializer extends JsonDeserializer { + @Override + public RunStatus deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunStatusPb pb = mapper.readValue(p, RunStatusPb.class); + return RunStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatusPb.java new file mode 100755 index 000000000..2b1278e19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatusPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The current status of the run */ +@Generated +class RunStatusPb { + @JsonProperty("queue_details") + private QueueDetails queueDetails; + + @JsonProperty("state") + private RunLifecycleStateV2State state; + + @JsonProperty("termination_details") + private TerminationDetails terminationDetails; + + public RunStatusPb setQueueDetails(QueueDetails queueDetails) { + this.queueDetails = queueDetails; + return this; + } + + public QueueDetails getQueueDetails() { + return queueDetails; + } + + public RunStatusPb setState(RunLifecycleStateV2State state) { + this.state = state; + return this; + } + + public RunLifecycleStateV2State getState() { + return state; + } + + public RunStatusPb setTerminationDetails(TerminationDetails terminationDetails) { + this.terminationDetails = terminationDetails; + return this; + } + + public TerminationDetails getTerminationDetails() { + return terminationDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
RunStatusPb that = (RunStatusPb) o; + return Objects.equals(queueDetails, that.queueDetails) + && Objects.equals(state, that.state) + && Objects.equals(terminationDetails, that.terminationDetails); + } + + @Override + public int hashCode() { + return Objects.hash(queueDetails, state, terminationDetails); + } + + @Override + public String toString() { + return new ToStringer(RunStatusPb.class) + .add("queueDetails", queueDetails) + .add("state", state) + .add("terminationDetails", terminationDetails) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index ce8826caf..890d9f0a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Used when outputting a child run, in GetRun or ListRuns. */ @Generated +@JsonSerialize(using = RunTask.RunTaskSerializer.class) +@JsonDeserialize(using = RunTask.RunTaskDeserializer.class) public class RunTask { /** * The sequence number of this run attempt for a triggered job run. 
The initial attempt of a run @@ -18,7 +29,6 @@ public class RunTask { * original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they * succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job. */ - @JsonProperty("attempt_number") private Long attemptNumber; /** @@ -26,7 +36,6 @@ public class RunTask { * *

[clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html */ - @JsonProperty("clean_rooms_notebook_task") private CleanRoomsNotebookTask cleanRoomsNotebookTask; /** @@ -35,14 +44,12 @@ public class RunTask { * and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. * The total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("cleanup_duration") private Long cleanupDuration; /** * The cluster used for this run. If the run is specified to use a new cluster, this field is set * once the Jobs service has requested a cluster for the run. */ - @JsonProperty("cluster_instance") private ClusterInstance clusterInstance; /** @@ -50,18 +57,18 @@ public class RunTask { * the `condition_task` field is present. The condition task does not require a cluster to execute * and does not support retries or notifications. */ - @JsonProperty("condition_task") private RunConditionTask conditionTask; /** The task refreshes a dashboard and sends a snapshot to subscribers. */ - @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. */ - @JsonProperty("dbt_task") private DbtTask dbtTask; /** @@ -69,15 +76,12 @@ public class RunTask { * in this field must complete successfully before executing this task. The key is `task_key`, and * the value is the name assigned to the dependent task. */ - @JsonProperty("depends_on") private Collection dependsOn; /** An optional description for this task. */ - @JsonProperty("description") private String description; /** Deprecated, field was never used in production. 
*/ - @JsonProperty("disabled") private Boolean disabled; /** @@ -89,28 +93,24 @@ public class RunTask { * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and * optimized cluster performance. */ - @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; /** * An optional set of email addresses notified when the task run begins or completes. The default * behavior is to not send any emails. */ - @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. */ - @JsonProperty("end_time") private Long endTime; /** * The key that references an environment spec in a job. This field is required for Python script, * Python wheel and dbt tasks when using serverless compute. */ - @JsonProperty("environment_key") private String environmentKey; /** @@ -120,7 +120,6 @@ public class RunTask { * `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("execution_duration") private Long executionDuration; /** @@ -128,18 +127,15 @@ public class RunTask { * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops * responding. We suggest running jobs and tasks on new clusters for greater reliability */ - @JsonProperty("existing_cluster_id") private String existingClusterId; /** * The task executes a nested task for every input provided when the `for_each_task` field is * present. */ - @JsonProperty("for_each_task") private RunForEachTask forEachTask; /** */ - @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; /** @@ -150,69 +146,56 @@ public class RunTask { * task. 
Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** * If job_cluster_key, this task is executed reusing the cluster specified in * `job.settings.job_clusters`. */ - @JsonProperty("job_cluster_key") private String jobClusterKey; /** * An optional list of libraries to be installed on the cluster. The default value is an empty * list. */ - @JsonProperty("libraries") private Collection libraries; /** If new_cluster, a description of a new cluster that is created for each run. */ - @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; /** The task runs a notebook when the `notebook_task` field is present. */ - @JsonProperty("notebook_task") private NotebookTask notebookTask; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this task run. */ - @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; /** * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines * configured to use triggered more are supported. */ - @JsonProperty("pipeline_task") private PipelineTask pipelineTask; /** * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. */ - @JsonProperty("power_bi_task") private PowerBiTask powerBiTask; /** The task runs a Python wheel when the `python_wheel_task` field is present. */ - @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; /** The time in milliseconds that the run has spent in the queue. 
*/ - @JsonProperty("queue_duration") private Long queueDuration; /** Parameter values including resolved references */ - @JsonProperty("resolved_values") private ResolvedValues resolvedValues; /** The time in milliseconds it took the job run and all of its repairs to finish. */ - @JsonProperty("run_duration") private Long runDuration; /** The ID of the task run. */ - @JsonProperty("run_id") private Long runId; /** @@ -220,15 +203,12 @@ public class RunTask { * its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. See * :method:jobs/create for a list of possible values. */ - @JsonProperty("run_if") private RunIf runIf; /** The task triggers another job when the `run_job_task` field is present. */ - @JsonProperty("run_job_task") private RunJobTask runJobTask; /** */ - @JsonProperty("run_page_url") private String runPageUrl; /** @@ -238,15 +218,12 @@ public class RunTask { * the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The * total duration of a multitask job run is the value of the `run_duration` field. */ - @JsonProperty("setup_duration") private Long setupDuration; /** The task runs a JAR when the `spark_jar_task` field is present. */ - @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; /** The task runs a Python file when the `spark_python_task` field is present. */ - @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** @@ -266,14 +243,12 @@ public class RunTask { * *

The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. */ - @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; /** * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when * the `sql_task` field is present. */ - @JsonProperty("sql_task") private SqlTask sqlTask; /** @@ -281,15 +256,12 @@ public class RunTask { * This may not be the time when the job task starts executing, for example, if the job is * scheduled to run on a new cluster, this is the time the cluster creation call is issued. */ - @JsonProperty("start_time") private Long startTime; /** Deprecated. Please use the `status` field instead. */ - @JsonProperty("state") private RunState state; /** The current status of the run */ - @JsonProperty("status") private RunStatus status; /** @@ -297,11 +269,9 @@ public class RunTask { * field is required and must be unique within its parent job. On Update or Reset, this field is * used to reference the tasks to be updated or reset. */ - @JsonProperty("task_key") private String taskKey; /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** @@ -309,7 +279,6 @@ public class RunTask { * behavior is to not send any system notifications. Task webhooks respect the task notification * settings. 
*/ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; public RunTask setAttemptNumber(Long attemptNumber) { @@ -366,6 +335,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public RunTask setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public RunTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -728,6 +706,7 @@ public boolean equals(Object o) { && Objects.equals(clusterInstance, that.clusterInstance) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -778,6 +757,7 @@ public int hashCode() { clusterInstance, conditionTask, dashboardTask, + dbtCloudTask, dbtTask, dependsOn, description, @@ -828,6 +808,7 @@ public String toString() { .add("clusterInstance", clusterInstance) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -869,4 +850,127 @@ public String toString() { .add("webhookNotifications", webhookNotifications) .toString(); } + + RunTaskPb toPb() { + RunTaskPb pb = new RunTaskPb(); + pb.setAttemptNumber(attemptNumber); + pb.setCleanRoomsNotebookTask(cleanRoomsNotebookTask); + pb.setCleanupDuration(cleanupDuration); + pb.setClusterInstance(clusterInstance); + pb.setConditionTask(conditionTask); + pb.setDashboardTask(dashboardTask); + pb.setDbtCloudTask(dbtCloudTask); + pb.setDbtTask(dbtTask); + pb.setDependsOn(dependsOn); + pb.setDescription(description); + pb.setDisabled(disabled); + pb.setEffectivePerformanceTarget(effectivePerformanceTarget); + 
pb.setEmailNotifications(emailNotifications); + pb.setEndTime(endTime); + pb.setEnvironmentKey(environmentKey); + pb.setExecutionDuration(executionDuration); + pb.setExistingClusterId(existingClusterId); + pb.setForEachTask(forEachTask); + pb.setGenAiComputeTask(genAiComputeTask); + pb.setGitSource(gitSource); + pb.setJobClusterKey(jobClusterKey); + pb.setLibraries(libraries); + pb.setNewCluster(newCluster); + pb.setNotebookTask(notebookTask); + pb.setNotificationSettings(notificationSettings); + pb.setPipelineTask(pipelineTask); + pb.setPowerBiTask(powerBiTask); + pb.setPythonWheelTask(pythonWheelTask); + pb.setQueueDuration(queueDuration); + pb.setResolvedValues(resolvedValues); + pb.setRunDuration(runDuration); + pb.setRunId(runId); + pb.setRunIf(runIf); + pb.setRunJobTask(runJobTask); + pb.setRunPageUrl(runPageUrl); + pb.setSetupDuration(setupDuration); + pb.setSparkJarTask(sparkJarTask); + pb.setSparkPythonTask(sparkPythonTask); + pb.setSparkSubmitTask(sparkSubmitTask); + pb.setSqlTask(sqlTask); + pb.setStartTime(startTime); + pb.setState(state); + pb.setStatus(status); + pb.setTaskKey(taskKey); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setWebhookNotifications(webhookNotifications); + + return pb; + } + + static RunTask fromPb(RunTaskPb pb) { + RunTask model = new RunTask(); + model.setAttemptNumber(pb.getAttemptNumber()); + model.setCleanRoomsNotebookTask(pb.getCleanRoomsNotebookTask()); + model.setCleanupDuration(pb.getCleanupDuration()); + model.setClusterInstance(pb.getClusterInstance()); + model.setConditionTask(pb.getConditionTask()); + model.setDashboardTask(pb.getDashboardTask()); + model.setDbtCloudTask(pb.getDbtCloudTask()); + model.setDbtTask(pb.getDbtTask()); + model.setDependsOn(pb.getDependsOn()); + model.setDescription(pb.getDescription()); + model.setDisabled(pb.getDisabled()); + model.setEffectivePerformanceTarget(pb.getEffectivePerformanceTarget()); + model.setEmailNotifications(pb.getEmailNotifications()); + 
model.setEndTime(pb.getEndTime()); + model.setEnvironmentKey(pb.getEnvironmentKey()); + model.setExecutionDuration(pb.getExecutionDuration()); + model.setExistingClusterId(pb.getExistingClusterId()); + model.setForEachTask(pb.getForEachTask()); + model.setGenAiComputeTask(pb.getGenAiComputeTask()); + model.setGitSource(pb.getGitSource()); + model.setJobClusterKey(pb.getJobClusterKey()); + model.setLibraries(pb.getLibraries()); + model.setNewCluster(pb.getNewCluster()); + model.setNotebookTask(pb.getNotebookTask()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setPipelineTask(pb.getPipelineTask()); + model.setPowerBiTask(pb.getPowerBiTask()); + model.setPythonWheelTask(pb.getPythonWheelTask()); + model.setQueueDuration(pb.getQueueDuration()); + model.setResolvedValues(pb.getResolvedValues()); + model.setRunDuration(pb.getRunDuration()); + model.setRunId(pb.getRunId()); + model.setRunIf(pb.getRunIf()); + model.setRunJobTask(pb.getRunJobTask()); + model.setRunPageUrl(pb.getRunPageUrl()); + model.setSetupDuration(pb.getSetupDuration()); + model.setSparkJarTask(pb.getSparkJarTask()); + model.setSparkPythonTask(pb.getSparkPythonTask()); + model.setSparkSubmitTask(pb.getSparkSubmitTask()); + model.setSqlTask(pb.getSqlTask()); + model.setStartTime(pb.getStartTime()); + model.setState(pb.getState()); + model.setStatus(pb.getStatus()); + model.setTaskKey(pb.getTaskKey()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + + return model; + } + + public static class RunTaskSerializer extends JsonSerializer { + @Override + public void serialize(RunTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunTaskDeserializer extends JsonDeserializer { + @Override + public RunTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunTaskPb pb = mapper.readValue(p, RunTaskPb.class); + return RunTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTaskPb.java new file mode 100755 index 000000000..a8831f8ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTaskPb.java @@ -0,0 +1,721 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Used when outputting a child run, in GetRun or ListRuns. */ +@Generated +class RunTaskPb { + @JsonProperty("attempt_number") + private Long attemptNumber; + + @JsonProperty("clean_rooms_notebook_task") + private CleanRoomsNotebookTask cleanRoomsNotebookTask; + + @JsonProperty("cleanup_duration") + private Long cleanupDuration; + + @JsonProperty("cluster_instance") + private ClusterInstance clusterInstance; + + @JsonProperty("condition_task") + private RunConditionTask conditionTask; + + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + + @JsonProperty("dbt_task") + private DbtTask dbtTask; + + @JsonProperty("depends_on") + private Collection dependsOn; + + @JsonProperty("description") + private String description; + + @JsonProperty("disabled") + private Boolean disabled; + + @JsonProperty("effective_performance_target") + private PerformanceTarget effectivePerformanceTarget; + + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + + 
@JsonProperty("end_time") + private Long endTime; + + @JsonProperty("environment_key") + private String environmentKey; + + @JsonProperty("execution_duration") + private Long executionDuration; + + @JsonProperty("existing_cluster_id") + private String existingClusterId; + + @JsonProperty("for_each_task") + private RunForEachTask forEachTask; + + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("new_cluster") + private com.databricks.sdk.service.compute.ClusterSpec newCluster; + + @JsonProperty("notebook_task") + private NotebookTask notebookTask; + + @JsonProperty("notification_settings") + private TaskNotificationSettings notificationSettings; + + @JsonProperty("pipeline_task") + private PipelineTask pipelineTask; + + @JsonProperty("power_bi_task") + private PowerBiTask powerBiTask; + + @JsonProperty("python_wheel_task") + private PythonWheelTask pythonWheelTask; + + @JsonProperty("queue_duration") + private Long queueDuration; + + @JsonProperty("resolved_values") + private ResolvedValues resolvedValues; + + @JsonProperty("run_duration") + private Long runDuration; + + @JsonProperty("run_id") + private Long runId; + + @JsonProperty("run_if") + private RunIf runIf; + + @JsonProperty("run_job_task") + private RunJobTask runJobTask; + + @JsonProperty("run_page_url") + private String runPageUrl; + + @JsonProperty("setup_duration") + private Long setupDuration; + + @JsonProperty("spark_jar_task") + private SparkJarTask sparkJarTask; + + @JsonProperty("spark_python_task") + private SparkPythonTask sparkPythonTask; + + @JsonProperty("spark_submit_task") + private SparkSubmitTask sparkSubmitTask; + + @JsonProperty("sql_task") + private SqlTask sqlTask; + + @JsonProperty("start_time") + private Long startTime; + + 
@JsonProperty("state") + private RunState state; + + @JsonProperty("status") + private RunStatus status; + + @JsonProperty("task_key") + private String taskKey; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + + public RunTaskPb setAttemptNumber(Long attemptNumber) { + this.attemptNumber = attemptNumber; + return this; + } + + public Long getAttemptNumber() { + return attemptNumber; + } + + public RunTaskPb setCleanRoomsNotebookTask(CleanRoomsNotebookTask cleanRoomsNotebookTask) { + this.cleanRoomsNotebookTask = cleanRoomsNotebookTask; + return this; + } + + public CleanRoomsNotebookTask getCleanRoomsNotebookTask() { + return cleanRoomsNotebookTask; + } + + public RunTaskPb setCleanupDuration(Long cleanupDuration) { + this.cleanupDuration = cleanupDuration; + return this; + } + + public Long getCleanupDuration() { + return cleanupDuration; + } + + public RunTaskPb setClusterInstance(ClusterInstance clusterInstance) { + this.clusterInstance = clusterInstance; + return this; + } + + public ClusterInstance getClusterInstance() { + return clusterInstance; + } + + public RunTaskPb setConditionTask(RunConditionTask conditionTask) { + this.conditionTask = conditionTask; + return this; + } + + public RunConditionTask getConditionTask() { + return conditionTask; + } + + public RunTaskPb setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + + public RunTaskPb setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + + public RunTaskPb setDbtTask(DbtTask dbtTask) { + this.dbtTask = dbtTask; + return this; + } + + public DbtTask getDbtTask() { + return dbtTask; + } + + public RunTaskPb setDependsOn(Collection dependsOn) { + 
this.dependsOn = dependsOn; + return this; + } + + public Collection getDependsOn() { + return dependsOn; + } + + public RunTaskPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public RunTaskPb setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public RunTaskPb setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) { + this.effectivePerformanceTarget = effectivePerformanceTarget; + return this; + } + + public PerformanceTarget getEffectivePerformanceTarget() { + return effectivePerformanceTarget; + } + + public RunTaskPb setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public RunTaskPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public RunTaskPb setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + + public RunTaskPb setExecutionDuration(Long executionDuration) { + this.executionDuration = executionDuration; + return this; + } + + public Long getExecutionDuration() { + return executionDuration; + } + + public RunTaskPb setExistingClusterId(String existingClusterId) { + this.existingClusterId = existingClusterId; + return this; + } + + public String getExistingClusterId() { + return existingClusterId; + } + + public RunTaskPb setForEachTask(RunForEachTask forEachTask) { + this.forEachTask = forEachTask; + return this; + } + + public RunForEachTask getForEachTask() { + return forEachTask; + } + + public RunTaskPb setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + 
this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + + public RunTaskPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public RunTaskPb setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + + public RunTaskPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public RunTaskPb setNewCluster(com.databricks.sdk.service.compute.ClusterSpec newCluster) { + this.newCluster = newCluster; + return this; + } + + public com.databricks.sdk.service.compute.ClusterSpec getNewCluster() { + return newCluster; + } + + public RunTaskPb setNotebookTask(NotebookTask notebookTask) { + this.notebookTask = notebookTask; + return this; + } + + public NotebookTask getNotebookTask() { + return notebookTask; + } + + public RunTaskPb setNotificationSettings(TaskNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public TaskNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public RunTaskPb setPipelineTask(PipelineTask pipelineTask) { + this.pipelineTask = pipelineTask; + return this; + } + + public PipelineTask getPipelineTask() { + return pipelineTask; + } + + public RunTaskPb setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + + public RunTaskPb setPythonWheelTask(PythonWheelTask pythonWheelTask) { + this.pythonWheelTask = pythonWheelTask; + return this; + } + + public PythonWheelTask getPythonWheelTask() { + return pythonWheelTask; + } + + public RunTaskPb 
setQueueDuration(Long queueDuration) { + this.queueDuration = queueDuration; + return this; + } + + public Long getQueueDuration() { + return queueDuration; + } + + public RunTaskPb setResolvedValues(ResolvedValues resolvedValues) { + this.resolvedValues = resolvedValues; + return this; + } + + public ResolvedValues getResolvedValues() { + return resolvedValues; + } + + public RunTaskPb setRunDuration(Long runDuration) { + this.runDuration = runDuration; + return this; + } + + public Long getRunDuration() { + return runDuration; + } + + public RunTaskPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + public RunTaskPb setRunIf(RunIf runIf) { + this.runIf = runIf; + return this; + } + + public RunIf getRunIf() { + return runIf; + } + + public RunTaskPb setRunJobTask(RunJobTask runJobTask) { + this.runJobTask = runJobTask; + return this; + } + + public RunJobTask getRunJobTask() { + return runJobTask; + } + + public RunTaskPb setRunPageUrl(String runPageUrl) { + this.runPageUrl = runPageUrl; + return this; + } + + public String getRunPageUrl() { + return runPageUrl; + } + + public RunTaskPb setSetupDuration(Long setupDuration) { + this.setupDuration = setupDuration; + return this; + } + + public Long getSetupDuration() { + return setupDuration; + } + + public RunTaskPb setSparkJarTask(SparkJarTask sparkJarTask) { + this.sparkJarTask = sparkJarTask; + return this; + } + + public SparkJarTask getSparkJarTask() { + return sparkJarTask; + } + + public RunTaskPb setSparkPythonTask(SparkPythonTask sparkPythonTask) { + this.sparkPythonTask = sparkPythonTask; + return this; + } + + public SparkPythonTask getSparkPythonTask() { + return sparkPythonTask; + } + + public RunTaskPb setSparkSubmitTask(SparkSubmitTask sparkSubmitTask) { + this.sparkSubmitTask = sparkSubmitTask; + return this; + } + + public SparkSubmitTask getSparkSubmitTask() { + return sparkSubmitTask; + } + + public RunTaskPb setSqlTask(SqlTask 
sqlTask) { + this.sqlTask = sqlTask; + return this; + } + + public SqlTask getSqlTask() { + return sqlTask; + } + + public RunTaskPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public RunTaskPb setState(RunState state) { + this.state = state; + return this; + } + + public RunState getState() { + return state; + } + + public RunTaskPb setStatus(RunStatus status) { + this.status = status; + return this; + } + + public RunStatus getStatus() { + return status; + } + + public RunTaskPb setTaskKey(String taskKey) { + this.taskKey = taskKey; + return this; + } + + public String getTaskKey() { + return taskKey; + } + + public RunTaskPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public RunTaskPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunTaskPb that = (RunTaskPb) o; + return Objects.equals(attemptNumber, that.attemptNumber) + && Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) + && Objects.equals(cleanupDuration, that.cleanupDuration) + && Objects.equals(clusterInstance, that.clusterInstance) + && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) + && Objects.equals(dbtTask, that.dbtTask) + && Objects.equals(dependsOn, that.dependsOn) + && Objects.equals(description, that.description) + && Objects.equals(disabled, that.disabled) + && Objects.equals(effectivePerformanceTarget, 
that.effectivePerformanceTarget) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(endTime, that.endTime) + && Objects.equals(environmentKey, that.environmentKey) + && Objects.equals(executionDuration, that.executionDuration) + && Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(jobClusterKey, that.jobClusterKey) + && Objects.equals(libraries, that.libraries) + && Objects.equals(newCluster, that.newCluster) + && Objects.equals(notebookTask, that.notebookTask) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) + && Objects.equals(pythonWheelTask, that.pythonWheelTask) + && Objects.equals(queueDuration, that.queueDuration) + && Objects.equals(resolvedValues, that.resolvedValues) + && Objects.equals(runDuration, that.runDuration) + && Objects.equals(runId, that.runId) + && Objects.equals(runIf, that.runIf) + && Objects.equals(runJobTask, that.runJobTask) + && Objects.equals(runPageUrl, that.runPageUrl) + && Objects.equals(setupDuration, that.setupDuration) + && Objects.equals(sparkJarTask, that.sparkJarTask) + && Objects.equals(sparkPythonTask, that.sparkPythonTask) + && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) + && Objects.equals(sqlTask, that.sqlTask) + && Objects.equals(startTime, that.startTime) + && Objects.equals(state, that.state) + && Objects.equals(status, that.status) + && Objects.equals(taskKey, that.taskKey) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); + } + + @Override + public int hashCode() { + return Objects.hash( + attemptNumber, + cleanRoomsNotebookTask, + cleanupDuration, + clusterInstance, + conditionTask, + 
dashboardTask, + dbtCloudTask, + dbtTask, + dependsOn, + description, + disabled, + effectivePerformanceTarget, + emailNotifications, + endTime, + environmentKey, + executionDuration, + existingClusterId, + forEachTask, + genAiComputeTask, + gitSource, + jobClusterKey, + libraries, + newCluster, + notebookTask, + notificationSettings, + pipelineTask, + powerBiTask, + pythonWheelTask, + queueDuration, + resolvedValues, + runDuration, + runId, + runIf, + runJobTask, + runPageUrl, + setupDuration, + sparkJarTask, + sparkPythonTask, + sparkSubmitTask, + sqlTask, + startTime, + state, + status, + taskKey, + timeoutSeconds, + webhookNotifications); + } + + @Override + public String toString() { + return new ToStringer(RunTaskPb.class) + .add("attemptNumber", attemptNumber) + .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) + .add("cleanupDuration", cleanupDuration) + .add("clusterInstance", clusterInstance) + .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) + .add("dbtTask", dbtTask) + .add("dependsOn", dependsOn) + .add("description", description) + .add("disabled", disabled) + .add("effectivePerformanceTarget", effectivePerformanceTarget) + .add("emailNotifications", emailNotifications) + .add("endTime", endTime) + .add("environmentKey", environmentKey) + .add("executionDuration", executionDuration) + .add("existingClusterId", existingClusterId) + .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) + .add("gitSource", gitSource) + .add("jobClusterKey", jobClusterKey) + .add("libraries", libraries) + .add("newCluster", newCluster) + .add("notebookTask", notebookTask) + .add("notificationSettings", notificationSettings) + .add("pipelineTask", pipelineTask) + .add("powerBiTask", powerBiTask) + .add("pythonWheelTask", pythonWheelTask) + .add("queueDuration", queueDuration) + .add("resolvedValues", resolvedValues) + .add("runDuration", runDuration) + .add("runId", runId) + 
.add("runIf", runIf) + .add("runJobTask", runJobTask) + .add("runPageUrl", runPageUrl) + .add("setupDuration", setupDuration) + .add("sparkJarTask", sparkJarTask) + .add("sparkPythonTask", sparkPythonTask) + .add("sparkSubmitTask", sparkSubmitTask) + .add("sqlTask", sqlTask) + .add("startTime", startTime) + .add("state", state) + .add("status", status) + .add("taskKey", taskKey) + .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java index 4978f8544..a44408245 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SparkJarTask.SparkJarTaskSerializer.class) +@JsonDeserialize(using = SparkJarTask.SparkJarTaskDeserializer.class) public class SparkJarTask { /** * Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an * example, see :method:jobs/create. 
*/ - @JsonProperty("jar_uri") private String jarUri; /** @@ -24,7 +34,6 @@ public class SparkJarTask { *

The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of * the job fail. */ - @JsonProperty("main_class_name") private String mainClassName; /** @@ -34,11 +43,9 @@ public class SparkJarTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("parameters") private Collection parameters; /** Deprecated. A value of `false` is no longer supported. */ - @JsonProperty("run_as_repl") private Boolean runAsRepl; public SparkJarTask setJarUri(String jarUri) { @@ -102,4 +109,43 @@ public String toString() { .add("runAsRepl", runAsRepl) .toString(); } + + SparkJarTaskPb toPb() { + SparkJarTaskPb pb = new SparkJarTaskPb(); + pb.setJarUri(jarUri); + pb.setMainClassName(mainClassName); + pb.setParameters(parameters); + pb.setRunAsRepl(runAsRepl); + + return pb; + } + + static SparkJarTask fromPb(SparkJarTaskPb pb) { + SparkJarTask model = new SparkJarTask(); + model.setJarUri(pb.getJarUri()); + model.setMainClassName(pb.getMainClassName()); + model.setParameters(pb.getParameters()); + model.setRunAsRepl(pb.getRunAsRepl()); + + return model; + } + + public static class SparkJarTaskSerializer extends JsonSerializer { + @Override + public void serialize(SparkJarTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparkJarTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparkJarTaskDeserializer extends JsonDeserializer { + @Override + public SparkJarTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparkJarTaskPb pb = mapper.readValue(p, SparkJarTaskPb.class); + return SparkJarTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTaskPb.java new file mode 100755 index 000000000..17cb5dcba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTaskPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SparkJarTaskPb { + @JsonProperty("jar_uri") + private String jarUri; + + @JsonProperty("main_class_name") + private String mainClassName; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("run_as_repl") + private Boolean runAsRepl; + + public SparkJarTaskPb setJarUri(String jarUri) { + this.jarUri = jarUri; + return this; + } + + public String getJarUri() { + return jarUri; + } + + public SparkJarTaskPb setMainClassName(String mainClassName) { + this.mainClassName = mainClassName; + return this; + } + + public String getMainClassName() { + return mainClassName; + } + + public SparkJarTaskPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public SparkJarTaskPb setRunAsRepl(Boolean runAsRepl) { + this.runAsRepl = runAsRepl; + return this; + } + + public Boolean getRunAsRepl() { + return runAsRepl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparkJarTaskPb that = 
(SparkJarTaskPb) o; + return Objects.equals(jarUri, that.jarUri) + && Objects.equals(mainClassName, that.mainClassName) + && Objects.equals(parameters, that.parameters) + && Objects.equals(runAsRepl, that.runAsRepl); + } + + @Override + public int hashCode() { + return Objects.hash(jarUri, mainClassName, parameters, runAsRepl); + } + + @Override + public String toString() { + return new ToStringer(SparkJarTaskPb.class) + .add("jarUri", jarUri) + .add("mainClassName", mainClassName) + .add("parameters", parameters) + .add("runAsRepl", runAsRepl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java index 0c69b9ca9..b3acb8723 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SparkPythonTask.SparkPythonTaskSerializer.class) +@JsonDeserialize(using = SparkPythonTask.SparkPythonTaskDeserializer.class) public class SparkPythonTask { /** * Command line parameters passed to the Python file. 
@@ -17,7 +28,6 @@ public class SparkPythonTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("parameters") private Collection parameters; /** @@ -26,7 +36,6 @@ public class SparkPythonTask { * must be absolute and begin with `/`. For files stored in a remote repository, the path must be * relative. This field is required. */ - @JsonProperty("python_file") private String pythonFile; /** @@ -38,7 +47,6 @@ public class SparkPythonTask { *

* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem * URI. * `GIT`: The Python file is located in a remote Git repository. */ - @JsonProperty("source") private Source source; public SparkPythonTask setParameters(Collection parameters) { @@ -91,4 +99,42 @@ public String toString() { .add("source", source) .toString(); } + + SparkPythonTaskPb toPb() { + SparkPythonTaskPb pb = new SparkPythonTaskPb(); + pb.setParameters(parameters); + pb.setPythonFile(pythonFile); + pb.setSource(source); + + return pb; + } + + static SparkPythonTask fromPb(SparkPythonTaskPb pb) { + SparkPythonTask model = new SparkPythonTask(); + model.setParameters(pb.getParameters()); + model.setPythonFile(pb.getPythonFile()); + model.setSource(pb.getSource()); + + return model; + } + + public static class SparkPythonTaskSerializer extends JsonSerializer { + @Override + public void serialize(SparkPythonTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparkPythonTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparkPythonTaskDeserializer extends JsonDeserializer { + @Override + public SparkPythonTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparkPythonTaskPb pb = mapper.readValue(p, SparkPythonTaskPb.class); + return SparkPythonTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTaskPb.java new file mode 100755 index 000000000..8d89b7840 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTaskPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SparkPythonTaskPb { + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("python_file") + private String pythonFile; + + @JsonProperty("source") + private Source source; + + public SparkPythonTaskPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public SparkPythonTaskPb setPythonFile(String pythonFile) { + this.pythonFile = pythonFile; + return this; + } + + public String getPythonFile() { + return pythonFile; + } + + public SparkPythonTaskPb setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparkPythonTaskPb that = (SparkPythonTaskPb) o; + return Objects.equals(parameters, that.parameters) + && Objects.equals(pythonFile, that.pythonFile) + && Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(parameters, pythonFile, source); + } + + @Override + public String toString() { + return new ToStringer(SparkPythonTaskPb.class) + .add("parameters", parameters) + .add("pythonFile", pythonFile) + .add("source", source) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java index 422b67934..8c204b3f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SparkSubmitTask.SparkSubmitTaskSerializer.class) +@JsonDeserialize(using = SparkSubmitTask.SparkSubmitTaskDeserializer.class) public class SparkSubmitTask { /** * Command-line parameters passed to spark submit. @@ -17,7 +28,6 @@ public class SparkSubmitTask { * *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ - @JsonProperty("parameters") private Collection parameters; public SparkSubmitTask setParameters(Collection parameters) { @@ -46,4 +56,38 @@ public int hashCode() { public String toString() { return new ToStringer(SparkSubmitTask.class).add("parameters", parameters).toString(); } + + SparkSubmitTaskPb toPb() { + SparkSubmitTaskPb pb = new SparkSubmitTaskPb(); + pb.setParameters(parameters); + + return pb; + } + + static SparkSubmitTask fromPb(SparkSubmitTaskPb pb) { + SparkSubmitTask model = new SparkSubmitTask(); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class SparkSubmitTaskSerializer extends JsonSerializer { + @Override + public void serialize(SparkSubmitTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparkSubmitTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparkSubmitTaskDeserializer extends JsonDeserializer { + @Override + public SparkSubmitTask deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparkSubmitTaskPb pb = mapper.readValue(p, SparkSubmitTaskPb.class); + return SparkSubmitTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTaskPb.java new file mode 100755 index 000000000..7257f5e2e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTaskPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SparkSubmitTaskPb { + @JsonProperty("parameters") + private Collection parameters; + + public SparkSubmitTaskPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparkSubmitTaskPb that = (SparkSubmitTaskPb) o; + return Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(parameters); + } + + @Override + public String toString() { + return new ToStringer(SparkSubmitTaskPb.class).add("parameters", parameters).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java index 1810d913e..a470c4567 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SqlAlertOutput.SqlAlertOutputSerializer.class) +@JsonDeserialize(using = SqlAlertOutput.SqlAlertOutputDeserializer.class) public class SqlAlertOutput { /** * The state of the SQL alert. @@ -16,26 +27,21 @@ public class SqlAlertOutput { *

* UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger * conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions */ - @JsonProperty("alert_state") private SqlAlertState alertState; /** The link to find the output results. */ - @JsonProperty("output_link") private String outputLink; /** * The text of the SQL query. Can Run permission of the SQL query associated with the SQL alert is * required to view this field. */ - @JsonProperty("query_text") private String queryText; /** Information about SQL statements executed in the run. */ - @JsonProperty("sql_statements") private Collection sqlStatements; /** The canonical identifier of the SQL warehouse. */ - @JsonProperty("warehouse_id") private String warehouseId; public SqlAlertOutput setAlertState(SqlAlertState alertState) { @@ -110,4 +116,46 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + SqlAlertOutputPb toPb() { + SqlAlertOutputPb pb = new SqlAlertOutputPb(); + pb.setAlertState(alertState); + pb.setOutputLink(outputLink); + pb.setQueryText(queryText); + pb.setSqlStatements(sqlStatements); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static SqlAlertOutput fromPb(SqlAlertOutputPb pb) { + SqlAlertOutput model = new SqlAlertOutput(); + model.setAlertState(pb.getAlertState()); + model.setOutputLink(pb.getOutputLink()); + model.setQueryText(pb.getQueryText()); + model.setSqlStatements(pb.getSqlStatements()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class SqlAlertOutputSerializer extends JsonSerializer { + @Override + public void serialize(SqlAlertOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlAlertOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlAlertOutputDeserializer extends JsonDeserializer { + @Override + public SqlAlertOutput deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlAlertOutputPb pb = mapper.readValue(p, SqlAlertOutputPb.class); + return SqlAlertOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutputPb.java new file mode 100755 index 000000000..9512abf56 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutputPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SqlAlertOutputPb { + @JsonProperty("alert_state") + private SqlAlertState alertState; + + @JsonProperty("output_link") + private String outputLink; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("sql_statements") + private Collection sqlStatements; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public SqlAlertOutputPb setAlertState(SqlAlertState alertState) { + this.alertState = alertState; + return this; + } + + public SqlAlertState getAlertState() { + return alertState; + } + + public SqlAlertOutputPb setOutputLink(String outputLink) { + this.outputLink = outputLink; + return this; + } + + public String getOutputLink() { + return outputLink; + } + + public SqlAlertOutputPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public SqlAlertOutputPb setSqlStatements(Collection sqlStatements) { + this.sqlStatements = sqlStatements; + return this; + } + + 
public Collection getSqlStatements() { + return sqlStatements; + } + + public SqlAlertOutputPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlAlertOutputPb that = (SqlAlertOutputPb) o; + return Objects.equals(alertState, that.alertState) + && Objects.equals(outputLink, that.outputLink) + && Objects.equals(queryText, that.queryText) + && Objects.equals(sqlStatements, that.sqlStatements) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(alertState, outputLink, queryText, sqlStatements, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(SqlAlertOutputPb.class) + .add("alertState", alertState) + .add("outputLink", outputLink) + .add("queryText", queryText) + .add("sqlStatements", sqlStatements) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutput.java index da1d13585..e414e38a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutput.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SqlDashboardOutput.SqlDashboardOutputSerializer.class) +@JsonDeserialize(using = SqlDashboardOutput.SqlDashboardOutputDeserializer.class) public class SqlDashboardOutput { /** The canonical identifier of the SQL warehouse. */ - @JsonProperty("warehouse_id") private String warehouseId; /** Widgets executed in the run. Only SQL query based widgets are listed. */ - @JsonProperty("widgets") private Collection widgets; public SqlDashboardOutput setWarehouseId(String warehouseId) { @@ -56,4 +65,40 @@ public String toString() { .add("widgets", widgets) .toString(); } + + SqlDashboardOutputPb toPb() { + SqlDashboardOutputPb pb = new SqlDashboardOutputPb(); + pb.setWarehouseId(warehouseId); + pb.setWidgets(widgets); + + return pb; + } + + static SqlDashboardOutput fromPb(SqlDashboardOutputPb pb) { + SqlDashboardOutput model = new SqlDashboardOutput(); + model.setWarehouseId(pb.getWarehouseId()); + model.setWidgets(pb.getWidgets()); + + return model; + } + + public static class SqlDashboardOutputSerializer extends JsonSerializer { + @Override + public void serialize(SqlDashboardOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlDashboardOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlDashboardOutputDeserializer extends JsonDeserializer { + @Override + public SqlDashboardOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlDashboardOutputPb pb = mapper.readValue(p, SqlDashboardOutputPb.class); + return SqlDashboardOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutputPb.java new file mode 100755 index 000000000..8a8b84e69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardOutputPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SqlDashboardOutputPb { + @JsonProperty("warehouse_id") + private String warehouseId; + + @JsonProperty("widgets") + private Collection widgets; + + public SqlDashboardOutputPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + public SqlDashboardOutputPb setWidgets(Collection widgets) { + this.widgets = widgets; + return this; + } + + public Collection getWidgets() { + return widgets; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlDashboardOutputPb that = (SqlDashboardOutputPb) o; + return Objects.equals(warehouseId, that.warehouseId) && Objects.equals(widgets, that.widgets); + } + + @Override + public int hashCode() { + return Objects.hash(warehouseId, widgets); + } + + @Override + public String toString() { + return new ToStringer(SqlDashboardOutputPb.class) + .add("warehouseId", warehouseId) + .add("widgets", widgets) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutput.java index 27856c16a..8c60dedb7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutput.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlDashboardWidgetOutput.SqlDashboardWidgetOutputSerializer.class) +@JsonDeserialize(using = SqlDashboardWidgetOutput.SqlDashboardWidgetOutputDeserializer.class) public class SqlDashboardWidgetOutput { /** Time (in epoch milliseconds) when execution of the SQL widget ends. */ - @JsonProperty("end_time") private Long endTime; /** The information about the error when execution fails. */ - @JsonProperty("error") private SqlOutputError error; /** The link to find the output results. */ - @JsonProperty("output_link") private String outputLink; /** Time (in epoch milliseconds) when execution of the SQL widget starts. */ - @JsonProperty("start_time") private Long startTime; /** The execution status of the SQL widget. 
*/ - @JsonProperty("status") private SqlDashboardWidgetOutputStatus status; /** The canonical identifier of the SQL widget. */ - @JsonProperty("widget_id") private String widgetId; /** The title of the SQL widget. */ - @JsonProperty("widget_title") private String widgetTitle; public SqlDashboardWidgetOutput setEndTime(Long endTime) { @@ -131,4 +135,53 @@ public String toString() { .add("widgetTitle", widgetTitle) .toString(); } + + SqlDashboardWidgetOutputPb toPb() { + SqlDashboardWidgetOutputPb pb = new SqlDashboardWidgetOutputPb(); + pb.setEndTime(endTime); + pb.setError(error); + pb.setOutputLink(outputLink); + pb.setStartTime(startTime); + pb.setStatus(status); + pb.setWidgetId(widgetId); + pb.setWidgetTitle(widgetTitle); + + return pb; + } + + static SqlDashboardWidgetOutput fromPb(SqlDashboardWidgetOutputPb pb) { + SqlDashboardWidgetOutput model = new SqlDashboardWidgetOutput(); + model.setEndTime(pb.getEndTime()); + model.setError(pb.getError()); + model.setOutputLink(pb.getOutputLink()); + model.setStartTime(pb.getStartTime()); + model.setStatus(pb.getStatus()); + model.setWidgetId(pb.getWidgetId()); + model.setWidgetTitle(pb.getWidgetTitle()); + + return model; + } + + public static class SqlDashboardWidgetOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + SqlDashboardWidgetOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlDashboardWidgetOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlDashboardWidgetOutputDeserializer + extends JsonDeserializer { + @Override + public SqlDashboardWidgetOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlDashboardWidgetOutputPb pb = mapper.readValue(p, SqlDashboardWidgetOutputPb.class); + return SqlDashboardWidgetOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputPb.java new file mode 100755 index 000000000..496f5f42a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputPb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlDashboardWidgetOutputPb { + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("error") + private SqlOutputError error; + + @JsonProperty("output_link") + private String outputLink; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("status") + private SqlDashboardWidgetOutputStatus status; + + @JsonProperty("widget_id") + private String widgetId; + + @JsonProperty("widget_title") + private String widgetTitle; + + public SqlDashboardWidgetOutputPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public SqlDashboardWidgetOutputPb setError(SqlOutputError error) { + this.error = error; + return this; + } + + public SqlOutputError getError() { + return error; + } + + public SqlDashboardWidgetOutputPb setOutputLink(String outputLink) { + this.outputLink = outputLink; + return this; + } + + public String getOutputLink() { + return outputLink; + } + + public SqlDashboardWidgetOutputPb setStartTime(Long startTime) { + this.startTime = startTime; + 
return this; + } + + public Long getStartTime() { + return startTime; + } + + public SqlDashboardWidgetOutputPb setStatus(SqlDashboardWidgetOutputStatus status) { + this.status = status; + return this; + } + + public SqlDashboardWidgetOutputStatus getStatus() { + return status; + } + + public SqlDashboardWidgetOutputPb setWidgetId(String widgetId) { + this.widgetId = widgetId; + return this; + } + + public String getWidgetId() { + return widgetId; + } + + public SqlDashboardWidgetOutputPb setWidgetTitle(String widgetTitle) { + this.widgetTitle = widgetTitle; + return this; + } + + public String getWidgetTitle() { + return widgetTitle; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlDashboardWidgetOutputPb that = (SqlDashboardWidgetOutputPb) o; + return Objects.equals(endTime, that.endTime) + && Objects.equals(error, that.error) + && Objects.equals(outputLink, that.outputLink) + && Objects.equals(startTime, that.startTime) + && Objects.equals(status, that.status) + && Objects.equals(widgetId, that.widgetId) + && Objects.equals(widgetTitle, that.widgetTitle); + } + + @Override + public int hashCode() { + return Objects.hash(endTime, error, outputLink, startTime, status, widgetId, widgetTitle); + } + + @Override + public String toString() { + return new ToStringer(SqlDashboardWidgetOutputPb.class) + .add("endTime", endTime) + .add("error", error) + .add("outputLink", outputLink) + .add("startTime", startTime) + .add("status", status) + .add("widgetId", widgetId) + .add("widgetTitle", widgetTitle) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutput.java index 0ce85de12..3dd3d130e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutput.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutput.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlOutput.SqlOutputSerializer.class) +@JsonDeserialize(using = SqlOutput.SqlOutputDeserializer.class) public class SqlOutput { /** The output of a SQL alert task, if available. */ - @JsonProperty("alert_output") private SqlAlertOutput alertOutput; /** The output of a SQL dashboard task, if available. */ - @JsonProperty("dashboard_output") private SqlDashboardOutput dashboardOutput; /** The output of a SQL query task, if available. 
*/ - @JsonProperty("query_output") private SqlQueryOutput queryOutput; public SqlOutput setAlertOutput(SqlAlertOutput alertOutput) { @@ -71,4 +79,41 @@ public String toString() { .add("queryOutput", queryOutput) .toString(); } + + SqlOutputPb toPb() { + SqlOutputPb pb = new SqlOutputPb(); + pb.setAlertOutput(alertOutput); + pb.setDashboardOutput(dashboardOutput); + pb.setQueryOutput(queryOutput); + + return pb; + } + + static SqlOutput fromPb(SqlOutputPb pb) { + SqlOutput model = new SqlOutput(); + model.setAlertOutput(pb.getAlertOutput()); + model.setDashboardOutput(pb.getDashboardOutput()); + model.setQueryOutput(pb.getQueryOutput()); + + return model; + } + + public static class SqlOutputSerializer extends JsonSerializer { + @Override + public void serialize(SqlOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlOutputDeserializer extends JsonDeserializer { + @Override + public SqlOutput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlOutputPb pb = mapper.readValue(p, SqlOutputPb.class); + return SqlOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputError.java index 9e45ec7db..4c37438d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputError.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlOutputError.SqlOutputErrorSerializer.class) +@JsonDeserialize(using = SqlOutputError.SqlOutputErrorDeserializer.class) public class SqlOutputError { /** The error message when execution fails. 
*/ - @JsonProperty("message") private String message; public SqlOutputError setMessage(String message) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(SqlOutputError.class).add("message", message).toString(); } + + SqlOutputErrorPb toPb() { + SqlOutputErrorPb pb = new SqlOutputErrorPb(); + pb.setMessage(message); + + return pb; + } + + static SqlOutputError fromPb(SqlOutputErrorPb pb) { + SqlOutputError model = new SqlOutputError(); + model.setMessage(pb.getMessage()); + + return model; + } + + public static class SqlOutputErrorSerializer extends JsonSerializer { + @Override + public void serialize(SqlOutputError value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlOutputErrorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlOutputErrorDeserializer extends JsonDeserializer { + @Override + public SqlOutputError deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlOutputErrorPb pb = mapper.readValue(p, SqlOutputErrorPb.class); + return SqlOutputError.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputErrorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputErrorPb.java new file mode 100755 index 000000000..080687c88 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputErrorPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlOutputErrorPb { + @JsonProperty("message") + private String message; + + public SqlOutputErrorPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlOutputErrorPb that = (SqlOutputErrorPb) o; + return Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(message); + } + + @Override + public String toString() { + return new ToStringer(SqlOutputErrorPb.class).add("message", message).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputPb.java new file mode 100755 index 000000000..8ae5c31cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlOutputPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlOutputPb { + @JsonProperty("alert_output") + private SqlAlertOutput alertOutput; + + @JsonProperty("dashboard_output") + private SqlDashboardOutput dashboardOutput; + + @JsonProperty("query_output") + private SqlQueryOutput queryOutput; + + public SqlOutputPb setAlertOutput(SqlAlertOutput alertOutput) { + this.alertOutput = alertOutput; + return this; + } + + public SqlAlertOutput getAlertOutput() { + return alertOutput; + } + + public SqlOutputPb setDashboardOutput(SqlDashboardOutput dashboardOutput) { + this.dashboardOutput = dashboardOutput; + return this; + } + + public SqlDashboardOutput getDashboardOutput() { + return dashboardOutput; + } + + public SqlOutputPb setQueryOutput(SqlQueryOutput queryOutput) { + this.queryOutput = queryOutput; + return this; + } + + public SqlQueryOutput getQueryOutput() { + return queryOutput; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlOutputPb that = (SqlOutputPb) o; + return Objects.equals(alertOutput, that.alertOutput) + && Objects.equals(dashboardOutput, that.dashboardOutput) + && Objects.equals(queryOutput, that.queryOutput); + } + + @Override + public int hashCode() { + return Objects.hash(alertOutput, dashboardOutput, queryOutput); + } + + @Override + public String toString() { + return new ToStringer(SqlOutputPb.class) + .add("alertOutput", alertOutput) + .add("dashboardOutput", dashboardOutput) + .add("queryOutput", queryOutput) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java index d540662e4..9ad4e720b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SqlQueryOutput.SqlQueryOutputSerializer.class) +@JsonDeserialize(using = SqlQueryOutput.SqlQueryOutputDeserializer.class) public class SqlQueryOutput { /** */ - @JsonProperty("endpoint_id") private String endpointId; /** The link to find the output results. */ - @JsonProperty("output_link") private String outputLink; /** * The text of the SQL query. Can Run permission of the SQL query is required to view this field. */ - @JsonProperty("query_text") private String queryText; /** Information about SQL statements executed in the run. */ - @JsonProperty("sql_statements") private Collection sqlStatements; /** The canonical identifier of the SQL warehouse. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public SqlQueryOutput setEndpointId(String endpointId) { @@ -104,4 +110,46 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + SqlQueryOutputPb toPb() { + SqlQueryOutputPb pb = new SqlQueryOutputPb(); + pb.setEndpointId(endpointId); + pb.setOutputLink(outputLink); + pb.setQueryText(queryText); + pb.setSqlStatements(sqlStatements); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static SqlQueryOutput fromPb(SqlQueryOutputPb pb) { + SqlQueryOutput model = new SqlQueryOutput(); + model.setEndpointId(pb.getEndpointId()); + model.setOutputLink(pb.getOutputLink()); + model.setQueryText(pb.getQueryText()); + model.setSqlStatements(pb.getSqlStatements()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class SqlQueryOutputSerializer extends JsonSerializer { + @Override + public void serialize(SqlQueryOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlQueryOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlQueryOutputDeserializer extends JsonDeserializer { + @Override + public SqlQueryOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlQueryOutputPb pb = mapper.readValue(p, SqlQueryOutputPb.class); + return SqlQueryOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutputPb.java new file mode 100755 index 000000000..3390b6070 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutputPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SqlQueryOutputPb { + @JsonProperty("endpoint_id") + private String endpointId; + + @JsonProperty("output_link") + private String outputLink; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("sql_statements") + private Collection sqlStatements; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public SqlQueryOutputPb setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public SqlQueryOutputPb setOutputLink(String outputLink) { + this.outputLink = outputLink; + return this; + } + + public String getOutputLink() { + return outputLink; + } + + public SqlQueryOutputPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public SqlQueryOutputPb setSqlStatements(Collection sqlStatements) { + this.sqlStatements = sqlStatements; + return this; + } + + public Collection getSqlStatements() { + return sqlStatements; + } + + public SqlQueryOutputPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlQueryOutputPb that = (SqlQueryOutputPb) o; + return Objects.equals(endpointId, that.endpointId) + && Objects.equals(outputLink, that.outputLink) + && Objects.equals(queryText, that.queryText) + && Objects.equals(sqlStatements, that.sqlStatements) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int 
hashCode() { + return Objects.hash(endpointId, outputLink, queryText, sqlStatements, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(SqlQueryOutputPb.class) + .add("endpointId", endpointId) + .add("outputLink", outputLink) + .add("queryText", queryText) + .add("sqlStatements", sqlStatements) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutput.java index 922bf0fdb..2c4a4fc50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutput.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlStatementOutput.SqlStatementOutputSerializer.class) +@JsonDeserialize(using = SqlStatementOutput.SqlStatementOutputDeserializer.class) public class SqlStatementOutput { /** A key that can be used to look up query details. 
*/ - @JsonProperty("lookup_key") private String lookupKey; public SqlStatementOutput setLookupKey(String lookupKey) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(SqlStatementOutput.class).add("lookupKey", lookupKey).toString(); } + + SqlStatementOutputPb toPb() { + SqlStatementOutputPb pb = new SqlStatementOutputPb(); + pb.setLookupKey(lookupKey); + + return pb; + } + + static SqlStatementOutput fromPb(SqlStatementOutputPb pb) { + SqlStatementOutput model = new SqlStatementOutput(); + model.setLookupKey(pb.getLookupKey()); + + return model; + } + + public static class SqlStatementOutputSerializer extends JsonSerializer { + @Override + public void serialize(SqlStatementOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlStatementOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlStatementOutputDeserializer extends JsonDeserializer { + @Override + public SqlStatementOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlStatementOutputPb pb = mapper.readValue(p, SqlStatementOutputPb.class); + return SqlStatementOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutputPb.java new file mode 100755 index 000000000..4e6e52e91 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlStatementOutputPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlStatementOutputPb { + @JsonProperty("lookup_key") + private String lookupKey; + + public SqlStatementOutputPb setLookupKey(String lookupKey) { + this.lookupKey = lookupKey; + return this; + } + + public String getLookupKey() { + return lookupKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlStatementOutputPb that = (SqlStatementOutputPb) o; + return Objects.equals(lookupKey, that.lookupKey); + } + + @Override + public int hashCode() { + return Objects.hash(lookupKey); + } + + @Override + public String toString() { + return new ToStringer(SqlStatementOutputPb.class).add("lookupKey", lookupKey).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java index 7522a3bc5..80e2a9c45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = SqlTask.SqlTaskSerializer.class) +@JsonDeserialize(using = SqlTask.SqlTaskDeserializer.class) public class SqlTask { /** If alert, indicates that this job must refresh a SQL alert. */ - @JsonProperty("alert") private SqlTaskAlert alert; /** If dashboard, indicates that this job must refresh a SQL dashboard. */ - @JsonProperty("dashboard") private SqlTaskDashboard dashboard; /** If file, indicates that this job runs a SQL file in a remote Git repository. */ - @JsonProperty("file") private SqlTaskFile file; /** * Parameters to be used for each run of this job. The SQL alert task does not support custom * parameters. */ - @JsonProperty("parameters") private Map parameters; /** If query, indicates that this job must execute a SQL query. */ - @JsonProperty("query") private SqlTaskQuery query; /** @@ -38,7 +44,6 @@ public class SqlTask { * warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks * and are limited to scheduled single-task jobs. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public SqlTask setAlert(SqlTaskAlert alert) { @@ -124,4 +129,47 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + SqlTaskPb toPb() { + SqlTaskPb pb = new SqlTaskPb(); + pb.setAlert(alert); + pb.setDashboard(dashboard); + pb.setFile(file); + pb.setParameters(parameters); + pb.setQuery(query); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static SqlTask fromPb(SqlTaskPb pb) { + SqlTask model = new SqlTask(); + model.setAlert(pb.getAlert()); + model.setDashboard(pb.getDashboard()); + model.setFile(pb.getFile()); + model.setParameters(pb.getParameters()); + model.setQuery(pb.getQuery()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class SqlTaskSerializer extends JsonSerializer { + @Override + public void serialize(SqlTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskDeserializer extends JsonDeserializer { + @Override + public SqlTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskPb pb = mapper.readValue(p, SqlTaskPb.class); + return SqlTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlert.java index 7f34a9c64..0415d000b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlert.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SqlTaskAlert.SqlTaskAlertSerializer.class) +@JsonDeserialize(using = SqlTaskAlert.SqlTaskAlertDeserializer.class) public class SqlTaskAlert { /** The canonical identifier of the SQL alert. */ - @JsonProperty("alert_id") private String alertId; /** If true, the alert notifications are not sent to subscribers. */ - @JsonProperty("pause_subscriptions") private Boolean pauseSubscriptions; /** If specified, alert notifications are sent to subscribers. 
*/ - @JsonProperty("subscriptions") private Collection subscriptions; public SqlTaskAlert setAlertId(String alertId) { @@ -72,4 +80,41 @@ public String toString() { .add("subscriptions", subscriptions) .toString(); } + + SqlTaskAlertPb toPb() { + SqlTaskAlertPb pb = new SqlTaskAlertPb(); + pb.setAlertId(alertId); + pb.setPauseSubscriptions(pauseSubscriptions); + pb.setSubscriptions(subscriptions); + + return pb; + } + + static SqlTaskAlert fromPb(SqlTaskAlertPb pb) { + SqlTaskAlert model = new SqlTaskAlert(); + model.setAlertId(pb.getAlertId()); + model.setPauseSubscriptions(pb.getPauseSubscriptions()); + model.setSubscriptions(pb.getSubscriptions()); + + return model; + } + + public static class SqlTaskAlertSerializer extends JsonSerializer { + @Override + public void serialize(SqlTaskAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskAlertDeserializer extends JsonDeserializer { + @Override + public SqlTaskAlert deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskAlertPb pb = mapper.readValue(p, SqlTaskAlertPb.class); + return SqlTaskAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlertPb.java new file mode 100755 index 000000000..a6ebd84f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskAlertPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SqlTaskAlertPb { + @JsonProperty("alert_id") + private String alertId; + + @JsonProperty("pause_subscriptions") + private Boolean pauseSubscriptions; + + @JsonProperty("subscriptions") + private Collection subscriptions; + + public SqlTaskAlertPb setAlertId(String alertId) { + this.alertId = alertId; + return this; + } + + public String getAlertId() { + return alertId; + } + + public SqlTaskAlertPb setPauseSubscriptions(Boolean pauseSubscriptions) { + this.pauseSubscriptions = pauseSubscriptions; + return this; + } + + public Boolean getPauseSubscriptions() { + return pauseSubscriptions; + } + + public SqlTaskAlertPb setSubscriptions(Collection subscriptions) { + this.subscriptions = subscriptions; + return this; + } + + public Collection getSubscriptions() { + return subscriptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskAlertPb that = (SqlTaskAlertPb) o; + return Objects.equals(alertId, that.alertId) + && Objects.equals(pauseSubscriptions, that.pauseSubscriptions) + && Objects.equals(subscriptions, that.subscriptions); + } + + @Override + public int hashCode() { + return Objects.hash(alertId, pauseSubscriptions, subscriptions); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskAlertPb.class) + .add("alertId", alertId) + .add("pauseSubscriptions", pauseSubscriptions) + .add("subscriptions", subscriptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboard.java index 4bff6691f..34e280581 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboard.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SqlTaskDashboard.SqlTaskDashboardSerializer.class) +@JsonDeserialize(using = SqlTaskDashboard.SqlTaskDashboardDeserializer.class) public class SqlTaskDashboard { /** Subject of the email sent to subscribers of this task. */ - @JsonProperty("custom_subject") private String customSubject; /** The canonical identifier of the SQL dashboard. */ - @JsonProperty("dashboard_id") private String dashboardId; /** If true, the dashboard snapshot is not taken, and emails are not sent to subscribers. */ - @JsonProperty("pause_subscriptions") private Boolean pauseSubscriptions; /** If specified, dashboard snapshots are sent to subscriptions. 
*/ - @JsonProperty("subscriptions") private Collection subscriptions; public SqlTaskDashboard setCustomSubject(String customSubject) { @@ -87,4 +94,44 @@ public String toString() { .add("subscriptions", subscriptions) .toString(); } + + SqlTaskDashboardPb toPb() { + SqlTaskDashboardPb pb = new SqlTaskDashboardPb(); + pb.setCustomSubject(customSubject); + pb.setDashboardId(dashboardId); + pb.setPauseSubscriptions(pauseSubscriptions); + pb.setSubscriptions(subscriptions); + + return pb; + } + + static SqlTaskDashboard fromPb(SqlTaskDashboardPb pb) { + SqlTaskDashboard model = new SqlTaskDashboard(); + model.setCustomSubject(pb.getCustomSubject()); + model.setDashboardId(pb.getDashboardId()); + model.setPauseSubscriptions(pb.getPauseSubscriptions()); + model.setSubscriptions(pb.getSubscriptions()); + + return model; + } + + public static class SqlTaskDashboardSerializer extends JsonSerializer { + @Override + public void serialize(SqlTaskDashboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskDashboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskDashboardDeserializer extends JsonDeserializer { + @Override + public SqlTaskDashboard deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskDashboardPb pb = mapper.readValue(p, SqlTaskDashboardPb.class); + return SqlTaskDashboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboardPb.java new file mode 100755 index 000000000..93c3b7a49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboardPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SqlTaskDashboardPb { + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("pause_subscriptions") + private Boolean pauseSubscriptions; + + @JsonProperty("subscriptions") + private Collection subscriptions; + + public SqlTaskDashboardPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public SqlTaskDashboardPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public SqlTaskDashboardPb setPauseSubscriptions(Boolean pauseSubscriptions) { + this.pauseSubscriptions = pauseSubscriptions; + return this; + } + + public Boolean getPauseSubscriptions() { + return pauseSubscriptions; + } + + public SqlTaskDashboardPb setSubscriptions(Collection subscriptions) { + this.subscriptions = subscriptions; + return this; + } + + public Collection getSubscriptions() { + return 
subscriptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskDashboardPb that = (SqlTaskDashboardPb) o; + return Objects.equals(customSubject, that.customSubject) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(pauseSubscriptions, that.pauseSubscriptions) + && Objects.equals(subscriptions, that.subscriptions); + } + + @Override + public int hashCode() { + return Objects.hash(customSubject, dashboardId, pauseSubscriptions, subscriptions); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskDashboardPb.class) + .add("customSubject", customSubject) + .add("dashboardId", dashboardId) + .add("pauseSubscriptions", pauseSubscriptions) + .add("subscriptions", subscriptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java index 0560efe12..0da2dbdce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated 
+@JsonSerialize(using = SqlTaskFile.SqlTaskFileSerializer.class) +@JsonDeserialize(using = SqlTaskFile.SqlTaskFileDeserializer.class) public class SqlTaskFile { /** * Path of the SQL file. Must be relative if the source is a remote Git repository and absolute * for workspace paths. */ - @JsonProperty("path") private String path; /** @@ -25,7 +35,6 @@ public class SqlTaskFile { *

* `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in * cloud Git provider. */ - @JsonProperty("source") private Source source; public SqlTaskFile setPath(String path) { @@ -63,4 +72,39 @@ public int hashCode() { public String toString() { return new ToStringer(SqlTaskFile.class).add("path", path).add("source", source).toString(); } + + SqlTaskFilePb toPb() { + SqlTaskFilePb pb = new SqlTaskFilePb(); + pb.setPath(path); + pb.setSource(source); + + return pb; + } + + static SqlTaskFile fromPb(SqlTaskFilePb pb) { + SqlTaskFile model = new SqlTaskFile(); + model.setPath(pb.getPath()); + model.setSource(pb.getSource()); + + return model; + } + + public static class SqlTaskFileSerializer extends JsonSerializer { + @Override + public void serialize(SqlTaskFile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskFilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskFileDeserializer extends JsonDeserializer { + @Override + public SqlTaskFile deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskFilePb pb = mapper.readValue(p, SqlTaskFilePb.class); + return SqlTaskFile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFilePb.java new file mode 100755 index 000000000..fd1b9a27b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFilePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlTaskFilePb { + @JsonProperty("path") + private String path; + + @JsonProperty("source") + private Source source; + + public SqlTaskFilePb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public SqlTaskFilePb setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskFilePb that = (SqlTaskFilePb) o; + return Objects.equals(path, that.path) && Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(path, source); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskFilePb.class).add("path", path).add("source", source).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskPb.java new file mode 100755 index 000000000..471634d5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class SqlTaskPb { + @JsonProperty("alert") + private SqlTaskAlert alert; + + @JsonProperty("dashboard") + private SqlTaskDashboard dashboard; + + @JsonProperty("file") + private SqlTaskFile file; + + @JsonProperty("parameters") + private Map parameters; + + @JsonProperty("query") + private SqlTaskQuery query; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public SqlTaskPb setAlert(SqlTaskAlert alert) { + this.alert = alert; + return this; + } + + public SqlTaskAlert getAlert() { + return alert; + } + + public SqlTaskPb setDashboard(SqlTaskDashboard dashboard) { + this.dashboard = dashboard; + return this; + } + + public SqlTaskDashboard getDashboard() { + return dashboard; + } + + public SqlTaskPb setFile(SqlTaskFile file) { + this.file = file; + return this; + } + + public SqlTaskFile getFile() { + return file; + } + + public SqlTaskPb setParameters(Map parameters) { + this.parameters = parameters; + return this; + } + + public Map getParameters() { + return parameters; + } + + public SqlTaskPb setQuery(SqlTaskQuery query) { + this.query = query; + return this; + } + + public SqlTaskQuery getQuery() { + return query; + } + + public SqlTaskPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskPb that = (SqlTaskPb) o; + return Objects.equals(alert, that.alert) + && Objects.equals(dashboard, that.dashboard) + && Objects.equals(file, that.file) + && Objects.equals(parameters, that.parameters) + && Objects.equals(query, that.query) + && 
Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(alert, dashboard, file, parameters, query, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskPb.class) + .add("alert", alert) + .add("dashboard", dashboard) + .add("file", file) + .add("parameters", parameters) + .add("query", query) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQuery.java index 3552e88ef..214a252c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQuery.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlTaskQuery.SqlTaskQuerySerializer.class) +@JsonDeserialize(using = SqlTaskQuery.SqlTaskQueryDeserializer.class) public class SqlTaskQuery { /** The canonical identifier of the SQL query. 
*/ - @JsonProperty("query_id") private String queryId; public SqlTaskQuery setQueryId(String queryId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(SqlTaskQuery.class).add("queryId", queryId).toString(); } + + SqlTaskQueryPb toPb() { + SqlTaskQueryPb pb = new SqlTaskQueryPb(); + pb.setQueryId(queryId); + + return pb; + } + + static SqlTaskQuery fromPb(SqlTaskQueryPb pb) { + SqlTaskQuery model = new SqlTaskQuery(); + model.setQueryId(pb.getQueryId()); + + return model; + } + + public static class SqlTaskQuerySerializer extends JsonSerializer { + @Override + public void serialize(SqlTaskQuery value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskQueryDeserializer extends JsonDeserializer { + @Override + public SqlTaskQuery deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskQueryPb pb = mapper.readValue(p, SqlTaskQueryPb.class); + return SqlTaskQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQueryPb.java new file mode 100755 index 000000000..6d6a99f9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQueryPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlTaskQueryPb { + @JsonProperty("query_id") + private String queryId; + + public SqlTaskQueryPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskQueryPb that = (SqlTaskQueryPb) o; + return Objects.equals(queryId, that.queryId); + } + + @Override + public int hashCode() { + return Objects.hash(queryId); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskQueryPb.class).add("queryId", queryId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscription.java index 7854bce14..614c5054e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscription.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlTaskSubscription.SqlTaskSubscriptionSerializer.class) +@JsonDeserialize(using = SqlTaskSubscription.SqlTaskSubscriptionDeserializer.class) public class SqlTaskSubscription { /** * The canonical identifier of the destination to receive email notification. This parameter is * mutually exclusive with user_name. You cannot set both destination_id and user_name for * subscription notifications. */ - @JsonProperty("destination_id") private String destinationId; /** @@ -22,7 +32,6 @@ public class SqlTaskSubscription { * destination_id. You cannot set both destination_id and user_name for subscription * notifications. */ - @JsonProperty("user_name") private String userName; public SqlTaskSubscription setDestinationId(String destinationId) { @@ -64,4 +73,41 @@ public String toString() { .add("userName", userName) .toString(); } + + SqlTaskSubscriptionPb toPb() { + SqlTaskSubscriptionPb pb = new SqlTaskSubscriptionPb(); + pb.setDestinationId(destinationId); + pb.setUserName(userName); + + return pb; + } + + static SqlTaskSubscription fromPb(SqlTaskSubscriptionPb pb) { + SqlTaskSubscription model = new SqlTaskSubscription(); + model.setDestinationId(pb.getDestinationId()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class SqlTaskSubscriptionSerializer extends JsonSerializer { + @Override + public void serialize(SqlTaskSubscription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlTaskSubscriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlTaskSubscriptionDeserializer + extends JsonDeserializer { + @Override + public SqlTaskSubscription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlTaskSubscriptionPb pb = mapper.readValue(p, SqlTaskSubscriptionPb.class); + return SqlTaskSubscription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscriptionPb.java new file mode 100755 index 000000000..32842c430 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlTaskSubscriptionPb { + @JsonProperty("destination_id") + private String destinationId; + + @JsonProperty("user_name") + private String userName; + + public SqlTaskSubscriptionPb setDestinationId(String destinationId) { + this.destinationId = destinationId; + return this; + } + + public String getDestinationId() { + return destinationId; + } + + public SqlTaskSubscriptionPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlTaskSubscriptionPb that = (SqlTaskSubscriptionPb) o; + return Objects.equals(destinationId, that.destinationId) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(destinationId, userName); + } + + @Override + public String toString() { + return new ToStringer(SqlTaskSubscriptionPb.class) + .add("destinationId", destinationId) + .add("userName", userName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index 694a4df67..101133e79 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -4,32 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SubmitRun.SubmitRunSerializer.class) +@JsonDeserialize(using = SubmitRun.SubmitRunDeserializer.class) public class SubmitRun { /** List of permissions to set on the job. */ - @JsonProperty("access_control_list") private Collection accessControlList; /** * The user specified id of the budget policy to use for this one-time run. If not specified, the * run will be not be attributed to any budget policy. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** An optional set of email addresses notified when the run begins or completes. */ - @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; /** * A list of task execution environment specifications that can be referenced by tasks of this * run. 
*/ - @JsonProperty("environments") private Collection environments; /** @@ -43,11 +50,9 @@ public class SubmitRun { *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ - @JsonProperty("git_source") private GitSource gitSource; /** An optional set of health rules that can be defined for this job. */ - @JsonProperty("health") private JobsHealthRules health; /** @@ -65,41 +70,33 @@ public class SubmitRun { * *

[How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html */ - @JsonProperty("idempotency_token") private String idempotencyToken; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this run. */ - @JsonProperty("notification_settings") private JobNotificationSettings notificationSettings; /** The queue settings of the one-time run. */ - @JsonProperty("queue") private QueueSettings queue; /** * Specifies the user or service principal that the job runs as. If not specified, the job runs as * the user who submits the request. */ - @JsonProperty("run_as") private JobRunAs runAs; /** An optional name for the run. The default value is `Untitled`. */ - @JsonProperty("run_name") private String runName; /** */ - @JsonProperty("tasks") private Collection tasks; /** An optional timeout applied to each run of this job. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** A collection of system notification IDs to notify when the run begins or completes. 
*/ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; public SubmitRun setAccessControlList(Collection accessControlList) { @@ -287,4 +284,63 @@ public String toString() { .add("webhookNotifications", webhookNotifications) .toString(); } + + SubmitRunPb toPb() { + SubmitRunPb pb = new SubmitRunPb(); + pb.setAccessControlList(accessControlList); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setEmailNotifications(emailNotifications); + pb.setEnvironments(environments); + pb.setGitSource(gitSource); + pb.setHealth(health); + pb.setIdempotencyToken(idempotencyToken); + pb.setNotificationSettings(notificationSettings); + pb.setQueue(queue); + pb.setRunAs(runAs); + pb.setRunName(runName); + pb.setTasks(tasks); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setWebhookNotifications(webhookNotifications); + + return pb; + } + + static SubmitRun fromPb(SubmitRunPb pb) { + SubmitRun model = new SubmitRun(); + model.setAccessControlList(pb.getAccessControlList()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setEmailNotifications(pb.getEmailNotifications()); + model.setEnvironments(pb.getEnvironments()); + model.setGitSource(pb.getGitSource()); + model.setHealth(pb.getHealth()); + model.setIdempotencyToken(pb.getIdempotencyToken()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setQueue(pb.getQueue()); + model.setRunAs(pb.getRunAs()); + model.setRunName(pb.getRunName()); + model.setTasks(pb.getTasks()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + + return model; + } + + public static class SubmitRunSerializer extends JsonSerializer { + @Override + public void serialize(SubmitRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubmitRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubmitRunDeserializer extends JsonDeserializer { + @Override + public 
SubmitRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubmitRunPb pb = mapper.readValue(p, SubmitRunPb.class); + return SubmitRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunPb.java new file mode 100755 index 000000000..f7dfa8749 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SubmitRunPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + + @JsonProperty("environments") + private Collection environments; + + @JsonProperty("git_source") + private GitSource gitSource; + + @JsonProperty("health") + private JobsHealthRules health; + + @JsonProperty("idempotency_token") + private String idempotencyToken; + + @JsonProperty("notification_settings") + private JobNotificationSettings notificationSettings; + + @JsonProperty("queue") + private QueueSettings queue; + + @JsonProperty("run_as") + private JobRunAs runAs; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("tasks") + private Collection tasks; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("webhook_notifications") + private 
WebhookNotifications webhookNotifications; + + public SubmitRunPb setAccessControlList(Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public SubmitRunPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public SubmitRunPb setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public SubmitRunPb setEnvironments(Collection environments) { + this.environments = environments; + return this; + } + + public Collection getEnvironments() { + return environments; + } + + public SubmitRunPb setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + + public SubmitRunPb setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + + public SubmitRunPb setIdempotencyToken(String idempotencyToken) { + this.idempotencyToken = idempotencyToken; + return this; + } + + public String getIdempotencyToken() { + return idempotencyToken; + } + + public SubmitRunPb setNotificationSettings(JobNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public JobNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public SubmitRunPb setQueue(QueueSettings queue) { + this.queue = queue; + return this; + } + + public QueueSettings getQueue() { + return queue; + } + + public SubmitRunPb setRunAs(JobRunAs runAs) { + this.runAs = runAs; + return this; + } + + public JobRunAs getRunAs() { + return runAs; + } + 
+ public SubmitRunPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public SubmitRunPb setTasks(Collection tasks) { + this.tasks = tasks; + return this; + } + + public Collection getTasks() { + return tasks; + } + + public SubmitRunPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public SubmitRunPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubmitRunPb that = (SubmitRunPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environments, that.environments) + && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) + && Objects.equals(idempotencyToken, that.idempotencyToken) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(queue, that.queue) + && Objects.equals(runAs, that.runAs) + && Objects.equals(runName, that.runName) + && Objects.equals(tasks, that.tasks) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); + } + + @Override + public int hashCode() { + return Objects.hash( + accessControlList, + budgetPolicyId, + emailNotifications, + environments, + gitSource, + health, + idempotencyToken, + notificationSettings, + queue, + runAs, + runName, + tasks, + timeoutSeconds, + webhookNotifications); + } + + @Override + public String 
toString() { + return new ToStringer(SubmitRunPb.class) + .add("accessControlList", accessControlList) + .add("budgetPolicyId", budgetPolicyId) + .add("emailNotifications", emailNotifications) + .add("environments", environments) + .add("gitSource", gitSource) + .add("health", health) + .add("idempotencyToken", idempotencyToken) + .add("notificationSettings", notificationSettings) + .add("queue", queue) + .add("runAs", runAs) + .add("runName", runName) + .add("tasks", tasks) + .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java index 1849bf903..8b0a13c7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Run was created and started successfully. 
*/ @Generated +@JsonSerialize(using = SubmitRunResponse.SubmitRunResponseSerializer.class) +@JsonDeserialize(using = SubmitRunResponse.SubmitRunResponseDeserializer.class) public class SubmitRunResponse { /** The canonical identifier for the newly submitted run. */ - @JsonProperty("run_id") private Long runId; public SubmitRunResponse setRunId(Long runId) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(SubmitRunResponse.class).add("runId", runId).toString(); } + + SubmitRunResponsePb toPb() { + SubmitRunResponsePb pb = new SubmitRunResponsePb(); + pb.setRunId(runId); + + return pb; + } + + static SubmitRunResponse fromPb(SubmitRunResponsePb pb) { + SubmitRunResponse model = new SubmitRunResponse(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class SubmitRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(SubmitRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubmitRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubmitRunResponseDeserializer extends JsonDeserializer { + @Override + public SubmitRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubmitRunResponsePb pb = mapper.readValue(p, SubmitRunResponsePb.class); + return SubmitRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponsePb.java new file mode 100755 index 000000000..2cde0d1b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Run was created and started successfully. */ +@Generated +class SubmitRunResponsePb { + @JsonProperty("run_id") + private Long runId; + + public SubmitRunResponsePb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubmitRunResponsePb that = (SubmitRunResponsePb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(SubmitRunResponsePb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 1627e870c..fe3f29f5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -4,18 +4,28 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SubmitTask.SubmitTaskSerializer.class) +@JsonDeserialize(using = SubmitTask.SubmitTaskDeserializer.class) public class SubmitTask { /** * The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. * *

[clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html */ - @JsonProperty("clean_rooms_notebook_task") private CleanRoomsNotebookTask cleanRoomsNotebookTask; /** @@ -23,18 +33,18 @@ public class SubmitTask { * the `condition_task` field is present. The condition task does not require a cluster to execute * and does not support retries or notifications. */ - @JsonProperty("condition_task") private ConditionTask conditionTask; /** The task refreshes a dashboard and sends a snapshot to subscribers. */ - @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. */ - @JsonProperty("dbt_task") private DbtTask dbtTask; /** @@ -42,25 +52,21 @@ public class SubmitTask { * in this field must complete successfully before executing this task. The key is `task_key`, and * the value is the name assigned to the dependent task. */ - @JsonProperty("depends_on") private Collection dependsOn; /** An optional description for this task. */ - @JsonProperty("description") private String description; /** * An optional set of email addresses notified when the task run begins or completes. The default * behavior is to not send any emails. */ - @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; /** * The key that references an environment spec in a job. This field is required for Python script, * Python wheel and dbt tasks when using serverless compute. */ - @JsonProperty("environment_key") private String environmentKey; /** @@ -68,61 +74,50 @@ public class SubmitTask { * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops * responding. 
We suggest running jobs and tasks on new clusters for greater reliability */ - @JsonProperty("existing_cluster_id") private String existingClusterId; /** * The task executes a nested task for every input provided when the `for_each_task` field is * present. */ - @JsonProperty("for_each_task") private ForEachTask forEachTask; /** */ - @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; /** An optional set of health rules that can be defined for this job. */ - @JsonProperty("health") private JobsHealthRules health; /** * An optional list of libraries to be installed on the cluster. The default value is an empty * list. */ - @JsonProperty("libraries") private Collection libraries; /** If new_cluster, a description of a new cluster that is created for each run. */ - @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; /** The task runs a notebook when the `notebook_task` field is present. */ - @JsonProperty("notebook_task") private NotebookTask notebookTask; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this task run. */ - @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; /** * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines * configured to use triggered more are supported. */ - @JsonProperty("pipeline_task") private PipelineTask pipelineTask; /** * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. */ - @JsonProperty("power_bi_task") private PowerBiTask powerBiTask; /** The task runs a Python wheel when the `python_wheel_task` field is present. */ - @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; /** @@ -130,19 +125,15 @@ public class SubmitTask { * its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. 
See * :method:jobs/create for a list of possible values. */ - @JsonProperty("run_if") private RunIf runIf; /** The task triggers another job when the `run_job_task` field is present. */ - @JsonProperty("run_job_task") private RunJobTask runJobTask; /** The task runs a JAR when the `spark_jar_task` field is present. */ - @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; /** The task runs a Python file when the `spark_python_task` field is present. */ - @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** @@ -162,14 +153,12 @@ public class SubmitTask { * *

The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. */ - @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; /** * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when * the `sql_task` field is present. */ - @JsonProperty("sql_task") private SqlTask sqlTask; /** @@ -177,11 +166,9 @@ public class SubmitTask { * field is required and must be unique within its parent job. On Update or Reset, this field is * used to reference the tasks to be updated or reset. */ - @JsonProperty("task_key") private String taskKey; /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** @@ -189,7 +176,6 @@ public class SubmitTask { * behavior is to not send any system notifications. Task webhooks respect the task notification * settings. */ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; public SubmitTask setCleanRoomsNotebookTask(CleanRoomsNotebookTask cleanRoomsNotebookTask) { @@ -219,6 +205,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public SubmitTask setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public SubmitTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -452,6 +447,7 @@ public boolean equals(Object o) { return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -485,6 +481,7 @@ public int hashCode() { cleanRoomsNotebookTask, conditionTask, dashboardTask, + 
dbtCloudTask, dbtTask, dependsOn, description, @@ -518,6 +515,7 @@ public String toString() { .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -545,4 +543,93 @@ public String toString() { .add("webhookNotifications", webhookNotifications) .toString(); } + + SubmitTaskPb toPb() { + SubmitTaskPb pb = new SubmitTaskPb(); + pb.setCleanRoomsNotebookTask(cleanRoomsNotebookTask); + pb.setConditionTask(conditionTask); + pb.setDashboardTask(dashboardTask); + pb.setDbtCloudTask(dbtCloudTask); + pb.setDbtTask(dbtTask); + pb.setDependsOn(dependsOn); + pb.setDescription(description); + pb.setEmailNotifications(emailNotifications); + pb.setEnvironmentKey(environmentKey); + pb.setExistingClusterId(existingClusterId); + pb.setForEachTask(forEachTask); + pb.setGenAiComputeTask(genAiComputeTask); + pb.setHealth(health); + pb.setLibraries(libraries); + pb.setNewCluster(newCluster); + pb.setNotebookTask(notebookTask); + pb.setNotificationSettings(notificationSettings); + pb.setPipelineTask(pipelineTask); + pb.setPowerBiTask(powerBiTask); + pb.setPythonWheelTask(pythonWheelTask); + pb.setRunIf(runIf); + pb.setRunJobTask(runJobTask); + pb.setSparkJarTask(sparkJarTask); + pb.setSparkPythonTask(sparkPythonTask); + pb.setSparkSubmitTask(sparkSubmitTask); + pb.setSqlTask(sqlTask); + pb.setTaskKey(taskKey); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setWebhookNotifications(webhookNotifications); + + return pb; + } + + static SubmitTask fromPb(SubmitTaskPb pb) { + SubmitTask model = new SubmitTask(); + model.setCleanRoomsNotebookTask(pb.getCleanRoomsNotebookTask()); + model.setConditionTask(pb.getConditionTask()); + model.setDashboardTask(pb.getDashboardTask()); + model.setDbtCloudTask(pb.getDbtCloudTask()); + model.setDbtTask(pb.getDbtTask()); + model.setDependsOn(pb.getDependsOn()); + 
model.setDescription(pb.getDescription()); + model.setEmailNotifications(pb.getEmailNotifications()); + model.setEnvironmentKey(pb.getEnvironmentKey()); + model.setExistingClusterId(pb.getExistingClusterId()); + model.setForEachTask(pb.getForEachTask()); + model.setGenAiComputeTask(pb.getGenAiComputeTask()); + model.setHealth(pb.getHealth()); + model.setLibraries(pb.getLibraries()); + model.setNewCluster(pb.getNewCluster()); + model.setNotebookTask(pb.getNotebookTask()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setPipelineTask(pb.getPipelineTask()); + model.setPowerBiTask(pb.getPowerBiTask()); + model.setPythonWheelTask(pb.getPythonWheelTask()); + model.setRunIf(pb.getRunIf()); + model.setRunJobTask(pb.getRunJobTask()); + model.setSparkJarTask(pb.getSparkJarTask()); + model.setSparkPythonTask(pb.getSparkPythonTask()); + model.setSparkSubmitTask(pb.getSparkSubmitTask()); + model.setSqlTask(pb.getSqlTask()); + model.setTaskKey(pb.getTaskKey()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + + return model; + } + + public static class SubmitTaskSerializer extends JsonSerializer { + @Override + public void serialize(SubmitTask value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubmitTaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubmitTaskDeserializer extends JsonDeserializer { + @Override + public SubmitTask deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubmitTaskPb pb = mapper.readValue(p, SubmitTaskPb.class); + return SubmitTask.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTaskPb.java new file mode 100755 index 000000000..434a446b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTaskPb.java @@ -0,0 +1,466 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SubmitTaskPb { + @JsonProperty("clean_rooms_notebook_task") + private CleanRoomsNotebookTask cleanRoomsNotebookTask; + + @JsonProperty("condition_task") + private ConditionTask conditionTask; + + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + + @JsonProperty("dbt_task") + private DbtTask dbtTask; + + @JsonProperty("depends_on") + private Collection dependsOn; + + @JsonProperty("description") + private String description; + + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + + @JsonProperty("environment_key") + private String environmentKey; + + @JsonProperty("existing_cluster_id") + private String existingClusterId; + + @JsonProperty("for_each_task") + private ForEachTask forEachTask; + + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + + @JsonProperty("health") + private JobsHealthRules health; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("new_cluster") + private com.databricks.sdk.service.compute.ClusterSpec 
newCluster; + + @JsonProperty("notebook_task") + private NotebookTask notebookTask; + + @JsonProperty("notification_settings") + private TaskNotificationSettings notificationSettings; + + @JsonProperty("pipeline_task") + private PipelineTask pipelineTask; + + @JsonProperty("power_bi_task") + private PowerBiTask powerBiTask; + + @JsonProperty("python_wheel_task") + private PythonWheelTask pythonWheelTask; + + @JsonProperty("run_if") + private RunIf runIf; + + @JsonProperty("run_job_task") + private RunJobTask runJobTask; + + @JsonProperty("spark_jar_task") + private SparkJarTask sparkJarTask; + + @JsonProperty("spark_python_task") + private SparkPythonTask sparkPythonTask; + + @JsonProperty("spark_submit_task") + private SparkSubmitTask sparkSubmitTask; + + @JsonProperty("sql_task") + private SqlTask sqlTask; + + @JsonProperty("task_key") + private String taskKey; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + + public SubmitTaskPb setCleanRoomsNotebookTask(CleanRoomsNotebookTask cleanRoomsNotebookTask) { + this.cleanRoomsNotebookTask = cleanRoomsNotebookTask; + return this; + } + + public CleanRoomsNotebookTask getCleanRoomsNotebookTask() { + return cleanRoomsNotebookTask; + } + + public SubmitTaskPb setConditionTask(ConditionTask conditionTask) { + this.conditionTask = conditionTask; + return this; + } + + public ConditionTask getConditionTask() { + return conditionTask; + } + + public SubmitTaskPb setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + + public SubmitTaskPb setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + + public SubmitTaskPb setDbtTask(DbtTask dbtTask) { + this.dbtTask = dbtTask; + 
return this; + } + + public DbtTask getDbtTask() { + return dbtTask; + } + + public SubmitTaskPb setDependsOn(Collection dependsOn) { + this.dependsOn = dependsOn; + return this; + } + + public Collection getDependsOn() { + return dependsOn; + } + + public SubmitTaskPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public SubmitTaskPb setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public SubmitTaskPb setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + + public SubmitTaskPb setExistingClusterId(String existingClusterId) { + this.existingClusterId = existingClusterId; + return this; + } + + public String getExistingClusterId() { + return existingClusterId; + } + + public SubmitTaskPb setForEachTask(ForEachTask forEachTask) { + this.forEachTask = forEachTask; + return this; + } + + public ForEachTask getForEachTask() { + return forEachTask; + } + + public SubmitTaskPb setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + + public SubmitTaskPb setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + + public SubmitTaskPb setLibraries( + Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public SubmitTaskPb setNewCluster(com.databricks.sdk.service.compute.ClusterSpec newCluster) { + this.newCluster = newCluster; + return this; + } + + public 
com.databricks.sdk.service.compute.ClusterSpec getNewCluster() { + return newCluster; + } + + public SubmitTaskPb setNotebookTask(NotebookTask notebookTask) { + this.notebookTask = notebookTask; + return this; + } + + public NotebookTask getNotebookTask() { + return notebookTask; + } + + public SubmitTaskPb setNotificationSettings(TaskNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public TaskNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public SubmitTaskPb setPipelineTask(PipelineTask pipelineTask) { + this.pipelineTask = pipelineTask; + return this; + } + + public PipelineTask getPipelineTask() { + return pipelineTask; + } + + public SubmitTaskPb setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + + public SubmitTaskPb setPythonWheelTask(PythonWheelTask pythonWheelTask) { + this.pythonWheelTask = pythonWheelTask; + return this; + } + + public PythonWheelTask getPythonWheelTask() { + return pythonWheelTask; + } + + public SubmitTaskPb setRunIf(RunIf runIf) { + this.runIf = runIf; + return this; + } + + public RunIf getRunIf() { + return runIf; + } + + public SubmitTaskPb setRunJobTask(RunJobTask runJobTask) { + this.runJobTask = runJobTask; + return this; + } + + public RunJobTask getRunJobTask() { + return runJobTask; + } + + public SubmitTaskPb setSparkJarTask(SparkJarTask sparkJarTask) { + this.sparkJarTask = sparkJarTask; + return this; + } + + public SparkJarTask getSparkJarTask() { + return sparkJarTask; + } + + public SubmitTaskPb setSparkPythonTask(SparkPythonTask sparkPythonTask) { + this.sparkPythonTask = sparkPythonTask; + return this; + } + + public SparkPythonTask getSparkPythonTask() { + return sparkPythonTask; + } + + public SubmitTaskPb setSparkSubmitTask(SparkSubmitTask sparkSubmitTask) { + this.sparkSubmitTask = 
sparkSubmitTask; + return this; + } + + public SparkSubmitTask getSparkSubmitTask() { + return sparkSubmitTask; + } + + public SubmitTaskPb setSqlTask(SqlTask sqlTask) { + this.sqlTask = sqlTask; + return this; + } + + public SqlTask getSqlTask() { + return sqlTask; + } + + public SubmitTaskPb setTaskKey(String taskKey) { + this.taskKey = taskKey; + return this; + } + + public String getTaskKey() { + return taskKey; + } + + public SubmitTaskPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public SubmitTaskPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubmitTaskPb that = (SubmitTaskPb) o; + return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) + && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) + && Objects.equals(dbtTask, that.dbtTask) + && Objects.equals(dependsOn, that.dependsOn) + && Objects.equals(description, that.description) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environmentKey, that.environmentKey) + && Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) + && Objects.equals(health, that.health) + && Objects.equals(libraries, that.libraries) + && Objects.equals(newCluster, that.newCluster) + && Objects.equals(notebookTask, that.notebookTask) + && Objects.equals(notificationSettings, that.notificationSettings) + && 
Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) + && Objects.equals(pythonWheelTask, that.pythonWheelTask) + && Objects.equals(runIf, that.runIf) + && Objects.equals(runJobTask, that.runJobTask) + && Objects.equals(sparkJarTask, that.sparkJarTask) + && Objects.equals(sparkPythonTask, that.sparkPythonTask) + && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) + && Objects.equals(sqlTask, that.sqlTask) + && Objects.equals(taskKey, that.taskKey) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); + } + + @Override + public int hashCode() { + return Objects.hash( + cleanRoomsNotebookTask, + conditionTask, + dashboardTask, + dbtCloudTask, + dbtTask, + dependsOn, + description, + emailNotifications, + environmentKey, + existingClusterId, + forEachTask, + genAiComputeTask, + health, + libraries, + newCluster, + notebookTask, + notificationSettings, + pipelineTask, + powerBiTask, + pythonWheelTask, + runIf, + runJobTask, + sparkJarTask, + sparkPythonTask, + sparkSubmitTask, + sqlTask, + taskKey, + timeoutSeconds, + webhookNotifications); + } + + @Override + public String toString() { + return new ToStringer(SubmitTaskPb.class) + .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) + .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) + .add("dbtTask", dbtTask) + .add("dependsOn", dependsOn) + .add("description", description) + .add("emailNotifications", emailNotifications) + .add("environmentKey", environmentKey) + .add("existingClusterId", existingClusterId) + .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) + .add("health", health) + .add("libraries", libraries) + .add("newCluster", newCluster) + .add("notebookTask", notebookTask) + .add("notificationSettings", notificationSettings) + .add("pipelineTask", pipelineTask) + .add("powerBiTask", 
powerBiTask) + .add("pythonWheelTask", pythonWheelTask) + .add("runIf", runIf) + .add("runJobTask", runJobTask) + .add("sparkJarTask", sparkJarTask) + .add("sparkPythonTask", sparkPythonTask) + .add("sparkSubmitTask", sparkSubmitTask) + .add("sqlTask", sqlTask) + .add("taskKey", taskKey) + .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java index 5a9d182b0..fd9ff16a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Subscription.SubscriptionSerializer.class) +@JsonDeserialize(using = Subscription.SubscriptionDeserializer.class) public class Subscription { /** Optional: Allows users to specify a custom subject line on the email sent to subscribers. */ - @JsonProperty("custom_subject") private String customSubject; /** When true, the subscription will not send emails. 
*/ - @JsonProperty("paused") private Boolean paused; /** The list of subscribers to send the snapshot of the dashboard to. */ - @JsonProperty("subscribers") private Collection subscribers; public Subscription setCustomSubject(String customSubject) { @@ -72,4 +80,41 @@ public String toString() { .add("subscribers", subscribers) .toString(); } + + SubscriptionPb toPb() { + SubscriptionPb pb = new SubscriptionPb(); + pb.setCustomSubject(customSubject); + pb.setPaused(paused); + pb.setSubscribers(subscribers); + + return pb; + } + + static Subscription fromPb(SubscriptionPb pb) { + Subscription model = new Subscription(); + model.setCustomSubject(pb.getCustomSubject()); + model.setPaused(pb.getPaused()); + model.setSubscribers(pb.getSubscribers()); + + return model; + } + + public static class SubscriptionSerializer extends JsonSerializer { + @Override + public void serialize(Subscription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriptionDeserializer extends JsonDeserializer { + @Override + public Subscription deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriptionPb pb = mapper.readValue(p, SubscriptionPb.class); + return Subscription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionPb.java new file mode 100755 index 000000000..c0bf36e0b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SubscriptionPb { + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("paused") + private Boolean paused; + + @JsonProperty("subscribers") + private Collection subscribers; + + public SubscriptionPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public SubscriptionPb setPaused(Boolean paused) { + this.paused = paused; + return this; + } + + public Boolean getPaused() { + return paused; + } + + public SubscriptionPb setSubscribers(Collection subscribers) { + this.subscribers = subscribers; + return this; + } + + public Collection getSubscribers() { + return subscribers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionPb that = (SubscriptionPb) o; + return Objects.equals(customSubject, that.customSubject) + && Objects.equals(paused, that.paused) + && Objects.equals(subscribers, that.subscribers); + } + + @Override + public int hashCode() { + return Objects.hash(customSubject, paused, subscribers); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionPb.class) + .add("customSubject", customSubject) + .add("paused", paused) + .add("subscribers", subscribers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java index 43fa8e734..59788c5ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SubscriptionSubscriber.SubscriptionSubscriberSerializer.class) +@JsonDeserialize(using = SubscriptionSubscriber.SubscriptionSubscriberDeserializer.class) public class SubscriptionSubscriber { /** * A snapshot of the dashboard will be sent to the destination when the `destination_id` field is * present. */ - @JsonProperty("destination_id") private String destinationId; /** * A snapshot of the dashboard will be sent to the user's email when the `user_name` field is * present. 
*/ - @JsonProperty("user_name") private String userName; public SubscriptionSubscriber setDestinationId(String destinationId) { @@ -62,4 +71,43 @@ public String toString() { .add("userName", userName) .toString(); } + + SubscriptionSubscriberPb toPb() { + SubscriptionSubscriberPb pb = new SubscriptionSubscriberPb(); + pb.setDestinationId(destinationId); + pb.setUserName(userName); + + return pb; + } + + static SubscriptionSubscriber fromPb(SubscriptionSubscriberPb pb) { + SubscriptionSubscriber model = new SubscriptionSubscriber(); + model.setDestinationId(pb.getDestinationId()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class SubscriptionSubscriberSerializer + extends JsonSerializer { + @Override + public void serialize( + SubscriptionSubscriber value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SubscriptionSubscriberPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SubscriptionSubscriberDeserializer + extends JsonDeserializer { + @Override + public SubscriptionSubscriber deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SubscriptionSubscriberPb pb = mapper.readValue(p, SubscriptionSubscriberPb.class); + return SubscriptionSubscriber.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriberPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriberPb.java new file mode 100755 index 000000000..e00cc8a2e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriberPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SubscriptionSubscriberPb { + @JsonProperty("destination_id") + private String destinationId; + + @JsonProperty("user_name") + private String userName; + + public SubscriptionSubscriberPb setDestinationId(String destinationId) { + this.destinationId = destinationId; + return this; + } + + public String getDestinationId() { + return destinationId; + } + + public SubscriptionSubscriberPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionSubscriberPb that = (SubscriptionSubscriberPb) o; + return Objects.equals(destinationId, that.destinationId) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(destinationId, userName); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionSubscriberPb.class) + .add("destinationId", destinationId) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java index db8989c2e..de593455f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java @@ -4,28 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = TableUpdateTriggerConfiguration.TableUpdateTriggerConfigurationSerializer.class) +@JsonDeserialize( + using = TableUpdateTriggerConfiguration.TableUpdateTriggerConfigurationDeserializer.class) public class TableUpdateTriggerConfiguration { /** The table(s) condition based on which to trigger a job run. */ - @JsonProperty("condition") private Condition condition; /** * If set, the trigger starts a run only after the specified amount of time has passed since the * last time the trigger fired. The minimum allowed value is 60 seconds. */ - @JsonProperty("min_time_between_triggers_seconds") private Long minTimeBetweenTriggersSeconds; /** * A list of Delta tables to monitor for changes. The table name must be in the format * `catalog_name.schema_name.table_name`. */ - @JsonProperty("table_names") private Collection tableNames; /** @@ -33,7 +43,6 @@ public class TableUpdateTriggerConfiguration { * time and can be used to wait for a series of table updates before triggering a run. The minimum * allowed value is 60 seconds. 
*/ - @JsonProperty("wait_after_last_change_seconds") private Long waitAfterLastChangeSeconds; public TableUpdateTriggerConfiguration setCondition(Condition condition) { @@ -100,4 +109,48 @@ public String toString() { .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds) .toString(); } + + TableUpdateTriggerConfigurationPb toPb() { + TableUpdateTriggerConfigurationPb pb = new TableUpdateTriggerConfigurationPb(); + pb.setCondition(condition); + pb.setMinTimeBetweenTriggersSeconds(minTimeBetweenTriggersSeconds); + pb.setTableNames(tableNames); + pb.setWaitAfterLastChangeSeconds(waitAfterLastChangeSeconds); + + return pb; + } + + static TableUpdateTriggerConfiguration fromPb(TableUpdateTriggerConfigurationPb pb) { + TableUpdateTriggerConfiguration model = new TableUpdateTriggerConfiguration(); + model.setCondition(pb.getCondition()); + model.setMinTimeBetweenTriggersSeconds(pb.getMinTimeBetweenTriggersSeconds()); + model.setTableNames(pb.getTableNames()); + model.setWaitAfterLastChangeSeconds(pb.getWaitAfterLastChangeSeconds()); + + return model; + } + + public static class TableUpdateTriggerConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + TableUpdateTriggerConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableUpdateTriggerConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableUpdateTriggerConfigurationDeserializer + extends JsonDeserializer { + @Override + public TableUpdateTriggerConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableUpdateTriggerConfigurationPb pb = + mapper.readValue(p, TableUpdateTriggerConfigurationPb.class); + return TableUpdateTriggerConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfigurationPb.java new file mode 100755 index 000000000..ecfcb392b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfigurationPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TableUpdateTriggerConfigurationPb { + @JsonProperty("condition") + private Condition condition; + + @JsonProperty("min_time_between_triggers_seconds") + private Long minTimeBetweenTriggersSeconds; + + @JsonProperty("table_names") + private Collection tableNames; + + @JsonProperty("wait_after_last_change_seconds") + private Long waitAfterLastChangeSeconds; + + public TableUpdateTriggerConfigurationPb setCondition(Condition condition) { + this.condition = condition; + return this; + } + + public Condition getCondition() { + return condition; + } + + public TableUpdateTriggerConfigurationPb setMinTimeBetweenTriggersSeconds( + Long minTimeBetweenTriggersSeconds) { + this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds; + return this; + } + + public Long getMinTimeBetweenTriggersSeconds() { + return minTimeBetweenTriggersSeconds; + } + + public TableUpdateTriggerConfigurationPb setTableNames(Collection tableNames) { + this.tableNames = tableNames; + return this; + } + + public Collection 
getTableNames() { + return tableNames; + } + + public TableUpdateTriggerConfigurationPb setWaitAfterLastChangeSeconds( + Long waitAfterLastChangeSeconds) { + this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds; + return this; + } + + public Long getWaitAfterLastChangeSeconds() { + return waitAfterLastChangeSeconds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableUpdateTriggerConfigurationPb that = (TableUpdateTriggerConfigurationPb) o; + return Objects.equals(condition, that.condition) + && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds) + && Objects.equals(tableNames, that.tableNames) + && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds); + } + + @Override + public int hashCode() { + return Objects.hash( + condition, minTimeBetweenTriggersSeconds, tableNames, waitAfterLastChangeSeconds); + } + + @Override + public String toString() { + return new ToStringer(TableUpdateTriggerConfigurationPb.class) + .add("condition", condition) + .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds) + .add("tableNames", tableNames) + .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 6729c4691..a70b7bd8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Task.TaskSerializer.class) +@JsonDeserialize(using = Task.TaskDeserializer.class) public class Task { /** * The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. * *

[clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html */ - @JsonProperty("clean_rooms_notebook_task") private CleanRoomsNotebookTask cleanRoomsNotebookTask; /** @@ -23,18 +33,18 @@ public class Task { * the `condition_task` field is present. The condition task does not require a cluster to execute * and does not support retries or notifications. */ - @JsonProperty("condition_task") private ConditionTask conditionTask; /** The task refreshes a dashboard and sends a snapshot to subscribers. */ - @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. */ - @JsonProperty("dbt_task") private DbtTask dbtTask; /** @@ -43,29 +53,24 @@ public class Task { * condition is true. The key is `task_key`, and the value is the name assigned to the dependent * task. */ - @JsonProperty("depends_on") private Collection dependsOn; /** An optional description for this task. */ - @JsonProperty("description") private String description; /** An option to disable auto optimization in serverless */ - @JsonProperty("disable_auto_optimization") private Boolean disableAutoOptimization; /** * An optional set of email addresses that is notified when runs of this task begin or complete as * well as when this task is deleted. The default behavior is to not send any emails. */ - @JsonProperty("email_notifications") private TaskEmailNotifications emailNotifications; /** * The key that references an environment spec in a job. This field is required for Python script, * Python wheel and dbt tasks when using serverless compute. 
*/ - @JsonProperty("environment_key") private String environmentKey; /** @@ -73,36 +78,30 @@ public class Task { * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops * responding. We suggest running jobs and tasks on new clusters for greater reliability */ - @JsonProperty("existing_cluster_id") private String existingClusterId; /** * The task executes a nested task for every input provided when the `for_each_task` field is * present. */ - @JsonProperty("for_each_task") private ForEachTask forEachTask; /** */ - @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; /** An optional set of health rules that can be defined for this job. */ - @JsonProperty("health") private JobsHealthRules health; /** * If job_cluster_key, this task is executed reusing the cluster specified in * `job.settings.job_clusters`. */ - @JsonProperty("job_cluster_key") private String jobClusterKey; /** * An optional list of libraries to be installed on the cluster. The default value is an empty * list. */ - @JsonProperty("libraries") private Collection libraries; /** @@ -111,53 +110,44 @@ public class Task { * `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never * retry. */ - @JsonProperty("max_retries") private Long maxRetries; /** * An optional minimal interval in milliseconds between the start of the failed run and the * subsequent retry run. The default behavior is that unsuccessful runs are immediately retried. */ - @JsonProperty("min_retry_interval_millis") private Long minRetryIntervalMillis; /** If new_cluster, a description of a new cluster that is created for each run. */ - @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; /** The task runs a notebook when the `notebook_task` field is present. 
*/ - @JsonProperty("notebook_task") private NotebookTask notebookTask; /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this task. */ - @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; /** * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines * configured to use triggered more are supported. */ - @JsonProperty("pipeline_task") private PipelineTask pipelineTask; /** * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. */ - @JsonProperty("power_bi_task") private PowerBiTask powerBiTask; /** The task runs a Python wheel when the `python_wheel_task` field is present. */ - @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; /** * An optional policy to specify whether to retry a job when it times out. The default behavior is * to not retry on timeout. */ - @JsonProperty("retry_on_timeout") private Boolean retryOnTimeout; /** @@ -170,19 +160,15 @@ public class Task { * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: ALl dependencies have * failed */ - @JsonProperty("run_if") private RunIf runIf; /** The task triggers another job when the `run_job_task` field is present. */ - @JsonProperty("run_job_task") private RunJobTask runJobTask; /** The task runs a JAR when the `spark_jar_task` field is present. */ - @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; /** The task runs a Python file when the `spark_python_task` field is present. */ - @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** @@ -202,14 +188,12 @@ public class Task { * *

The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. */ - @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; /** * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when * the `sql_task` field is present. */ - @JsonProperty("sql_task") private SqlTask sqlTask; /** @@ -217,18 +201,15 @@ public class Task { * field is required and must be unique within its parent job. On Update or Reset, this field is * used to reference the tasks to be updated or reset. */ - @JsonProperty("task_key") private String taskKey; /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ - @JsonProperty("timeout_seconds") private Long timeoutSeconds; /** * A collection of system notification IDs to notify when runs of this task begin or complete. The * default behavior is to not send any system notifications. */ - @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; public Task setCleanRoomsNotebookTask(CleanRoomsNotebookTask cleanRoomsNotebookTask) { @@ -258,6 +239,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public Task setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public Task setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -536,6 +526,7 @@ public boolean equals(Object o) { return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -574,6 +565,7 @@ public int hashCode() { cleanRoomsNotebookTask, conditionTask, dashboardTask, + dbtCloudTask, 
dbtTask, dependsOn, description, @@ -612,6 +604,7 @@ public String toString() { .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -644,4 +637,103 @@ public String toString() { .add("webhookNotifications", webhookNotifications) .toString(); } + + TaskPb toPb() { + TaskPb pb = new TaskPb(); + pb.setCleanRoomsNotebookTask(cleanRoomsNotebookTask); + pb.setConditionTask(conditionTask); + pb.setDashboardTask(dashboardTask); + pb.setDbtCloudTask(dbtCloudTask); + pb.setDbtTask(dbtTask); + pb.setDependsOn(dependsOn); + pb.setDescription(description); + pb.setDisableAutoOptimization(disableAutoOptimization); + pb.setEmailNotifications(emailNotifications); + pb.setEnvironmentKey(environmentKey); + pb.setExistingClusterId(existingClusterId); + pb.setForEachTask(forEachTask); + pb.setGenAiComputeTask(genAiComputeTask); + pb.setHealth(health); + pb.setJobClusterKey(jobClusterKey); + pb.setLibraries(libraries); + pb.setMaxRetries(maxRetries); + pb.setMinRetryIntervalMillis(minRetryIntervalMillis); + pb.setNewCluster(newCluster); + pb.setNotebookTask(notebookTask); + pb.setNotificationSettings(notificationSettings); + pb.setPipelineTask(pipelineTask); + pb.setPowerBiTask(powerBiTask); + pb.setPythonWheelTask(pythonWheelTask); + pb.setRetryOnTimeout(retryOnTimeout); + pb.setRunIf(runIf); + pb.setRunJobTask(runJobTask); + pb.setSparkJarTask(sparkJarTask); + pb.setSparkPythonTask(sparkPythonTask); + pb.setSparkSubmitTask(sparkSubmitTask); + pb.setSqlTask(sqlTask); + pb.setTaskKey(taskKey); + pb.setTimeoutSeconds(timeoutSeconds); + pb.setWebhookNotifications(webhookNotifications); + + return pb; + } + + static Task fromPb(TaskPb pb) { + Task model = new Task(); + model.setCleanRoomsNotebookTask(pb.getCleanRoomsNotebookTask()); + model.setConditionTask(pb.getConditionTask()); + 
model.setDashboardTask(pb.getDashboardTask()); + model.setDbtCloudTask(pb.getDbtCloudTask()); + model.setDbtTask(pb.getDbtTask()); + model.setDependsOn(pb.getDependsOn()); + model.setDescription(pb.getDescription()); + model.setDisableAutoOptimization(pb.getDisableAutoOptimization()); + model.setEmailNotifications(pb.getEmailNotifications()); + model.setEnvironmentKey(pb.getEnvironmentKey()); + model.setExistingClusterId(pb.getExistingClusterId()); + model.setForEachTask(pb.getForEachTask()); + model.setGenAiComputeTask(pb.getGenAiComputeTask()); + model.setHealth(pb.getHealth()); + model.setJobClusterKey(pb.getJobClusterKey()); + model.setLibraries(pb.getLibraries()); + model.setMaxRetries(pb.getMaxRetries()); + model.setMinRetryIntervalMillis(pb.getMinRetryIntervalMillis()); + model.setNewCluster(pb.getNewCluster()); + model.setNotebookTask(pb.getNotebookTask()); + model.setNotificationSettings(pb.getNotificationSettings()); + model.setPipelineTask(pb.getPipelineTask()); + model.setPowerBiTask(pb.getPowerBiTask()); + model.setPythonWheelTask(pb.getPythonWheelTask()); + model.setRetryOnTimeout(pb.getRetryOnTimeout()); + model.setRunIf(pb.getRunIf()); + model.setRunJobTask(pb.getRunJobTask()); + model.setSparkJarTask(pb.getSparkJarTask()); + model.setSparkPythonTask(pb.getSparkPythonTask()); + model.setSparkSubmitTask(pb.getSparkSubmitTask()); + model.setSqlTask(pb.getSqlTask()); + model.setTaskKey(pb.getTaskKey()); + model.setTimeoutSeconds(pb.getTimeoutSeconds()); + model.setWebhookNotifications(pb.getWebhookNotifications()); + + return model; + } + + public static class TaskSerializer extends JsonSerializer { + @Override + public void serialize(Task value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskDeserializer extends JsonDeserializer { + @Override + public Task deserialize(JsonParser p, DeserializationContext ctxt) throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskPb pb = mapper.readValue(p, TaskPb.class); + return Task.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java index b7a35e6db..ba57cddab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TaskDependency.TaskDependencySerializer.class) +@JsonDeserialize(using = TaskDependency.TaskDependencyDeserializer.class) public class TaskDependency { /** * Can only be specified on condition task dependencies. The outcome of the dependent task that * must be met for this task to run. */ - @JsonProperty("outcome") private String outcome; /** The name of the task this task depends on. 
*/ - @JsonProperty("task_key") private String taskKey; public TaskDependency setOutcome(String outcome) { @@ -58,4 +67,40 @@ public String toString() { .add("taskKey", taskKey) .toString(); } + + TaskDependencyPb toPb() { + TaskDependencyPb pb = new TaskDependencyPb(); + pb.setOutcome(outcome); + pb.setTaskKey(taskKey); + + return pb; + } + + static TaskDependency fromPb(TaskDependencyPb pb) { + TaskDependency model = new TaskDependency(); + model.setOutcome(pb.getOutcome()); + model.setTaskKey(pb.getTaskKey()); + + return model; + } + + public static class TaskDependencySerializer extends JsonSerializer { + @Override + public void serialize(TaskDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskDependencyDeserializer extends JsonDeserializer { + @Override + public TaskDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskDependencyPb pb = mapper.readValue(p, TaskDependencyPb.class); + return TaskDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependencyPb.java new file mode 100755 index 000000000..c276ba10e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependencyPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TaskDependencyPb { + @JsonProperty("outcome") + private String outcome; + + @JsonProperty("task_key") + private String taskKey; + + public TaskDependencyPb setOutcome(String outcome) { + this.outcome = outcome; + return this; + } + + public String getOutcome() { + return outcome; + } + + public TaskDependencyPb setTaskKey(String taskKey) { + this.taskKey = taskKey; + return this; + } + + public String getTaskKey() { + return taskKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskDependencyPb that = (TaskDependencyPb) o; + return Objects.equals(outcome, that.outcome) && Objects.equals(taskKey, that.taskKey); + } + + @Override + public int hashCode() { + return Objects.hash(outcome, taskKey); + } + + @Override + public String toString() { + return new ToStringer(TaskDependencyPb.class) + .add("outcome", outcome) + .add("taskKey", taskKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java index 440ad8cf8..73401c4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TaskEmailNotifications.TaskEmailNotificationsSerializer.class) +@JsonDeserialize(using = TaskEmailNotifications.TaskEmailNotificationsDeserializer.class) public class TaskEmailNotifications { /** * If true, do not send email to recipients specified in `on_failure` if the run is skipped. This * field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. */ - @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; /** @@ -23,7 +33,6 @@ public class TaskEmailNotifications { * `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are * not sent. */ - @JsonProperty("on_duration_warning_threshold_exceeded") private Collection onDurationWarningThresholdExceeded; /** @@ -32,14 +41,12 @@ public class TaskEmailNotifications { * `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job * creation, reset, or update the list is empty, and notifications are not sent. */ - @JsonProperty("on_failure") private Collection onFailure; /** * A list of email addresses to be notified when a run begins. If not specified on job creation, * reset, or update, the list is empty, and notifications are not sent. */ - @JsonProperty("on_start") private Collection onStart; /** @@ -49,7 +56,6 @@ public class TaskEmailNotifications { * or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If * the issue persists, notifications are resent every 30 minutes. 
*/ - @JsonProperty("on_streaming_backlog_exceeded") private Collection onStreamingBacklogExceeded; /** @@ -58,7 +64,6 @@ public class TaskEmailNotifications { * `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, * and notifications are not sent. */ - @JsonProperty("on_success") private Collection onSuccess; public TaskEmailNotifications setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { @@ -153,4 +158,51 @@ public String toString() { .add("onSuccess", onSuccess) .toString(); } + + TaskEmailNotificationsPb toPb() { + TaskEmailNotificationsPb pb = new TaskEmailNotificationsPb(); + pb.setNoAlertForSkippedRuns(noAlertForSkippedRuns); + pb.setOnDurationWarningThresholdExceeded(onDurationWarningThresholdExceeded); + pb.setOnFailure(onFailure); + pb.setOnStart(onStart); + pb.setOnStreamingBacklogExceeded(onStreamingBacklogExceeded); + pb.setOnSuccess(onSuccess); + + return pb; + } + + static TaskEmailNotifications fromPb(TaskEmailNotificationsPb pb) { + TaskEmailNotifications model = new TaskEmailNotifications(); + model.setNoAlertForSkippedRuns(pb.getNoAlertForSkippedRuns()); + model.setOnDurationWarningThresholdExceeded(pb.getOnDurationWarningThresholdExceeded()); + model.setOnFailure(pb.getOnFailure()); + model.setOnStart(pb.getOnStart()); + model.setOnStreamingBacklogExceeded(pb.getOnStreamingBacklogExceeded()); + model.setOnSuccess(pb.getOnSuccess()); + + return model; + } + + public static class TaskEmailNotificationsSerializer + extends JsonSerializer { + @Override + public void serialize( + TaskEmailNotifications value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskEmailNotificationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskEmailNotificationsDeserializer + extends JsonDeserializer { + @Override + public TaskEmailNotifications deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set 
by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskEmailNotificationsPb pb = mapper.readValue(p, TaskEmailNotificationsPb.class); + return TaskEmailNotifications.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotificationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotificationsPb.java new file mode 100755 index 000000000..8e992729a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotificationsPb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TaskEmailNotificationsPb { + @JsonProperty("no_alert_for_skipped_runs") + private Boolean noAlertForSkippedRuns; + + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection onDurationWarningThresholdExceeded; + + @JsonProperty("on_failure") + private Collection onFailure; + + @JsonProperty("on_start") + private Collection onStart; + + @JsonProperty("on_streaming_backlog_exceeded") + private Collection onStreamingBacklogExceeded; + + @JsonProperty("on_success") + private Collection onSuccess; + + public TaskEmailNotificationsPb setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { + this.noAlertForSkippedRuns = noAlertForSkippedRuns; + return this; + } + + public Boolean getNoAlertForSkippedRuns() { + return noAlertForSkippedRuns; + } + + public TaskEmailNotificationsPb setOnDurationWarningThresholdExceeded( + Collection onDurationWarningThresholdExceeded) { + this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public 
Collection getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + + public TaskEmailNotificationsPb setOnFailure(Collection onFailure) { + this.onFailure = onFailure; + return this; + } + + public Collection getOnFailure() { + return onFailure; + } + + public TaskEmailNotificationsPb setOnStart(Collection onStart) { + this.onStart = onStart; + return this; + } + + public Collection getOnStart() { + return onStart; + } + + public TaskEmailNotificationsPb setOnStreamingBacklogExceeded( + Collection onStreamingBacklogExceeded) { + this.onStreamingBacklogExceeded = onStreamingBacklogExceeded; + return this; + } + + public Collection getOnStreamingBacklogExceeded() { + return onStreamingBacklogExceeded; + } + + public TaskEmailNotificationsPb setOnSuccess(Collection onSuccess) { + this.onSuccess = onSuccess; + return this; + } + + public Collection getOnSuccess() { + return onSuccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskEmailNotificationsPb that = (TaskEmailNotificationsPb) o; + return Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns) + && Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) + && Objects.equals(onFailure, that.onFailure) + && Objects.equals(onStart, that.onStart) + && Objects.equals(onStreamingBacklogExceeded, that.onStreamingBacklogExceeded) + && Objects.equals(onSuccess, that.onSuccess); + } + + @Override + public int hashCode() { + return Objects.hash( + noAlertForSkippedRuns, + onDurationWarningThresholdExceeded, + onFailure, + onStart, + onStreamingBacklogExceeded, + onSuccess); + } + + @Override + public String toString() { + return new ToStringer(TaskEmailNotificationsPb.class) + .add("noAlertForSkippedRuns", noAlertForSkippedRuns) + .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) + .add("onFailure", 
onFailure) + .add("onStart", onStart) + .add("onStreamingBacklogExceeded", onStreamingBacklogExceeded) + .add("onSuccess", onSuccess) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettings.java index d01ee91ef..a1832f371 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettings.java @@ -4,31 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TaskNotificationSettings.TaskNotificationSettingsSerializer.class) +@JsonDeserialize(using = TaskNotificationSettings.TaskNotificationSettingsDeserializer.class) public class TaskNotificationSettings { /** * If true, do not send notifications to recipients specified in `on_start` for the retried runs * and do not send notifications to recipients specified in `on_failure` until the last retry of * the run. */ - @JsonProperty("alert_on_last_attempt") private Boolean alertOnLastAttempt; /** * If true, do not send notifications to recipients specified in `on_failure` if the run is * canceled. 
*/ - @JsonProperty("no_alert_for_canceled_runs") private Boolean noAlertForCanceledRuns; /** * If true, do not send notifications to recipients specified in `on_failure` if the run is * skipped. */ - @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; public TaskNotificationSettings setAlertOnLastAttempt(Boolean alertOnLastAttempt) { @@ -81,4 +89,45 @@ public String toString() { .add("noAlertForSkippedRuns", noAlertForSkippedRuns) .toString(); } + + TaskNotificationSettingsPb toPb() { + TaskNotificationSettingsPb pb = new TaskNotificationSettingsPb(); + pb.setAlertOnLastAttempt(alertOnLastAttempt); + pb.setNoAlertForCanceledRuns(noAlertForCanceledRuns); + pb.setNoAlertForSkippedRuns(noAlertForSkippedRuns); + + return pb; + } + + static TaskNotificationSettings fromPb(TaskNotificationSettingsPb pb) { + TaskNotificationSettings model = new TaskNotificationSettings(); + model.setAlertOnLastAttempt(pb.getAlertOnLastAttempt()); + model.setNoAlertForCanceledRuns(pb.getNoAlertForCanceledRuns()); + model.setNoAlertForSkippedRuns(pb.getNoAlertForSkippedRuns()); + + return model; + } + + public static class TaskNotificationSettingsSerializer + extends JsonSerializer { + @Override + public void serialize( + TaskNotificationSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskNotificationSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskNotificationSettingsDeserializer + extends JsonDeserializer { + @Override + public TaskNotificationSettings deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskNotificationSettingsPb pb = mapper.readValue(p, TaskNotificationSettingsPb.class); + return TaskNotificationSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettingsPb.java new file mode 100755 index 000000000..df7f6a0cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettingsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TaskNotificationSettingsPb { + @JsonProperty("alert_on_last_attempt") + private Boolean alertOnLastAttempt; + + @JsonProperty("no_alert_for_canceled_runs") + private Boolean noAlertForCanceledRuns; + + @JsonProperty("no_alert_for_skipped_runs") + private Boolean noAlertForSkippedRuns; + + public TaskNotificationSettingsPb setAlertOnLastAttempt(Boolean alertOnLastAttempt) { + this.alertOnLastAttempt = alertOnLastAttempt; + return this; + } + + public Boolean getAlertOnLastAttempt() { + return alertOnLastAttempt; + } + + public TaskNotificationSettingsPb setNoAlertForCanceledRuns(Boolean noAlertForCanceledRuns) { + this.noAlertForCanceledRuns = noAlertForCanceledRuns; + return this; + } + + public Boolean getNoAlertForCanceledRuns() { + return noAlertForCanceledRuns; + } + + public TaskNotificationSettingsPb setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { + this.noAlertForSkippedRuns = noAlertForSkippedRuns; + return this; + } + + public Boolean getNoAlertForSkippedRuns() { + return noAlertForSkippedRuns; + } + + @Override + public boolean equals(Object o) { + 
if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskNotificationSettingsPb that = (TaskNotificationSettingsPb) o; + return Objects.equals(alertOnLastAttempt, that.alertOnLastAttempt) + && Objects.equals(noAlertForCanceledRuns, that.noAlertForCanceledRuns) + && Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns); + } + + @Override + public int hashCode() { + return Objects.hash(alertOnLastAttempt, noAlertForCanceledRuns, noAlertForSkippedRuns); + } + + @Override + public String toString() { + return new ToStringer(TaskNotificationSettingsPb.class) + .add("alertOnLastAttempt", alertOnLastAttempt) + .add("noAlertForCanceledRuns", noAlertForCanceledRuns) + .add("noAlertForSkippedRuns", noAlertForSkippedRuns) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskPb.java new file mode 100755 index 000000000..7c6a0d358 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskPb.java @@ -0,0 +1,540 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TaskPb { + @JsonProperty("clean_rooms_notebook_task") + private CleanRoomsNotebookTask cleanRoomsNotebookTask; + + @JsonProperty("condition_task") + private ConditionTask conditionTask; + + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + + @JsonProperty("dbt_task") + private DbtTask dbtTask; + + @JsonProperty("depends_on") + private Collection dependsOn; + + @JsonProperty("description") + private String description; + + @JsonProperty("disable_auto_optimization") + private Boolean disableAutoOptimization; + + @JsonProperty("email_notifications") + private TaskEmailNotifications emailNotifications; + + @JsonProperty("environment_key") + private String environmentKey; + + @JsonProperty("existing_cluster_id") + private String existingClusterId; + + @JsonProperty("for_each_task") + private ForEachTask forEachTask; + + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + + @JsonProperty("health") + private JobsHealthRules health; + + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("max_retries") + private Long maxRetries; + + @JsonProperty("min_retry_interval_millis") + private Long minRetryIntervalMillis; + + @JsonProperty("new_cluster") + private com.databricks.sdk.service.compute.ClusterSpec newCluster; + + @JsonProperty("notebook_task") + private NotebookTask notebookTask; + + @JsonProperty("notification_settings") + private TaskNotificationSettings notificationSettings; + + @JsonProperty("pipeline_task") + private PipelineTask pipelineTask; + + @JsonProperty("power_bi_task") + 
private PowerBiTask powerBiTask; + + @JsonProperty("python_wheel_task") + private PythonWheelTask pythonWheelTask; + + @JsonProperty("retry_on_timeout") + private Boolean retryOnTimeout; + + @JsonProperty("run_if") + private RunIf runIf; + + @JsonProperty("run_job_task") + private RunJobTask runJobTask; + + @JsonProperty("spark_jar_task") + private SparkJarTask sparkJarTask; + + @JsonProperty("spark_python_task") + private SparkPythonTask sparkPythonTask; + + @JsonProperty("spark_submit_task") + private SparkSubmitTask sparkSubmitTask; + + @JsonProperty("sql_task") + private SqlTask sqlTask; + + @JsonProperty("task_key") + private String taskKey; + + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + + public TaskPb setCleanRoomsNotebookTask(CleanRoomsNotebookTask cleanRoomsNotebookTask) { + this.cleanRoomsNotebookTask = cleanRoomsNotebookTask; + return this; + } + + public CleanRoomsNotebookTask getCleanRoomsNotebookTask() { + return cleanRoomsNotebookTask; + } + + public TaskPb setConditionTask(ConditionTask conditionTask) { + this.conditionTask = conditionTask; + return this; + } + + public ConditionTask getConditionTask() { + return conditionTask; + } + + public TaskPb setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + + public TaskPb setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + + public TaskPb setDbtTask(DbtTask dbtTask) { + this.dbtTask = dbtTask; + return this; + } + + public DbtTask getDbtTask() { + return dbtTask; + } + + public TaskPb setDependsOn(Collection dependsOn) { + this.dependsOn = dependsOn; + return this; + } + + public Collection getDependsOn() { + return dependsOn; + } + + public 
TaskPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public TaskPb setDisableAutoOptimization(Boolean disableAutoOptimization) { + this.disableAutoOptimization = disableAutoOptimization; + return this; + } + + public Boolean getDisableAutoOptimization() { + return disableAutoOptimization; + } + + public TaskPb setEmailNotifications(TaskEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public TaskEmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public TaskPb setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + + public TaskPb setExistingClusterId(String existingClusterId) { + this.existingClusterId = existingClusterId; + return this; + } + + public String getExistingClusterId() { + return existingClusterId; + } + + public TaskPb setForEachTask(ForEachTask forEachTask) { + this.forEachTask = forEachTask; + return this; + } + + public ForEachTask getForEachTask() { + return forEachTask; + } + + public TaskPb setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + + public TaskPb setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + + public TaskPb setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + + public TaskPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public TaskPb setMaxRetries(Long 
maxRetries) { + this.maxRetries = maxRetries; + return this; + } + + public Long getMaxRetries() { + return maxRetries; + } + + public TaskPb setMinRetryIntervalMillis(Long minRetryIntervalMillis) { + this.minRetryIntervalMillis = minRetryIntervalMillis; + return this; + } + + public Long getMinRetryIntervalMillis() { + return minRetryIntervalMillis; + } + + public TaskPb setNewCluster(com.databricks.sdk.service.compute.ClusterSpec newCluster) { + this.newCluster = newCluster; + return this; + } + + public com.databricks.sdk.service.compute.ClusterSpec getNewCluster() { + return newCluster; + } + + public TaskPb setNotebookTask(NotebookTask notebookTask) { + this.notebookTask = notebookTask; + return this; + } + + public NotebookTask getNotebookTask() { + return notebookTask; + } + + public TaskPb setNotificationSettings(TaskNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public TaskNotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public TaskPb setPipelineTask(PipelineTask pipelineTask) { + this.pipelineTask = pipelineTask; + return this; + } + + public PipelineTask getPipelineTask() { + return pipelineTask; + } + + public TaskPb setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + + public TaskPb setPythonWheelTask(PythonWheelTask pythonWheelTask) { + this.pythonWheelTask = pythonWheelTask; + return this; + } + + public PythonWheelTask getPythonWheelTask() { + return pythonWheelTask; + } + + public TaskPb setRetryOnTimeout(Boolean retryOnTimeout) { + this.retryOnTimeout = retryOnTimeout; + return this; + } + + public Boolean getRetryOnTimeout() { + return retryOnTimeout; + } + + public TaskPb setRunIf(RunIf runIf) { + this.runIf = runIf; + return this; + } + + public RunIf getRunIf() { + return runIf; + } + + public TaskPb 
setRunJobTask(RunJobTask runJobTask) { + this.runJobTask = runJobTask; + return this; + } + + public RunJobTask getRunJobTask() { + return runJobTask; + } + + public TaskPb setSparkJarTask(SparkJarTask sparkJarTask) { + this.sparkJarTask = sparkJarTask; + return this; + } + + public SparkJarTask getSparkJarTask() { + return sparkJarTask; + } + + public TaskPb setSparkPythonTask(SparkPythonTask sparkPythonTask) { + this.sparkPythonTask = sparkPythonTask; + return this; + } + + public SparkPythonTask getSparkPythonTask() { + return sparkPythonTask; + } + + public TaskPb setSparkSubmitTask(SparkSubmitTask sparkSubmitTask) { + this.sparkSubmitTask = sparkSubmitTask; + return this; + } + + public SparkSubmitTask getSparkSubmitTask() { + return sparkSubmitTask; + } + + public TaskPb setSqlTask(SqlTask sqlTask) { + this.sqlTask = sqlTask; + return this; + } + + public SqlTask getSqlTask() { + return sqlTask; + } + + public TaskPb setTaskKey(String taskKey) { + this.taskKey = taskKey; + return this; + } + + public String getTaskKey() { + return taskKey; + } + + public TaskPb setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public TaskPb setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskPb that = (TaskPb) o; + return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) + && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) + && Objects.equals(dbtTask, that.dbtTask) + && Objects.equals(dependsOn, 
that.dependsOn) + && Objects.equals(description, that.description) + && Objects.equals(disableAutoOptimization, that.disableAutoOptimization) + && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environmentKey, that.environmentKey) + && Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) + && Objects.equals(health, that.health) + && Objects.equals(jobClusterKey, that.jobClusterKey) + && Objects.equals(libraries, that.libraries) + && Objects.equals(maxRetries, that.maxRetries) + && Objects.equals(minRetryIntervalMillis, that.minRetryIntervalMillis) + && Objects.equals(newCluster, that.newCluster) + && Objects.equals(notebookTask, that.notebookTask) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) + && Objects.equals(pythonWheelTask, that.pythonWheelTask) + && Objects.equals(retryOnTimeout, that.retryOnTimeout) + && Objects.equals(runIf, that.runIf) + && Objects.equals(runJobTask, that.runJobTask) + && Objects.equals(sparkJarTask, that.sparkJarTask) + && Objects.equals(sparkPythonTask, that.sparkPythonTask) + && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) + && Objects.equals(sqlTask, that.sqlTask) + && Objects.equals(taskKey, that.taskKey) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); + } + + @Override + public int hashCode() { + return Objects.hash( + cleanRoomsNotebookTask, + conditionTask, + dashboardTask, + dbtCloudTask, + dbtTask, + dependsOn, + description, + disableAutoOptimization, + emailNotifications, + environmentKey, + existingClusterId, + forEachTask, + genAiComputeTask, + health, + jobClusterKey, + libraries, + maxRetries, + minRetryIntervalMillis, + newCluster, + notebookTask, + 
notificationSettings, + pipelineTask, + powerBiTask, + pythonWheelTask, + retryOnTimeout, + runIf, + runJobTask, + sparkJarTask, + sparkPythonTask, + sparkSubmitTask, + sqlTask, + taskKey, + timeoutSeconds, + webhookNotifications); + } + + @Override + public String toString() { + return new ToStringer(TaskPb.class) + .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) + .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) + .add("dbtTask", dbtTask) + .add("dependsOn", dependsOn) + .add("description", description) + .add("disableAutoOptimization", disableAutoOptimization) + .add("emailNotifications", emailNotifications) + .add("environmentKey", environmentKey) + .add("existingClusterId", existingClusterId) + .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) + .add("health", health) + .add("jobClusterKey", jobClusterKey) + .add("libraries", libraries) + .add("maxRetries", maxRetries) + .add("minRetryIntervalMillis", minRetryIntervalMillis) + .add("newCluster", newCluster) + .add("notebookTask", notebookTask) + .add("notificationSettings", notificationSettings) + .add("pipelineTask", pipelineTask) + .add("powerBiTask", powerBiTask) + .add("pythonWheelTask", pythonWheelTask) + .add("retryOnTimeout", retryOnTimeout) + .add("runIf", runIf) + .add("runJobTask", runJobTask) + .add("sparkJarTask", sparkJarTask) + .add("sparkPythonTask", sparkPythonTask) + .add("sparkSubmitTask", sparkSubmitTask) + .add("sqlTask", sqlTask) + .add("taskKey", taskKey) + .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java index 73025923e..8e359d6b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TerminationDetails.TerminationDetailsSerializer.class) +@JsonDeserialize(using = TerminationDetails.TerminationDetailsDeserializer.class) public class TerminationDetails { /** * The code indicates why the run was terminated. Additional codes might be introduced in future @@ -49,14 +60,12 @@ public class TerminationDetails { *

[Link]: * https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now */ - @JsonProperty("code") private TerminationCodeCode code; /** * A descriptive message with the termination details. This field is unstructured and the format * might change. */ - @JsonProperty("message") private String message; /** @@ -68,7 +77,6 @@ public class TerminationDetails { * *

[status page]: https://status.databricks.com/ */ - @JsonProperty("type") private TerminationTypeType typeValue; public TerminationDetails setCode(TerminationCodeCode code) { @@ -121,4 +129,42 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + TerminationDetailsPb toPb() { + TerminationDetailsPb pb = new TerminationDetailsPb(); + pb.setCode(code); + pb.setMessage(message); + pb.setType(typeValue); + + return pb; + } + + static TerminationDetails fromPb(TerminationDetailsPb pb) { + TerminationDetails model = new TerminationDetails(); + model.setCode(pb.getCode()); + model.setMessage(pb.getMessage()); + model.setType(pb.getType()); + + return model; + } + + public static class TerminationDetailsSerializer extends JsonSerializer { + @Override + public void serialize(TerminationDetails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TerminationDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TerminationDetailsDeserializer extends JsonDeserializer { + @Override + public TerminationDetails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TerminationDetailsPb pb = mapper.readValue(p, TerminationDetailsPb.class); + return TerminationDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetailsPb.java new file mode 100755 index 000000000..e2658ae8a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetailsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TerminationDetailsPb { + @JsonProperty("code") + private TerminationCodeCode code; + + @JsonProperty("message") + private String message; + + @JsonProperty("type") + private TerminationTypeType typeValue; + + public TerminationDetailsPb setCode(TerminationCodeCode code) { + this.code = code; + return this; + } + + public TerminationCodeCode getCode() { + return code; + } + + public TerminationDetailsPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public TerminationDetailsPb setType(TerminationTypeType typeValue) { + this.typeValue = typeValue; + return this; + } + + public TerminationTypeType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TerminationDetailsPb that = (TerminationDetailsPb) o; + return Objects.equals(code, that.code) + && Objects.equals(message, that.message) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(code, message, typeValue); + } + + @Override + public String toString() { + return new ToStringer(TerminationDetailsPb.class) + .add("code", code) + .add("message", message) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java index 68c5c837b..a92f42ccc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java @@ -4,14 +4,24 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Additional details about what triggered the run */ @Generated +@JsonSerialize(using = TriggerInfo.TriggerInfoSerializer.class) +@JsonDeserialize(using = TriggerInfo.TriggerInfoDeserializer.class) public class TriggerInfo { /** The run id of the Run Job task run */ - @JsonProperty("run_id") private Long runId; public TriggerInfo setRunId(Long runId) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(TriggerInfo.class).add("runId", runId).toString(); } + + TriggerInfoPb toPb() { + TriggerInfoPb pb = new TriggerInfoPb(); + pb.setRunId(runId); + + return pb; + } + + static TriggerInfo fromPb(TriggerInfoPb pb) { + TriggerInfo model = new TriggerInfo(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class TriggerInfoSerializer extends JsonSerializer { + @Override + public void serialize(TriggerInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TriggerInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TriggerInfoDeserializer extends JsonDeserializer { + @Override + public TriggerInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and 
it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TriggerInfoPb pb = mapper.readValue(p, TriggerInfoPb.class); + return TriggerInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfoPb.java new file mode 100755 index 000000000..a3a269b36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Additional details about what triggered the run */ +@Generated +class TriggerInfoPb { + @JsonProperty("run_id") + private Long runId; + + public TriggerInfoPb setRunId(Long runId) { + this.runId = runId; + return this; + } + + public Long getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TriggerInfoPb that = (TriggerInfoPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(TriggerInfoPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java index 7ee1fe4b1..9dc2ca43b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java @@ -4,29 +4,35 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TriggerSettings.TriggerSettingsSerializer.class) +@JsonDeserialize(using = TriggerSettings.TriggerSettingsDeserializer.class) public class TriggerSettings { /** File arrival trigger settings. */ - @JsonProperty("file_arrival") private FileArrivalTriggerConfiguration fileArrival; /** Whether this trigger is paused or not. */ - @JsonProperty("pause_status") private PauseStatus pauseStatus; /** Periodic trigger settings. */ - @JsonProperty("periodic") private PeriodicTriggerConfiguration periodic; /** Old table trigger settings name. Deprecated in favor of `table_update`. 
*/ - @JsonProperty("table") private TableUpdateTriggerConfiguration table; /** */ - @JsonProperty("table_update") private TableUpdateTriggerConfiguration tableUpdate; public TriggerSettings setFileArrival(FileArrivalTriggerConfiguration fileArrival) { @@ -101,4 +107,46 @@ public String toString() { .add("tableUpdate", tableUpdate) .toString(); } + + TriggerSettingsPb toPb() { + TriggerSettingsPb pb = new TriggerSettingsPb(); + pb.setFileArrival(fileArrival); + pb.setPauseStatus(pauseStatus); + pb.setPeriodic(periodic); + pb.setTable(table); + pb.setTableUpdate(tableUpdate); + + return pb; + } + + static TriggerSettings fromPb(TriggerSettingsPb pb) { + TriggerSettings model = new TriggerSettings(); + model.setFileArrival(pb.getFileArrival()); + model.setPauseStatus(pb.getPauseStatus()); + model.setPeriodic(pb.getPeriodic()); + model.setTable(pb.getTable()); + model.setTableUpdate(pb.getTableUpdate()); + + return model; + } + + public static class TriggerSettingsSerializer extends JsonSerializer { + @Override + public void serialize(TriggerSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TriggerSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TriggerSettingsDeserializer extends JsonDeserializer { + @Override + public TriggerSettings deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TriggerSettingsPb pb = mapper.readValue(p, TriggerSettingsPb.class); + return TriggerSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettingsPb.java new file mode 100755 index 000000000..9d670ee8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettingsPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TriggerSettingsPb { + @JsonProperty("file_arrival") + private FileArrivalTriggerConfiguration fileArrival; + + @JsonProperty("pause_status") + private PauseStatus pauseStatus; + + @JsonProperty("periodic") + private PeriodicTriggerConfiguration periodic; + + @JsonProperty("table") + private TableUpdateTriggerConfiguration table; + + @JsonProperty("table_update") + private TableUpdateTriggerConfiguration tableUpdate; + + public TriggerSettingsPb setFileArrival(FileArrivalTriggerConfiguration fileArrival) { + this.fileArrival = fileArrival; + return this; + } + + public FileArrivalTriggerConfiguration getFileArrival() { + return fileArrival; + } + + public TriggerSettingsPb setPauseStatus(PauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public PauseStatus getPauseStatus() { + return pauseStatus; + } + + public TriggerSettingsPb setPeriodic(PeriodicTriggerConfiguration periodic) { + this.periodic = periodic; + return this; + } + + public PeriodicTriggerConfiguration getPeriodic() { + return periodic; + } + + public TriggerSettingsPb setTable(TableUpdateTriggerConfiguration table) { + 
this.table = table; + return this; + } + + public TableUpdateTriggerConfiguration getTable() { + return table; + } + + public TriggerSettingsPb setTableUpdate(TableUpdateTriggerConfiguration tableUpdate) { + this.tableUpdate = tableUpdate; + return this; + } + + public TableUpdateTriggerConfiguration getTableUpdate() { + return tableUpdate; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TriggerSettingsPb that = (TriggerSettingsPb) o; + return Objects.equals(fileArrival, that.fileArrival) + && Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(periodic, that.periodic) + && Objects.equals(table, that.table) + && Objects.equals(tableUpdate, that.tableUpdate); + } + + @Override + public int hashCode() { + return Objects.hash(fileArrival, pauseStatus, periodic, table, tableUpdate); + } + + @Override + public String toString() { + return new ToStringer(TriggerSettingsPb.class) + .add("fileArrival", fileArrival) + .add("pauseStatus", pauseStatus) + .add("periodic", periodic) + .add("table", table) + .add("tableUpdate", tableUpdate) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java index 7d4b16120..023c200db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateJob.UpdateJobSerializer.class) +@JsonDeserialize(using = UpdateJob.UpdateJobDeserializer.class) public class UpdateJob { /** * Remove top-level fields in the job settings. Removing nested fields is not supported, except * for tasks and job clusters (`tasks/task_1`). This field is optional. */ - @JsonProperty("fields_to_remove") private Collection fieldsToRemove; /** The canonical identifier of the job to update. This field is required. */ - @JsonProperty("job_id") private Long jobId; /** @@ -33,7 +42,6 @@ public class UpdateJob { *

Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to * other fields are applied to future runs only. */ - @JsonProperty("new_settings") private JobSettings newSettings; public UpdateJob setFieldsToRemove(Collection fieldsToRemove) { @@ -86,4 +94,41 @@ public String toString() { .add("newSettings", newSettings) .toString(); } + + UpdateJobPb toPb() { + UpdateJobPb pb = new UpdateJobPb(); + pb.setFieldsToRemove(fieldsToRemove); + pb.setJobId(jobId); + pb.setNewSettings(newSettings); + + return pb; + } + + static UpdateJob fromPb(UpdateJobPb pb) { + UpdateJob model = new UpdateJob(); + model.setFieldsToRemove(pb.getFieldsToRemove()); + model.setJobId(pb.getJobId()); + model.setNewSettings(pb.getNewSettings()); + + return model; + } + + public static class UpdateJobSerializer extends JsonSerializer { + @Override + public void serialize(UpdateJob value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateJobPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateJobDeserializer extends JsonDeserializer { + @Override + public UpdateJob deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateJobPb pb = mapper.readValue(p, UpdateJobPb.class); + return UpdateJob.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJobPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJobPb.java new file mode 100755 index 000000000..2ae705c80 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJobPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateJobPb { + @JsonProperty("fields_to_remove") + private Collection fieldsToRemove; + + @JsonProperty("job_id") + private Long jobId; + + @JsonProperty("new_settings") + private JobSettings newSettings; + + public UpdateJobPb setFieldsToRemove(Collection fieldsToRemove) { + this.fieldsToRemove = fieldsToRemove; + return this; + } + + public Collection getFieldsToRemove() { + return fieldsToRemove; + } + + public UpdateJobPb setJobId(Long jobId) { + this.jobId = jobId; + return this; + } + + public Long getJobId() { + return jobId; + } + + public UpdateJobPb setNewSettings(JobSettings newSettings) { + this.newSettings = newSettings; + return this; + } + + public JobSettings getNewSettings() { + return newSettings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateJobPb that = (UpdateJobPb) o; + return Objects.equals(fieldsToRemove, that.fieldsToRemove) + && Objects.equals(jobId, that.jobId) + && Objects.equals(newSettings, that.newSettings); + } + + @Override + public int hashCode() { + return Objects.hash(fieldsToRemove, jobId, newSettings); + } + + @Override + public String toString() { + return new ToStringer(UpdateJobPb.class) + .add("fieldsToRemove", fieldsToRemove) + .add("jobId", jobId) + .add("newSettings", newSettings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java index d53424f25..ce3ace85c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponsePb.java new file mode 100755 index 000000000..475eff577 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java index 9969dc867..b163e3350 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ViewItem.ViewItemSerializer.class) +@JsonDeserialize(using = ViewItem.ViewItemDeserializer.class) public class ViewItem { /** Content of the view. */ - @JsonProperty("content") private String content; /** * Name of the view item. In the case of code view, it would be the notebook’s name. In the case * of dashboard view, it would be the dashboard’s name. */ - @JsonProperty("name") private String name; /** Type of the view item. */ - @JsonProperty("type") private ViewType typeValue; public ViewItem setContent(String content) { @@ -74,4 +82,41 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + ViewItemPb toPb() { + ViewItemPb pb = new ViewItemPb(); + pb.setContent(content); + pb.setName(name); + pb.setType(typeValue); + + return pb; + } + + static ViewItem fromPb(ViewItemPb pb) { + ViewItem model = new ViewItem(); + model.setContent(pb.getContent()); + model.setName(pb.getName()); + model.setType(pb.getType()); + + return model; + } + + public static class ViewItemSerializer extends JsonSerializer { + @Override + public void serialize(ViewItem value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ViewItemPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ViewItemDeserializer extends JsonDeserializer { + @Override + public ViewItem deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ViewItemPb pb = mapper.readValue(p, ViewItemPb.class); + return ViewItem.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItemPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItemPb.java new file mode 100755 index 000000000..bc45e12e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItemPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ViewItemPb { + @JsonProperty("content") + private String content; + + @JsonProperty("name") + private String name; + + @JsonProperty("type") + private ViewType typeValue; + + public ViewItemPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public ViewItemPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ViewItemPb setType(ViewType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ViewType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ViewItemPb that = (ViewItemPb) o; + return Objects.equals(content, that.content) + && Objects.equals(name, that.name) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(content, name, typeValue); + } + + @Override + public String toString() { + return new ToStringer(ViewItemPb.class) + .add("content", content) + .add("name", name) + .add("typeValue", typeValue) 
+ .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java index 270e5ae06..3777d4478 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Webhook.WebhookSerializer.class) +@JsonDeserialize(using = Webhook.WebhookDeserializer.class) public class Webhook { /** */ - @JsonProperty("id") private String id; public Webhook setId(String id) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(Webhook.class).add("id", id).toString(); } + + WebhookPb toPb() { + WebhookPb pb = new WebhookPb(); + pb.setId(id); + + return pb; + } + + static Webhook fromPb(WebhookPb pb) { + Webhook model = new Webhook(); + model.setId(pb.getId()); + + return model; + } + + public static class WebhookSerializer extends JsonSerializer { + @Override + public void serialize(Webhook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WebhookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class WebhookDeserializer extends JsonDeserializer { + @Override + public Webhook deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WebhookPb pb = mapper.readValue(p, WebhookPb.class); + return Webhook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java index 72d92748d..8de9229b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java @@ -4,32 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WebhookNotifications.WebhookNotificationsSerializer.class) +@JsonDeserialize(using = WebhookNotifications.WebhookNotificationsDeserializer.class) public class WebhookNotifications { /** * An optional list of system notification IDs to call when the duration of a run exceeds the * threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. 
A maximum of 3 * destinations can be specified for the `on_duration_warning_threshold_exceeded` property. */ - @JsonProperty("on_duration_warning_threshold_exceeded") private Collection onDurationWarningThresholdExceeded; /** * An optional list of system notification IDs to call when the run fails. A maximum of 3 * destinations can be specified for the `on_failure` property. */ - @JsonProperty("on_failure") private Collection onFailure; /** * An optional list of system notification IDs to call when the run starts. A maximum of 3 * destinations can be specified for the `on_start` property. */ - @JsonProperty("on_start") private Collection onStart; /** @@ -40,14 +48,12 @@ public class WebhookNotifications { * average of these metrics. If the issue persists, notifications are resent every 30 minutes. A * maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property. */ - @JsonProperty("on_streaming_backlog_exceeded") private Collection onStreamingBacklogExceeded; /** * An optional list of system notification IDs to call when the run completes successfully. A * maximum of 3 destinations can be specified for the `on_success` property. 
*/ - @JsonProperty("on_success") private Collection onSuccess; public WebhookNotifications setOnDurationWarningThresholdExceeded( @@ -130,4 +136,48 @@ public String toString() { .add("onSuccess", onSuccess) .toString(); } + + WebhookNotificationsPb toPb() { + WebhookNotificationsPb pb = new WebhookNotificationsPb(); + pb.setOnDurationWarningThresholdExceeded(onDurationWarningThresholdExceeded); + pb.setOnFailure(onFailure); + pb.setOnStart(onStart); + pb.setOnStreamingBacklogExceeded(onStreamingBacklogExceeded); + pb.setOnSuccess(onSuccess); + + return pb; + } + + static WebhookNotifications fromPb(WebhookNotificationsPb pb) { + WebhookNotifications model = new WebhookNotifications(); + model.setOnDurationWarningThresholdExceeded(pb.getOnDurationWarningThresholdExceeded()); + model.setOnFailure(pb.getOnFailure()); + model.setOnStart(pb.getOnStart()); + model.setOnStreamingBacklogExceeded(pb.getOnStreamingBacklogExceeded()); + model.setOnSuccess(pb.getOnSuccess()); + + return model; + } + + public static class WebhookNotificationsSerializer extends JsonSerializer { + @Override + public void serialize( + WebhookNotifications value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WebhookNotificationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WebhookNotificationsDeserializer + extends JsonDeserializer { + @Override + public WebhookNotifications deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WebhookNotificationsPb pb = mapper.readValue(p, WebhookNotificationsPb.class); + return WebhookNotifications.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsPb.java new file mode 100755 index 000000000..b22f1f72d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsPb.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WebhookNotificationsPb { + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection onDurationWarningThresholdExceeded; + + @JsonProperty("on_failure") + private Collection onFailure; + + @JsonProperty("on_start") + private Collection onStart; + + @JsonProperty("on_streaming_backlog_exceeded") + private Collection onStreamingBacklogExceeded; + + @JsonProperty("on_success") + private Collection onSuccess; + + public WebhookNotificationsPb setOnDurationWarningThresholdExceeded( + Collection onDurationWarningThresholdExceeded) { + this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public Collection getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + + public WebhookNotificationsPb setOnFailure(Collection onFailure) { + this.onFailure = onFailure; + return this; + } + + public Collection getOnFailure() { + return onFailure; + } + + public WebhookNotificationsPb setOnStart(Collection onStart) { + this.onStart = onStart; + return this; + } + + public 
Collection getOnStart() { + return onStart; + } + + public WebhookNotificationsPb setOnStreamingBacklogExceeded( + Collection onStreamingBacklogExceeded) { + this.onStreamingBacklogExceeded = onStreamingBacklogExceeded; + return this; + } + + public Collection getOnStreamingBacklogExceeded() { + return onStreamingBacklogExceeded; + } + + public WebhookNotificationsPb setOnSuccess(Collection onSuccess) { + this.onSuccess = onSuccess; + return this; + } + + public Collection getOnSuccess() { + return onSuccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WebhookNotificationsPb that = (WebhookNotificationsPb) o; + return Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) + && Objects.equals(onFailure, that.onFailure) + && Objects.equals(onStart, that.onStart) + && Objects.equals(onStreamingBacklogExceeded, that.onStreamingBacklogExceeded) + && Objects.equals(onSuccess, that.onSuccess); + } + + @Override + public int hashCode() { + return Objects.hash( + onDurationWarningThresholdExceeded, + onFailure, + onStart, + onStreamingBacklogExceeded, + onSuccess); + } + + @Override + public String toString() { + return new ToStringer(WebhookNotificationsPb.class) + .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) + .add("onFailure", onFailure) + .add("onStart", onStart) + .add("onStreamingBacklogExceeded", onStreamingBacklogExceeded) + .add("onSuccess", onSuccess) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookPb.java new file mode 100755 index 000000000..6116be9ac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WebhookPb { + @JsonProperty("id") + private String id; + + public WebhookPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WebhookPb that = (WebhookPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(WebhookPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java index d40158f88..26012def1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated 
+@JsonSerialize(using = WidgetErrorDetail.WidgetErrorDetailSerializer.class) +@JsonDeserialize(using = WidgetErrorDetail.WidgetErrorDetailDeserializer.class) public class WidgetErrorDetail { /** */ - @JsonProperty("message") private String message; public WidgetErrorDetail setMessage(String message) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(WidgetErrorDetail.class).add("message", message).toString(); } + + WidgetErrorDetailPb toPb() { + WidgetErrorDetailPb pb = new WidgetErrorDetailPb(); + pb.setMessage(message); + + return pb; + } + + static WidgetErrorDetail fromPb(WidgetErrorDetailPb pb) { + WidgetErrorDetail model = new WidgetErrorDetail(); + model.setMessage(pb.getMessage()); + + return model; + } + + public static class WidgetErrorDetailSerializer extends JsonSerializer { + @Override + public void serialize(WidgetErrorDetail value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WidgetErrorDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WidgetErrorDetailDeserializer extends JsonDeserializer { + @Override + public WidgetErrorDetail deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WidgetErrorDetailPb pb = mapper.readValue(p, WidgetErrorDetailPb.class); + return WidgetErrorDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetailPb.java new file mode 100755 index 000000000..62e1bb975 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetailPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WidgetErrorDetailPb { + @JsonProperty("message") + private String message; + + public WidgetErrorDetailPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WidgetErrorDetailPb that = (WidgetErrorDetailPb) o; + return Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(message); + } + + @Override + public String toString() { + return new ToStringer(WidgetErrorDetailPb.class).add("message", message).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java index 8c788493a..92135c74d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddExchangeForListingRequest.AddExchangeForListingRequestSerializer.class) +@JsonDeserialize( + using = AddExchangeForListingRequest.AddExchangeForListingRequestDeserializer.class) public class AddExchangeForListingRequest { /** */ - @JsonProperty("exchange_id") private String exchangeId; /** */ - @JsonProperty("listing_id") private String listingId; public AddExchangeForListingRequest setExchangeId(String exchangeId) { @@ -55,4 +65,43 @@ public String toString() { .add("listingId", listingId) .toString(); } + + AddExchangeForListingRequestPb toPb() { + AddExchangeForListingRequestPb pb = new AddExchangeForListingRequestPb(); + pb.setExchangeId(exchangeId); + pb.setListingId(listingId); + + return pb; + } + + static AddExchangeForListingRequest fromPb(AddExchangeForListingRequestPb pb) { + AddExchangeForListingRequest model = new AddExchangeForListingRequest(); + model.setExchangeId(pb.getExchangeId()); + model.setListingId(pb.getListingId()); + + return model; + } + + public static class AddExchangeForListingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + AddExchangeForListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddExchangeForListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddExchangeForListingRequestDeserializer + extends JsonDeserializer { + @Override + public AddExchangeForListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddExchangeForListingRequestPb pb = mapper.readValue(p, AddExchangeForListingRequestPb.class); + return AddExchangeForListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequestPb.java new file mode 100755 index 000000000..870b3a35e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AddExchangeForListingRequestPb { + @JsonProperty("exchange_id") + private String exchangeId; + + @JsonProperty("listing_id") + private String listingId; + + public AddExchangeForListingRequestPb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public AddExchangeForListingRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddExchangeForListingRequestPb that = (AddExchangeForListingRequestPb) o; + return Objects.equals(exchangeId, that.exchangeId) && Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId, listingId); + } + + @Override + public String toString() { + return new 
ToStringer(AddExchangeForListingRequestPb.class) + .add("exchangeId", exchangeId) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java index 4dc2a5d00..400904a0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AddExchangeForListingResponse.AddExchangeForListingResponseSerializer.class) +@JsonDeserialize( + using = AddExchangeForListingResponse.AddExchangeForListingResponseDeserializer.class) public class AddExchangeForListingResponse { /** */ - @JsonProperty("exchange_for_listing") private ExchangeListing exchangeForListing; public AddExchangeForListingResponse setExchangeForListing(ExchangeListing exchangeForListing) { @@ -41,4 +52,42 @@ public String toString() { .add("exchangeForListing", exchangeForListing) .toString(); } + + AddExchangeForListingResponsePb toPb() { + 
AddExchangeForListingResponsePb pb = new AddExchangeForListingResponsePb(); + pb.setExchangeForListing(exchangeForListing); + + return pb; + } + + static AddExchangeForListingResponse fromPb(AddExchangeForListingResponsePb pb) { + AddExchangeForListingResponse model = new AddExchangeForListingResponse(); + model.setExchangeForListing(pb.getExchangeForListing()); + + return model; + } + + public static class AddExchangeForListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + AddExchangeForListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AddExchangeForListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AddExchangeForListingResponseDeserializer + extends JsonDeserializer { + @Override + public AddExchangeForListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AddExchangeForListingResponsePb pb = + mapper.readValue(p, AddExchangeForListingResponsePb.class); + return AddExchangeForListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponsePb.java new file mode 100755 index 000000000..625b5500e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AddExchangeForListingResponsePb { + @JsonProperty("exchange_for_listing") + private ExchangeListing exchangeForListing; + + public AddExchangeForListingResponsePb setExchangeForListing(ExchangeListing exchangeForListing) { + this.exchangeForListing = exchangeForListing; + return this; + } + + public ExchangeListing getExchangeForListing() { + return exchangeForListing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddExchangeForListingResponsePb that = (AddExchangeForListingResponsePb) o; + return Objects.equals(exchangeForListing, that.exchangeForListing); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeForListing); + } + + @Override + public String toString() { + return new ToStringer(AddExchangeForListingResponsePb.class) + .add("exchangeForListing", exchangeForListing) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequest.java index 6846db013..a78c88d67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequest.java @@ -3,18 +3,26 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Get one batch of listings. One may specify up to 50 IDs per request. */ @Generated +@JsonSerialize(using = BatchGetListingsRequest.BatchGetListingsRequestSerializer.class) +@JsonDeserialize(using = BatchGetListingsRequest.BatchGetListingsRequestDeserializer.class) public class BatchGetListingsRequest { /** */ - @JsonIgnore - @QueryParam("ids") private Collection ids; public BatchGetListingsRequest setIds(Collection ids) { @@ -43,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(BatchGetListingsRequest.class).add("ids", ids).toString(); } + + BatchGetListingsRequestPb toPb() { + BatchGetListingsRequestPb pb = new BatchGetListingsRequestPb(); + pb.setIds(ids); + + return pb; + } + + static BatchGetListingsRequest fromPb(BatchGetListingsRequestPb pb) { + BatchGetListingsRequest model = new BatchGetListingsRequest(); + model.setIds(pb.getIds()); + + return model; + } + + public static class BatchGetListingsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + BatchGetListingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BatchGetListingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BatchGetListingsRequestDeserializer + extends JsonDeserializer { + @Override + public BatchGetListingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BatchGetListingsRequestPb pb = mapper.readValue(p, BatchGetListingsRequestPb.class); + return BatchGetListingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequestPb.java new file mode 100755 index 000000000..5c0598c80 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Get one batch of listings. One may specify up to 50 IDs per request. 
*/ +@Generated +class BatchGetListingsRequestPb { + @JsonIgnore + @QueryParam("ids") + private Collection ids; + + public BatchGetListingsRequestPb setIds(Collection ids) { + this.ids = ids; + return this; + } + + public Collection getIds() { + return ids; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BatchGetListingsRequestPb that = (BatchGetListingsRequestPb) o; + return Objects.equals(ids, that.ids); + } + + @Override + public int hashCode() { + return Objects.hash(ids); + } + + @Override + public String toString() { + return new ToStringer(BatchGetListingsRequestPb.class).add("ids", ids).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponse.java index ed1ce2dda..bc72ba507 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
BatchGetListingsResponse.BatchGetListingsResponseSerializer.class) +@JsonDeserialize(using = BatchGetListingsResponse.BatchGetListingsResponseDeserializer.class) public class BatchGetListingsResponse { /** */ - @JsonProperty("listings") private Collection

listings; public BatchGetListingsResponse setListings(Collection listings) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(BatchGetListingsResponse.class).add("listings", listings).toString(); } + + BatchGetListingsResponsePb toPb() { + BatchGetListingsResponsePb pb = new BatchGetListingsResponsePb(); + pb.setListings(listings); + + return pb; + } + + static BatchGetListingsResponse fromPb(BatchGetListingsResponsePb pb) { + BatchGetListingsResponse model = new BatchGetListingsResponse(); + model.setListings(pb.getListings()); + + return model; + } + + public static class BatchGetListingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + BatchGetListingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BatchGetListingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BatchGetListingsResponseDeserializer + extends JsonDeserializer { + @Override + public BatchGetListingsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BatchGetListingsResponsePb pb = mapper.readValue(p, BatchGetListingsResponsePb.class); + return BatchGetListingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponsePb.java new file mode 100755 index 000000000..a729b3c3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetListingsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BatchGetListingsResponsePb { + @JsonProperty("listings") + private Collection listings; + + public BatchGetListingsResponsePb setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BatchGetListingsResponsePb that = (BatchGetListingsResponsePb) o; + return Objects.equals(listings, that.listings); + } + + @Override + public int hashCode() { + return Objects.hash(listings); + } + + @Override + public String toString() { + return new ToStringer(BatchGetListingsResponsePb.class).add("listings", listings).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequest.java index 41788168d..2990182b7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequest.java @@ -3,18 +3,26 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Get one batch of providers. One may specify up to 50 IDs per request. */ @Generated +@JsonSerialize(using = BatchGetProvidersRequest.BatchGetProvidersRequestSerializer.class) +@JsonDeserialize(using = BatchGetProvidersRequest.BatchGetProvidersRequestDeserializer.class) public class BatchGetProvidersRequest { /** */ - @JsonIgnore - @QueryParam("ids") private Collection ids; public BatchGetProvidersRequest setIds(Collection ids) { @@ -43,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(BatchGetProvidersRequest.class).add("ids", ids).toString(); } + + BatchGetProvidersRequestPb toPb() { + BatchGetProvidersRequestPb pb = new BatchGetProvidersRequestPb(); + pb.setIds(ids); + + return pb; + } + + static BatchGetProvidersRequest fromPb(BatchGetProvidersRequestPb pb) { + BatchGetProvidersRequest model = new BatchGetProvidersRequest(); + model.setIds(pb.getIds()); + + return model; + } + + public static class BatchGetProvidersRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + BatchGetProvidersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BatchGetProvidersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BatchGetProvidersRequestDeserializer + extends JsonDeserializer { + @Override + public BatchGetProvidersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BatchGetProvidersRequestPb pb = mapper.readValue(p, BatchGetProvidersRequestPb.class); + return BatchGetProvidersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequestPb.java new file mode 100755 index 000000000..454257e77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Get one batch of providers. One may specify up to 50 IDs per request. 
*/ +@Generated +class BatchGetProvidersRequestPb { + @JsonIgnore + @QueryParam("ids") + private Collection ids; + + public BatchGetProvidersRequestPb setIds(Collection ids) { + this.ids = ids; + return this; + } + + public Collection getIds() { + return ids; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BatchGetProvidersRequestPb that = (BatchGetProvidersRequestPb) o; + return Objects.equals(ids, that.ids); + } + + @Override + public int hashCode() { + return Objects.hash(ids); + } + + @Override + public String toString() { + return new ToStringer(BatchGetProvidersRequestPb.class).add("ids", ids).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponse.java index 96ab8e162..d173922be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
BatchGetProvidersResponse.BatchGetProvidersResponseSerializer.class) +@JsonDeserialize(using = BatchGetProvidersResponse.BatchGetProvidersResponseDeserializer.class) public class BatchGetProvidersResponse { /** */ - @JsonProperty("providers") private Collection providers; public BatchGetProvidersResponse setProviders(Collection providers) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(BatchGetProvidersResponse.class).add("providers", providers).toString(); } + + BatchGetProvidersResponsePb toPb() { + BatchGetProvidersResponsePb pb = new BatchGetProvidersResponsePb(); + pb.setProviders(providers); + + return pb; + } + + static BatchGetProvidersResponse fromPb(BatchGetProvidersResponsePb pb) { + BatchGetProvidersResponse model = new BatchGetProvidersResponse(); + model.setProviders(pb.getProviders()); + + return model; + } + + public static class BatchGetProvidersResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + BatchGetProvidersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BatchGetProvidersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BatchGetProvidersResponseDeserializer + extends JsonDeserializer { + @Override + public BatchGetProvidersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BatchGetProvidersResponsePb pb = mapper.readValue(p, BatchGetProvidersResponsePb.class); + return BatchGetProvidersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponsePb.java new file mode 100755 index 000000000..478c8e66b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/BatchGetProvidersResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class BatchGetProvidersResponsePb { + @JsonProperty("providers") + private Collection providers; + + public BatchGetProvidersResponsePb setProviders(Collection providers) { + this.providers = providers; + return this; + } + + public Collection getProviders() { + return providers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BatchGetProvidersResponsePb that = (BatchGetProvidersResponsePb) o; + return Objects.equals(providers, that.providers); + } + + @Override + public int hashCode() { + return Objects.hash(providers); + } + + @Override + public String toString() { + return new ToStringer(BatchGetProvidersResponsePb.class).add("providers", providers).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java index 2a781b223..0b9e74321 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java @@ -22,7 +22,7 @@ public GetListingContentMetadataResponse get(GetListingContentMetadataRequest re String.format("/api/2.1/marketplace-consumer/listings/%s/content", request.getListingId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetListingContentMetadataResponse.class); } catch (IOException e) { @@ -37,7 +37,7 @@ public ListFulfillmentsResponse list(ListFulfillmentsRequest request) { "/api/2.1/marketplace-consumer/listings/%s/fulfillments", request.getListingId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFulfillmentsResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java index c817eb350..ac761374f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java @@ -23,7 +23,7 @@ public Installation create(CreateInstallationRequest request) { "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, Installation.class); @@ -40,7 +40,7 @@ public void delete(DeleteInstallationRequest request) { request.getListingId(), request.getInstallationId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteInstallationResponse.class); } catch (IOException e) { @@ -53,7 +53,7 @@ public ListAllInstallationsResponse list(ListAllInstallationsRequest request) { String path = "/api/2.1/marketplace-consumer/installations"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAllInstallationsResponse.class); } catch (IOException e) { @@ -68,7 +68,7 @@ public ListInstallationsResponse listListingInstallations(ListInstallationsReque "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListInstallationsResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public UpdateInstallationResponse update(UpdateInstallationRequest request) { request.getListingId(), request.getInstallationId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateInstallationResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java index c1177e723..aa7e64e88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java @@ -21,7 +21,7 @@ public BatchGetListingsResponse batchGet(BatchGetListingsRequest request) { String path = "/api/2.1/marketplace-consumer/listings:batchGet"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, BatchGetListingsResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public GetListingResponse get(GetListingRequest request) { String path = String.format("/api/2.1/marketplace-consumer/listings/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetListingResponse.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public ListListingsResponse list(ListListingsRequest request) { String path = "/api/2.1/marketplace-consumer/listings"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListListingsResponse.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public SearchListingsResponse search(SearchListingsRequest request) { String path = "/api/2.1/marketplace-consumer/search-listings"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SearchListingsResponse.class); } catch (IOException e) { diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java index be0d25da4..b829dd07c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java @@ -24,7 +24,7 @@ public CreatePersonalizationRequestResponse create(CreatePersonalizationRequest request.getListingId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreatePersonalizationRequestResponse.class); @@ -41,7 +41,7 @@ public GetPersonalizationRequestResponse get(GetPersonalizationRequestRequest re request.getListingId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPersonalizationRequestResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public ListAllPersonalizationRequestsResponse list( String path = "/api/2.1/marketplace-consumer/personalization-requests"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAllPersonalizationRequestsResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java index 
f767a09f5..c507fbd93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java @@ -21,7 +21,7 @@ public BatchGetProvidersResponse batchGet(BatchGetProvidersRequest request) { String path = "/api/2.1/marketplace-consumer/providers:batchGet"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, BatchGetProvidersResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public GetProviderResponse get(GetProviderRequest request) { String path = String.format("/api/2.1/marketplace-consumer/providers/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetProviderResponse.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public ListProvidersResponse list(ListProvidersRequest request) { String path = "/api/2.1/marketplace-consumer/providers"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListProvidersResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java index 4be619f52..13ef6f3ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ConsumerTerms.ConsumerTermsSerializer.class) +@JsonDeserialize(using = ConsumerTerms.ConsumerTermsDeserializer.class) public class ConsumerTerms { /** */ - @JsonProperty("version") private String version; public ConsumerTerms setVersion(String version) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(ConsumerTerms.class).add("version", version).toString(); } + + ConsumerTermsPb toPb() { + ConsumerTermsPb pb = new ConsumerTermsPb(); + pb.setVersion(version); + + return pb; + } + + static ConsumerTerms fromPb(ConsumerTermsPb pb) { + ConsumerTerms model = new ConsumerTerms(); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ConsumerTermsSerializer extends JsonSerializer { + @Override + public void serialize(ConsumerTerms value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ConsumerTermsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ConsumerTermsDeserializer extends JsonDeserializer { + @Override + public ConsumerTerms deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ConsumerTermsPb pb = mapper.readValue(p, ConsumerTermsPb.class); + return ConsumerTerms.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTermsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTermsPb.java new file mode 100755 index 000000000..5cb3f54f8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTermsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ConsumerTermsPb { + @JsonProperty("version") + private String version; + + public ConsumerTermsPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConsumerTermsPb that = (ConsumerTermsPb) o; + return Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(version); + } + + @Override + public String toString() { + return new ToStringer(ConsumerTermsPb.class).add("version", version).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java index e0e4135e6..4d08fc277 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java @@ -4,26 +4,33 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** contact info for the consumer requesting data or performing a listing installation */ @Generated +@JsonSerialize(using = ContactInfo.ContactInfoSerializer.class) +@JsonDeserialize(using = ContactInfo.ContactInfoDeserializer.class) public class ContactInfo { /** */ - @JsonProperty("company") private String company; /** */ - @JsonProperty("email") private String email; /** */ - @JsonProperty("first_name") private String firstName; /** */ - @JsonProperty("last_name") private String lastName; public ContactInfo setCompany(String company) { @@ -87,4 +94,43 @@ public String toString() { .add("lastName", lastName) .toString(); } + + ContactInfoPb toPb() { + ContactInfoPb pb = new ContactInfoPb(); + pb.setCompany(company); + pb.setEmail(email); + pb.setFirstName(firstName); + pb.setLastName(lastName); + + return pb; + } + + static ContactInfo fromPb(ContactInfoPb pb) { + ContactInfo model = new ContactInfo(); + model.setCompany(pb.getCompany()); + model.setEmail(pb.getEmail()); + model.setFirstName(pb.getFirstName()); + model.setLastName(pb.getLastName()); + + return model; + } + + public static class ContactInfoSerializer extends JsonSerializer { + @Override + public void serialize(ContactInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
ContactInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ContactInfoDeserializer extends JsonDeserializer { + @Override + public ContactInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ContactInfoPb pb = mapper.readValue(p, ContactInfoPb.class); + return ContactInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfoPb.java new file mode 100755 index 000000000..32e237a82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfoPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** contact info for the consumer requesting data or performing a listing installation */ +@Generated +class ContactInfoPb { + @JsonProperty("company") + private String company; + + @JsonProperty("email") + private String email; + + @JsonProperty("first_name") + private String firstName; + + @JsonProperty("last_name") + private String lastName; + + public ContactInfoPb setCompany(String company) { + this.company = company; + return this; + } + + public String getCompany() { + return company; + } + + public ContactInfoPb setEmail(String email) { + this.email = email; + return this; + } + + public String getEmail() { + return email; + } + + public ContactInfoPb setFirstName(String firstName) { + this.firstName = firstName; + return this; + } + + public String getFirstName() { + return firstName; + 
} + + public ContactInfoPb setLastName(String lastName) { + this.lastName = lastName; + return this; + } + + public String getLastName() { + return lastName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContactInfoPb that = (ContactInfoPb) o; + return Objects.equals(company, that.company) + && Objects.equals(email, that.email) + && Objects.equals(firstName, that.firstName) + && Objects.equals(lastName, that.lastName); + } + + @Override + public int hashCode() { + return Objects.hash(company, email, firstName, lastName); + } + + @Override + public String toString() { + return new ToStringer(ContactInfoPb.class) + .add("company", company) + .add("email", email) + .add("firstName", firstName) + .add("lastName", lastName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Converters.java new file mode 100755 index 000000000..ba87cb4ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.marketplace; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if 
(duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java index 964a88910..5d0df5fed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExchangeFilterRequest.CreateExchangeFilterRequestSerializer.class) +@JsonDeserialize(using = CreateExchangeFilterRequest.CreateExchangeFilterRequestDeserializer.class) public class CreateExchangeFilterRequest { /** */ - @JsonProperty("filter") private ExchangeFilter filter; public CreateExchangeFilterRequest setFilter(ExchangeFilter filter) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateExchangeFilterRequest.class).add("filter", filter).toString(); } + + CreateExchangeFilterRequestPb toPb() { + CreateExchangeFilterRequestPb pb = new CreateExchangeFilterRequestPb(); + pb.setFilter(filter); + + return pb; + } + + static CreateExchangeFilterRequest fromPb(CreateExchangeFilterRequestPb pb) { + CreateExchangeFilterRequest model = new CreateExchangeFilterRequest(); + model.setFilter(pb.getFilter()); + + return model; + } + + public static class CreateExchangeFilterRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExchangeFilterRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExchangeFilterRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExchangeFilterRequestDeserializer + extends JsonDeserializer { + @Override + public 
CreateExchangeFilterRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExchangeFilterRequestPb pb = mapper.readValue(p, CreateExchangeFilterRequestPb.class); + return CreateExchangeFilterRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequestPb.java new file mode 100755 index 000000000..b170d9372 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExchangeFilterRequestPb { + @JsonProperty("filter") + private ExchangeFilter filter; + + public CreateExchangeFilterRequestPb setFilter(ExchangeFilter filter) { + this.filter = filter; + return this; + } + + public ExchangeFilter getFilter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeFilterRequestPb that = (CreateExchangeFilterRequestPb) o; + return Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(filter); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeFilterRequestPb.class).add("filter", filter).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java index 152a5baa4..0a96c615b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExchangeFilterResponse.CreateExchangeFilterResponseSerializer.class) +@JsonDeserialize( + using = CreateExchangeFilterResponse.CreateExchangeFilterResponseDeserializer.class) public class CreateExchangeFilterResponse { /** */ - @JsonProperty("filter_id") private String filterId; public CreateExchangeFilterResponse setFilterId(String filterId) { @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateExchangeFilterResponse.class).add("filterId", filterId).toString(); } + + CreateExchangeFilterResponsePb toPb() { + CreateExchangeFilterResponsePb pb = new CreateExchangeFilterResponsePb(); + pb.setFilterId(filterId); + + return pb; + } + + static CreateExchangeFilterResponse 
fromPb(CreateExchangeFilterResponsePb pb) { + CreateExchangeFilterResponse model = new CreateExchangeFilterResponse(); + model.setFilterId(pb.getFilterId()); + + return model; + } + + public static class CreateExchangeFilterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExchangeFilterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExchangeFilterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExchangeFilterResponseDeserializer + extends JsonDeserializer { + @Override + public CreateExchangeFilterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExchangeFilterResponsePb pb = mapper.readValue(p, CreateExchangeFilterResponsePb.class); + return CreateExchangeFilterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponsePb.java new file mode 100755 index 000000000..bd08c2b0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExchangeFilterResponsePb { + @JsonProperty("filter_id") + private String filterId; + + public CreateExchangeFilterResponsePb setFilterId(String filterId) { + this.filterId = filterId; + return this; + } + + public String getFilterId() { + return filterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeFilterResponsePb that = (CreateExchangeFilterResponsePb) o; + return Objects.equals(filterId, that.filterId); + } + + @Override + public int hashCode() { + return Objects.hash(filterId); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeFilterResponsePb.class) + .add("filterId", filterId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java index 09e19edee..b5d9e4268 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExchangeRequest.CreateExchangeRequestSerializer.class) +@JsonDeserialize(using = CreateExchangeRequest.CreateExchangeRequestDeserializer.class) public class CreateExchangeRequest { /** */ - @JsonProperty("exchange") private Exchange exchange; public CreateExchangeRequest setExchange(Exchange exchange) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateExchangeRequest.class).add("exchange", exchange).toString(); } + + CreateExchangeRequestPb toPb() { + CreateExchangeRequestPb pb = new CreateExchangeRequestPb(); + pb.setExchange(exchange); + + return pb; + } + + static CreateExchangeRequest fromPb(CreateExchangeRequestPb pb) { + CreateExchangeRequest model = new CreateExchangeRequest(); + model.setExchange(pb.getExchange()); + + return model; + } + + public static class CreateExchangeRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExchangeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExchangeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExchangeRequestDeserializer + extends JsonDeserializer { + @Override + public CreateExchangeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExchangeRequestPb pb = mapper.readValue(p, CreateExchangeRequestPb.class); + return CreateExchangeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequestPb.java new file mode 100755 index 000000000..4b79bfe84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExchangeRequestPb { + @JsonProperty("exchange") + private Exchange exchange; + + public CreateExchangeRequestPb setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeRequestPb that = (CreateExchangeRequestPb) o; + return Objects.equals(exchange, that.exchange); + } + + @Override + public int hashCode() { + return Objects.hash(exchange); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeRequestPb.class).add("exchange", exchange).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java index 528bf3ff7..a30b52247 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExchangeResponse.CreateExchangeResponseSerializer.class) +@JsonDeserialize(using = CreateExchangeResponse.CreateExchangeResponseDeserializer.class) public class CreateExchangeResponse { /** */ - @JsonProperty("exchange_id") private String exchangeId; public CreateExchangeResponse setExchangeId(String exchangeId) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateExchangeResponse.class).add("exchangeId", exchangeId).toString(); } + + CreateExchangeResponsePb toPb() { + CreateExchangeResponsePb pb = new CreateExchangeResponsePb(); + pb.setExchangeId(exchangeId); + + return pb; + } + + static CreateExchangeResponse fromPb(CreateExchangeResponsePb pb) { + CreateExchangeResponse model = new CreateExchangeResponse(); + model.setExchangeId(pb.getExchangeId()); + + return model; + } + + public static class CreateExchangeResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExchangeResponse value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + CreateExchangeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExchangeResponseDeserializer + extends JsonDeserializer { + @Override + public CreateExchangeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExchangeResponsePb pb = mapper.readValue(p, CreateExchangeResponsePb.class); + return CreateExchangeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponsePb.java new file mode 100755 index 000000000..0dab66d57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExchangeResponsePb { + @JsonProperty("exchange_id") + private String exchangeId; + + public CreateExchangeResponsePb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeResponsePb that = (CreateExchangeResponsePb) o; + return Objects.equals(exchangeId, that.exchangeId); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeResponsePb.class).add("exchangeId", exchangeId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java index 886bbdefc..4b5e85767 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateFileRequest.CreateFileRequestSerializer.class) +@JsonDeserialize(using = CreateFileRequest.CreateFileRequestDeserializer.class) public class CreateFileRequest { /** */ - @JsonProperty("display_name") private String displayName; /** */ - @JsonProperty("file_parent") private FileParent fileParent; /** */ - @JsonProperty("marketplace_file_type") private MarketplaceFileType marketplaceFileType; /** */ - @JsonProperty("mime_type") private String mimeType; public CreateFileRequest setDisplayName(String displayName) { @@ -86,4 +93,44 @@ public String toString() { .add("mimeType", mimeType) .toString(); } + + CreateFileRequestPb toPb() { + CreateFileRequestPb pb = new CreateFileRequestPb(); + pb.setDisplayName(displayName); + pb.setFileParent(fileParent); + pb.setMarketplaceFileType(marketplaceFileType); + pb.setMimeType(mimeType); + + return pb; + } + + static CreateFileRequest fromPb(CreateFileRequestPb pb) { + CreateFileRequest model = new CreateFileRequest(); + model.setDisplayName(pb.getDisplayName()); + model.setFileParent(pb.getFileParent()); + model.setMarketplaceFileType(pb.getMarketplaceFileType()); + model.setMimeType(pb.getMimeType()); + + return model; + } + + public static class CreateFileRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateFileRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateFileRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateFileRequestDeserializer extends JsonDeserializer { + @Override + public CreateFileRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateFileRequestPb pb = mapper.readValue(p, CreateFileRequestPb.class); + return CreateFileRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequestPb.java new file mode 100755 index 000000000..210e97c01 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateFileRequestPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("file_parent") + private FileParent fileParent; + + @JsonProperty("marketplace_file_type") + private MarketplaceFileType marketplaceFileType; + + @JsonProperty("mime_type") + private String mimeType; + + public CreateFileRequestPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateFileRequestPb setFileParent(FileParent fileParent) { + this.fileParent = fileParent; + return this; + } + + public FileParent getFileParent() { + return fileParent; + } + + public CreateFileRequestPb setMarketplaceFileType(MarketplaceFileType marketplaceFileType) { + this.marketplaceFileType = marketplaceFileType; + return this; + } + + public MarketplaceFileType getMarketplaceFileType() { + return marketplaceFileType; + } + + public CreateFileRequestPb setMimeType(String mimeType) { + this.mimeType = mimeType; + return this; + } + + public 
String getMimeType() { + return mimeType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateFileRequestPb that = (CreateFileRequestPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(fileParent, that.fileParent) + && Objects.equals(marketplaceFileType, that.marketplaceFileType) + && Objects.equals(mimeType, that.mimeType); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, fileParent, marketplaceFileType, mimeType); + } + + @Override + public String toString() { + return new ToStringer(CreateFileRequestPb.class) + .add("displayName", displayName) + .add("fileParent", fileParent) + .add("marketplaceFileType", marketplaceFileType) + .add("mimeType", mimeType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponse.java index a980c61d1..075099684 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateFileResponse.CreateFileResponseSerializer.class) +@JsonDeserialize(using = CreateFileResponse.CreateFileResponseDeserializer.class) public class CreateFileResponse { /** */ - @JsonProperty("file_info") private FileInfo fileInfo; /** Pre-signed POST URL to blob storage */ - @JsonProperty("upload_url") private String uploadUrl; public CreateFileResponse setFileInfo(FileInfo fileInfo) { @@ -55,4 +64,40 @@ public String toString() { .add("uploadUrl", uploadUrl) .toString(); } + + CreateFileResponsePb toPb() { + CreateFileResponsePb pb = new CreateFileResponsePb(); + pb.setFileInfo(fileInfo); + pb.setUploadUrl(uploadUrl); + + return pb; + } + + static CreateFileResponse fromPb(CreateFileResponsePb pb) { + CreateFileResponse model = new CreateFileResponse(); + model.setFileInfo(pb.getFileInfo()); + model.setUploadUrl(pb.getUploadUrl()); + + return model; + } + + public static class CreateFileResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateFileResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateFileResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateFileResponseDeserializer extends JsonDeserializer { + @Override + public CreateFileResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateFileResponsePb pb = mapper.readValue(p, CreateFileResponsePb.class); + return CreateFileResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponsePb.java new file mode 100755 index 000000000..bf55c5bd0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateFileResponsePb { + @JsonProperty("file_info") + private FileInfo fileInfo; + + @JsonProperty("upload_url") + private String uploadUrl; + + public CreateFileResponsePb setFileInfo(FileInfo fileInfo) { + this.fileInfo = fileInfo; + return this; + } + + public FileInfo getFileInfo() { + return fileInfo; + } + + public CreateFileResponsePb setUploadUrl(String uploadUrl) { + this.uploadUrl = uploadUrl; + return this; + } + + public String getUploadUrl() { + return uploadUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateFileResponsePb that = (CreateFileResponsePb) o; + return Objects.equals(fileInfo, that.fileInfo) && Objects.equals(uploadUrl, that.uploadUrl); + } + + @Override + public int hashCode() { + return Objects.hash(fileInfo, uploadUrl); + } + + @Override + public String toString() { + return new ToStringer(CreateFileResponsePb.class) + .add("fileInfo", fileInfo) + .add("uploadUrl", uploadUrl) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequest.java index 422d84cd8..e56c6950a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequest.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateInstallationRequest.CreateInstallationRequestSerializer.class) +@JsonDeserialize(using = CreateInstallationRequest.CreateInstallationRequestDeserializer.class) public class CreateInstallationRequest { /** */ - @JsonProperty("accepted_consumer_terms") private ConsumerTerms acceptedConsumerTerms; /** */ - @JsonProperty("catalog_name") private String catalogName; /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonProperty("recipient_type") private DeltaSharingRecipientType recipientType; /** for git repo installations */ - @JsonProperty("repo_detail") private RepoInstallation repoDetail; /** */ - @JsonProperty("share_name") private String shareName; public 
CreateInstallationRequest setAcceptedConsumerTerms(ConsumerTerms acceptedConsumerTerms) { @@ -117,4 +122,51 @@ public String toString() { .add("shareName", shareName) .toString(); } + + CreateInstallationRequestPb toPb() { + CreateInstallationRequestPb pb = new CreateInstallationRequestPb(); + pb.setAcceptedConsumerTerms(acceptedConsumerTerms); + pb.setCatalogName(catalogName); + pb.setListingId(listingId); + pb.setRecipientType(recipientType); + pb.setRepoDetail(repoDetail); + pb.setShareName(shareName); + + return pb; + } + + static CreateInstallationRequest fromPb(CreateInstallationRequestPb pb) { + CreateInstallationRequest model = new CreateInstallationRequest(); + model.setAcceptedConsumerTerms(pb.getAcceptedConsumerTerms()); + model.setCatalogName(pb.getCatalogName()); + model.setListingId(pb.getListingId()); + model.setRecipientType(pb.getRecipientType()); + model.setRepoDetail(pb.getRepoDetail()); + model.setShareName(pb.getShareName()); + + return model; + } + + public static class CreateInstallationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateInstallationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateInstallationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateInstallationRequestDeserializer + extends JsonDeserializer { + @Override + public CreateInstallationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateInstallationRequestPb pb = mapper.readValue(p, CreateInstallationRequestPb.class); + return CreateInstallationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequestPb.java new file mode 100755 index 000000000..2e455cff2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequestPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateInstallationRequestPb { + @JsonProperty("accepted_consumer_terms") + private ConsumerTerms acceptedConsumerTerms; + + @JsonProperty("catalog_name") + private String catalogName; + + @JsonIgnore private String listingId; + + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + @JsonProperty("repo_detail") + private RepoInstallation repoDetail; + + @JsonProperty("share_name") + private String shareName; + + public CreateInstallationRequestPb setAcceptedConsumerTerms(ConsumerTerms acceptedConsumerTerms) { + this.acceptedConsumerTerms = acceptedConsumerTerms; + return this; + } + + public ConsumerTerms getAcceptedConsumerTerms() { + return acceptedConsumerTerms; + } + + public CreateInstallationRequestPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public CreateInstallationRequestPb setListingId(String listingId) { + this.listingId = 
listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public CreateInstallationRequestPb setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public CreateInstallationRequestPb setRepoDetail(RepoInstallation repoDetail) { + this.repoDetail = repoDetail; + return this; + } + + public RepoInstallation getRepoDetail() { + return repoDetail; + } + + public CreateInstallationRequestPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateInstallationRequestPb that = (CreateInstallationRequestPb) o; + return Objects.equals(acceptedConsumerTerms, that.acceptedConsumerTerms) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(listingId, that.listingId) + && Objects.equals(recipientType, that.recipientType) + && Objects.equals(repoDetail, that.repoDetail) + && Objects.equals(shareName, that.shareName); + } + + @Override + public int hashCode() { + return Objects.hash( + acceptedConsumerTerms, catalogName, listingId, recipientType, repoDetail, shareName); + } + + @Override + public String toString() { + return new ToStringer(CreateInstallationRequestPb.class) + .add("acceptedConsumerTerms", acceptedConsumerTerms) + .add("catalogName", catalogName) + .add("listingId", listingId) + .add("recipientType", recipientType) + .add("repoDetail", repoDetail) + .add("shareName", shareName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequest.java index 0f89c08ed..d09a44711 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateListingRequest.CreateListingRequestSerializer.class) +@JsonDeserialize(using = CreateListingRequest.CreateListingRequestDeserializer.class) public class CreateListingRequest { /** */ - @JsonProperty("listing") private Listing listing; public CreateListingRequest setListing(Listing listing) { @@ -39,4 +49,40 @@ public int hashCode() { public String toString() { return new ToStringer(CreateListingRequest.class).add("listing", listing).toString(); } + + CreateListingRequestPb toPb() { + CreateListingRequestPb pb = new CreateListingRequestPb(); + pb.setListing(listing); + + return pb; + } + + static CreateListingRequest fromPb(CreateListingRequestPb pb) { + CreateListingRequest model = new CreateListingRequest(); + model.setListing(pb.getListing()); + + return model; + } + + public static class CreateListingRequestSerializer extends JsonSerializer { + @Override + public void serialize( + CreateListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
CreateListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateListingRequestDeserializer + extends JsonDeserializer { + @Override + public CreateListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateListingRequestPb pb = mapper.readValue(p, CreateListingRequestPb.class); + return CreateListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequestPb.java new file mode 100755 index 000000000..400eab154 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateListingRequestPb { + @JsonProperty("listing") + private Listing listing; + + public CreateListingRequestPb setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateListingRequestPb that = (CreateListingRequestPb) o; + return Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(listing); + } + + @Override + public String toString() { + return new ToStringer(CreateListingRequestPb.class).add("listing", listing).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java index 39814421b..55aab263a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateListingResponse.CreateListingResponseSerializer.class) +@JsonDeserialize(using = CreateListingResponse.CreateListingResponseDeserializer.class) public class CreateListingResponse { /** */ - @JsonProperty("listing_id") private String listingId; public CreateListingResponse setListingId(String listingId) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateListingResponse.class).add("listingId", listingId).toString(); } + + CreateListingResponsePb toPb() { + CreateListingResponsePb pb = new CreateListingResponsePb(); + pb.setListingId(listingId); + + return pb; + } + + static CreateListingResponse fromPb(CreateListingResponsePb pb) { + CreateListingResponse model = new CreateListingResponse(); + model.setListingId(pb.getListingId()); + + return model; + } + + public static class CreateListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateListingResponseDeserializer + extends JsonDeserializer { + @Override + public CreateListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateListingResponsePb pb = mapper.readValue(p, CreateListingResponsePb.class); + return CreateListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponsePb.java new file mode 100755 index 000000000..cca7d9212 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateListingResponsePb { + @JsonProperty("listing_id") + private String listingId; + + public CreateListingResponsePb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateListingResponsePb that = (CreateListingResponsePb) o; + return Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(listingId); + } + + @Override + public String toString() { + return new ToStringer(CreateListingResponsePb.class).add("listingId", listingId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java index 11405fa69..438a8a4ac 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java @@ -4,46 +4,49 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Data request messages also creates a lead (maybe) */ @Generated +@JsonSerialize(using = CreatePersonalizationRequest.CreatePersonalizationRequestSerializer.class) +@JsonDeserialize( + using = CreatePersonalizationRequest.CreatePersonalizationRequestDeserializer.class) public class CreatePersonalizationRequest { /** */ - @JsonProperty("accepted_consumer_terms") private ConsumerTerms acceptedConsumerTerms; /** */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("company") private String company; /** */ - @JsonProperty("first_name") private String firstName; /** */ - @JsonProperty("intended_use") private String intendedUse; /** */ - @JsonProperty("is_from_lighthouse") private Boolean isFromLighthouse; /** */ - @JsonProperty("last_name") private String lastName; /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonProperty("recipient_type") private DeltaSharingRecipientType recipientType; public 
CreatePersonalizationRequest setAcceptedConsumerTerms( @@ -172,4 +175,57 @@ public String toString() { .add("recipientType", recipientType) .toString(); } + + CreatePersonalizationRequestPb toPb() { + CreatePersonalizationRequestPb pb = new CreatePersonalizationRequestPb(); + pb.setAcceptedConsumerTerms(acceptedConsumerTerms); + pb.setComment(comment); + pb.setCompany(company); + pb.setFirstName(firstName); + pb.setIntendedUse(intendedUse); + pb.setIsFromLighthouse(isFromLighthouse); + pb.setLastName(lastName); + pb.setListingId(listingId); + pb.setRecipientType(recipientType); + + return pb; + } + + static CreatePersonalizationRequest fromPb(CreatePersonalizationRequestPb pb) { + CreatePersonalizationRequest model = new CreatePersonalizationRequest(); + model.setAcceptedConsumerTerms(pb.getAcceptedConsumerTerms()); + model.setComment(pb.getComment()); + model.setCompany(pb.getCompany()); + model.setFirstName(pb.getFirstName()); + model.setIntendedUse(pb.getIntendedUse()); + model.setIsFromLighthouse(pb.getIsFromLighthouse()); + model.setLastName(pb.getLastName()); + model.setListingId(pb.getListingId()); + model.setRecipientType(pb.getRecipientType()); + + return model; + } + + public static class CreatePersonalizationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePersonalizationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePersonalizationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePersonalizationRequestDeserializer + extends JsonDeserializer { + @Override + public CreatePersonalizationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePersonalizationRequestPb pb = mapper.readValue(p, CreatePersonalizationRequestPb.class); + return CreatePersonalizationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestPb.java new file mode 100755 index 000000000..bae7db762 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestPb.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Data request messages also creates a lead (maybe) */ +@Generated +class CreatePersonalizationRequestPb { + @JsonProperty("accepted_consumer_terms") + private ConsumerTerms acceptedConsumerTerms; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("company") + private String company; + + @JsonProperty("first_name") + private String firstName; + + @JsonProperty("intended_use") + private String intendedUse; + + @JsonProperty("is_from_lighthouse") + private Boolean isFromLighthouse; + + @JsonProperty("last_name") + private String lastName; + + @JsonIgnore private String listingId; + + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + public CreatePersonalizationRequestPb setAcceptedConsumerTerms( + ConsumerTerms acceptedConsumerTerms) { + this.acceptedConsumerTerms = acceptedConsumerTerms; + return this; + } + + public ConsumerTerms getAcceptedConsumerTerms() { + return acceptedConsumerTerms; + } + + public 
CreatePersonalizationRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreatePersonalizationRequestPb setCompany(String company) { + this.company = company; + return this; + } + + public String getCompany() { + return company; + } + + public CreatePersonalizationRequestPb setFirstName(String firstName) { + this.firstName = firstName; + return this; + } + + public String getFirstName() { + return firstName; + } + + public CreatePersonalizationRequestPb setIntendedUse(String intendedUse) { + this.intendedUse = intendedUse; + return this; + } + + public String getIntendedUse() { + return intendedUse; + } + + public CreatePersonalizationRequestPb setIsFromLighthouse(Boolean isFromLighthouse) { + this.isFromLighthouse = isFromLighthouse; + return this; + } + + public Boolean getIsFromLighthouse() { + return isFromLighthouse; + } + + public CreatePersonalizationRequestPb setLastName(String lastName) { + this.lastName = lastName; + return this; + } + + public String getLastName() { + return lastName; + } + + public CreatePersonalizationRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public CreatePersonalizationRequestPb setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePersonalizationRequestPb that = (CreatePersonalizationRequestPb) o; + return Objects.equals(acceptedConsumerTerms, that.acceptedConsumerTerms) + && Objects.equals(comment, that.comment) + && Objects.equals(company, that.company) + && Objects.equals(firstName, that.firstName) + && Objects.equals(intendedUse, 
that.intendedUse) + && Objects.equals(isFromLighthouse, that.isFromLighthouse) + && Objects.equals(lastName, that.lastName) + && Objects.equals(listingId, that.listingId) + && Objects.equals(recipientType, that.recipientType); + } + + @Override + public int hashCode() { + return Objects.hash( + acceptedConsumerTerms, + comment, + company, + firstName, + intendedUse, + isFromLighthouse, + lastName, + listingId, + recipientType); + } + + @Override + public String toString() { + return new ToStringer(CreatePersonalizationRequestPb.class) + .add("acceptedConsumerTerms", acceptedConsumerTerms) + .add("comment", comment) + .add("company", company) + .add("firstName", firstName) + .add("intendedUse", intendedUse) + .add("isFromLighthouse", isFromLighthouse) + .add("lastName", lastName) + .add("listingId", listingId) + .add("recipientType", recipientType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponse.java index 0d8e8ebbd..f9a30a202 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponse.java @@ -4,13 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreatePersonalizationRequestResponse.CreatePersonalizationRequestResponseSerializer.class) +@JsonDeserialize( + using = + CreatePersonalizationRequestResponse.CreatePersonalizationRequestResponseDeserializer.class) public class CreatePersonalizationRequestResponse { /** */ - @JsonProperty("id") private String id; public CreatePersonalizationRequestResponse setId(String id) { @@ -39,4 +53,42 @@ public int hashCode() { public String toString() { return new ToStringer(CreatePersonalizationRequestResponse.class).add("id", id).toString(); } + + CreatePersonalizationRequestResponsePb toPb() { + CreatePersonalizationRequestResponsePb pb = new CreatePersonalizationRequestResponsePb(); + pb.setId(id); + + return pb; + } + + static CreatePersonalizationRequestResponse fromPb(CreatePersonalizationRequestResponsePb pb) { + CreatePersonalizationRequestResponse model = new CreatePersonalizationRequestResponse(); + model.setId(pb.getId()); + + return model; + } + + public static class CreatePersonalizationRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePersonalizationRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePersonalizationRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePersonalizationRequestResponseDeserializer + extends JsonDeserializer { + @Override + public CreatePersonalizationRequestResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePersonalizationRequestResponsePb pb = + mapper.readValue(p, CreatePersonalizationRequestResponsePb.class); + return CreatePersonalizationRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponsePb.java new file mode 100755 index 000000000..fc7fc86a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePersonalizationRequestResponsePb { + @JsonProperty("id") + private String id; + + public CreatePersonalizationRequestResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePersonalizationRequestResponsePb that = (CreatePersonalizationRequestResponsePb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CreatePersonalizationRequestResponsePb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequest.java index ba00a9c20..6020722bd 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateProviderRequest.CreateProviderRequestSerializer.class) +@JsonDeserialize(using = CreateProviderRequest.CreateProviderRequestDeserializer.class) public class CreateProviderRequest { /** */ - @JsonProperty("provider") private ProviderInfo provider; public CreateProviderRequest setProvider(ProviderInfo provider) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateProviderRequest.class).add("provider", provider).toString(); } + + CreateProviderRequestPb toPb() { + CreateProviderRequestPb pb = new CreateProviderRequestPb(); + pb.setProvider(provider); + + return pb; + } + + static CreateProviderRequest fromPb(CreateProviderRequestPb pb) { + CreateProviderRequest model = new CreateProviderRequest(); + model.setProvider(pb.getProvider()); + + return model; + } + + public static class CreateProviderRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateProviderRequest value, JsonGenerator gen, SerializerProvider provider) + 
throws IOException { + CreateProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateProviderRequestDeserializer + extends JsonDeserializer { + @Override + public CreateProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateProviderRequestPb pb = mapper.readValue(p, CreateProviderRequestPb.class); + return CreateProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequestPb.java new file mode 100755 index 000000000..e5b562ca9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateProviderRequestPb { + @JsonProperty("provider") + private ProviderInfo provider; + + public CreateProviderRequestPb setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProviderRequestPb that = (CreateProviderRequestPb) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(CreateProviderRequestPb.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java index 7308ff060..b811ef53f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateProviderResponse.CreateProviderResponseSerializer.class) +@JsonDeserialize(using = CreateProviderResponse.CreateProviderResponseDeserializer.class) public class CreateProviderResponse { /** */ - @JsonProperty("id") private String id; public CreateProviderResponse setId(String id) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateProviderResponse.class).add("id", id).toString(); } + + CreateProviderResponsePb toPb() { + CreateProviderResponsePb pb = new CreateProviderResponsePb(); + pb.setId(id); + + return pb; + } + + static CreateProviderResponse fromPb(CreateProviderResponsePb pb) { + CreateProviderResponse model = new CreateProviderResponse(); + model.setId(pb.getId()); + + return model; + } + + public static class CreateProviderResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateProviderResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateProviderResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateProviderResponseDeserializer + extends JsonDeserializer { + @Override + public CreateProviderResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateProviderResponsePb pb = mapper.readValue(p, CreateProviderResponsePb.class); + return CreateProviderResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponsePb.java new file mode 100755 index 000000000..50b2f6e35 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateProviderResponsePb { + @JsonProperty("id") + private String id; + + public CreateProviderResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProviderResponsePb that = (CreateProviderResponsePb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CreateProviderResponsePb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java index cdb67a342..a60c0c59b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DataRefreshInfo.DataRefreshInfoSerializer.class) +@JsonDeserialize(using = DataRefreshInfo.DataRefreshInfoDeserializer.class) public class DataRefreshInfo { /** */ - @JsonProperty("interval") private Long interval; /** */ - @JsonProperty("unit") private DataRefresh unit; public DataRefreshInfo setInterval(Long interval) { @@ -55,4 +64,40 @@ public String toString() { .add("unit", unit) .toString(); } + + DataRefreshInfoPb toPb() { + DataRefreshInfoPb pb = new DataRefreshInfoPb(); + pb.setInterval(interval); + pb.setUnit(unit); + + return pb; + } + + static DataRefreshInfo fromPb(DataRefreshInfoPb pb) { + DataRefreshInfo model = new DataRefreshInfo(); + model.setInterval(pb.getInterval()); + model.setUnit(pb.getUnit()); + + return model; + } + + public static class DataRefreshInfoSerializer extends JsonSerializer { + @Override + public void serialize(DataRefreshInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataRefreshInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataRefreshInfoDeserializer extends JsonDeserializer { + 
@Override + public DataRefreshInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataRefreshInfoPb pb = mapper.readValue(p, DataRefreshInfoPb.class); + return DataRefreshInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfoPb.java new file mode 100755 index 000000000..fc97fc832 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfoPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DataRefreshInfoPb { + @JsonProperty("interval") + private Long interval; + + @JsonProperty("unit") + private DataRefresh unit; + + public DataRefreshInfoPb setInterval(Long interval) { + this.interval = interval; + return this; + } + + public Long getInterval() { + return interval; + } + + public DataRefreshInfoPb setUnit(DataRefresh unit) { + this.unit = unit; + return this; + } + + public DataRefresh getUnit() { + return unit; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataRefreshInfoPb that = (DataRefreshInfoPb) o; + return Objects.equals(interval, that.interval) && Objects.equals(unit, that.unit); + } + + @Override + public int hashCode() { + return Objects.hash(interval, unit); + } + + @Override + public String toString() { + return new ToStringer(DataRefreshInfoPb.class) + .add("interval", 
interval) + .add("unit", unit) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java index 83cdfa4fa..3f9c23357 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an exchange filter */ @Generated +@JsonSerialize(using = DeleteExchangeFilterRequest.DeleteExchangeFilterRequestSerializer.class) +@JsonDeserialize(using = DeleteExchangeFilterRequest.DeleteExchangeFilterRequestDeserializer.class) public class DeleteExchangeFilterRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteExchangeFilterRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExchangeFilterRequest.class).add("id", id).toString(); } + + DeleteExchangeFilterRequestPb toPb() { + DeleteExchangeFilterRequestPb pb = new DeleteExchangeFilterRequestPb(); + pb.setId(id); + + return pb; + } + + 
static DeleteExchangeFilterRequest fromPb(DeleteExchangeFilterRequestPb pb) { + DeleteExchangeFilterRequest model = new DeleteExchangeFilterRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteExchangeFilterRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExchangeFilterRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExchangeFilterRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExchangeFilterRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteExchangeFilterRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExchangeFilterRequestPb pb = mapper.readValue(p, DeleteExchangeFilterRequestPb.class); + return DeleteExchangeFilterRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequestPb.java new file mode 100755 index 000000000..f18c86a04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an exchange filter */ +@Generated +class DeleteExchangeFilterRequestPb { + @JsonIgnore private String id; + + public DeleteExchangeFilterRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExchangeFilterRequestPb that = (DeleteExchangeFilterRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeFilterRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java index b7cca36ab..2fc0a3ce1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java @@ -4,9 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteExchangeFilterResponse.DeleteExchangeFilterResponseSerializer.class) +@JsonDeserialize( + using = DeleteExchangeFilterResponse.DeleteExchangeFilterResponseDeserializer.class) public class DeleteExchangeFilterResponse { @Override @@ -25,4 +38,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExchangeFilterResponse.class).toString(); } + + DeleteExchangeFilterResponsePb toPb() { + DeleteExchangeFilterResponsePb pb = new DeleteExchangeFilterResponsePb(); + + return pb; + } + + static DeleteExchangeFilterResponse fromPb(DeleteExchangeFilterResponsePb pb) { + DeleteExchangeFilterResponse model = new DeleteExchangeFilterResponse(); + + return model; + } + + public static class DeleteExchangeFilterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExchangeFilterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExchangeFilterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExchangeFilterResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteExchangeFilterResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExchangeFilterResponsePb pb = mapper.readValue(p, DeleteExchangeFilterResponsePb.class); + return DeleteExchangeFilterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponsePb.java new file mode 100755 index 000000000..146d4f5c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteExchangeFilterResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeFilterResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java index 8dbf652bf..ecc6fea1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an exchange */ @Generated +@JsonSerialize(using = DeleteExchangeRequest.DeleteExchangeRequestSerializer.class) +@JsonDeserialize(using = DeleteExchangeRequest.DeleteExchangeRequestDeserializer.class) public class DeleteExchangeRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteExchangeRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExchangeRequest.class).add("id", id).toString(); } + + DeleteExchangeRequestPb toPb() { + DeleteExchangeRequestPb pb = new DeleteExchangeRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteExchangeRequest fromPb(DeleteExchangeRequestPb pb) { + DeleteExchangeRequest model = new DeleteExchangeRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteExchangeRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExchangeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExchangeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExchangeRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteExchangeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExchangeRequestPb pb = mapper.readValue(p, DeleteExchangeRequestPb.class); + return DeleteExchangeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequestPb.java new file mode 100755 index 000000000..0c4a75727 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an exchange */ +@Generated +class DeleteExchangeRequestPb { + @JsonIgnore private String id; + + public DeleteExchangeRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExchangeRequestPb that = (DeleteExchangeRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java index be2eb59ea..bf99b79f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteExchangeResponse.DeleteExchangeResponseSerializer.class) +@JsonDeserialize(using = DeleteExchangeResponse.DeleteExchangeResponseDeserializer.class) public class DeleteExchangeResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExchangeResponse.class).toString(); } + + DeleteExchangeResponsePb toPb() { + DeleteExchangeResponsePb pb = new DeleteExchangeResponsePb(); + + return pb; + } + + static DeleteExchangeResponse fromPb(DeleteExchangeResponsePb pb) { + DeleteExchangeResponse model = new DeleteExchangeResponse(); + + return model; + } + + public static class DeleteExchangeResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExchangeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExchangeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExchangeResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteExchangeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExchangeResponsePb pb = mapper.readValue(p, DeleteExchangeResponsePb.class); + return DeleteExchangeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponsePb.java new file mode 100755 index 000000000..1af6d49c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteExchangeResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java index fb7cefd4a..d82eda97d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a file */ @Generated +@JsonSerialize(using = DeleteFileRequest.DeleteFileRequestSerializer.class) +@JsonDeserialize(using = DeleteFileRequest.DeleteFileRequestDeserializer.class) public class DeleteFileRequest { /** */ - @JsonIgnore private String fileId; + private String fileId; public DeleteFileRequest setFileId(String fileId) { this.fileId = fileId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteFileRequest.class).add("fileId", fileId).toString(); } + + DeleteFileRequestPb toPb() { + DeleteFileRequestPb pb = new DeleteFileRequestPb(); + pb.setFileId(fileId); + + return pb; + } + + static DeleteFileRequest fromPb(DeleteFileRequestPb pb) { + DeleteFileRequest model = new DeleteFileRequest(); + model.setFileId(pb.getFileId()); + + return model; + } + + public static class DeleteFileRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteFileRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteFileRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteFileRequestDeserializer extends JsonDeserializer { + @Override + public DeleteFileRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteFileRequestPb pb = mapper.readValue(p, DeleteFileRequestPb.class); + return DeleteFileRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequestPb.java new file mode 100755 index 000000000..887ab5593 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a file */ +@Generated +class DeleteFileRequestPb { + @JsonIgnore private String fileId; + + public DeleteFileRequestPb setFileId(String fileId) { + this.fileId = fileId; + return this; + } + + public String getFileId() { + return fileId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteFileRequestPb that = (DeleteFileRequestPb) o; + return Objects.equals(fileId, that.fileId); + } + + @Override + public int hashCode() { + return Objects.hash(fileId); + } + + @Override + public String toString() { + return new ToStringer(DeleteFileRequestPb.class).add("fileId", fileId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java index f799f3705..a3074ff6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteFileResponse.DeleteFileResponseSerializer.class) +@JsonDeserialize(using = DeleteFileResponse.DeleteFileResponseDeserializer.class) public class DeleteFileResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteFileResponse.class).toString(); } + + DeleteFileResponsePb toPb() { + DeleteFileResponsePb pb = new DeleteFileResponsePb(); + + return pb; + } + + static DeleteFileResponse fromPb(DeleteFileResponsePb pb) { + DeleteFileResponse model = new DeleteFileResponse(); + + return model; + } + + public static class DeleteFileResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteFileResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteFileResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteFileResponseDeserializer extends JsonDeserializer { + @Override + public DeleteFileResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteFileResponsePb pb = mapper.readValue(p, DeleteFileResponsePb.class); + return DeleteFileResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponsePb.java new file mode 100755 index 000000000..84d325258 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteFileResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteFileResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java index ab1f87045..46922417f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Uninstall from a listing */ @Generated +@JsonSerialize(using = DeleteInstallationRequest.DeleteInstallationRequestSerializer.class) +@JsonDeserialize(using = DeleteInstallationRequest.DeleteInstallationRequestDeserializer.class) public class DeleteInstallationRequest { /** */ - @JsonIgnore private String installationId; + private String installationId; /** */ - @JsonIgnore private String listingId; + private String listingId; public DeleteInstallationRequest setInstallationId(String installationId) { this.installationId = installationId; @@ -55,4 +66,43 @@ public String toString() { .add("listingId", listingId) .toString(); } + + DeleteInstallationRequestPb toPb() { + DeleteInstallationRequestPb pb = new DeleteInstallationRequestPb(); + pb.setInstallationId(installationId); + pb.setListingId(listingId); + + return pb; + } + + static DeleteInstallationRequest fromPb(DeleteInstallationRequestPb pb) { + DeleteInstallationRequest model = new DeleteInstallationRequest(); + model.setInstallationId(pb.getInstallationId()); + model.setListingId(pb.getListingId()); + + return model; + } + + public static class DeleteInstallationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteInstallationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteInstallationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteInstallationRequestDeserializer + extends JsonDeserializer { + @Override + 
public DeleteInstallationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteInstallationRequestPb pb = mapper.readValue(p, DeleteInstallationRequestPb.class); + return DeleteInstallationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequestPb.java new file mode 100755 index 000000000..0cc2b5154 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Uninstall from a listing */ +@Generated +class DeleteInstallationRequestPb { + @JsonIgnore private String installationId; + + @JsonIgnore private String listingId; + + public DeleteInstallationRequestPb setInstallationId(String installationId) { + this.installationId = installationId; + return this; + } + + public String getInstallationId() { + return installationId; + } + + public DeleteInstallationRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteInstallationRequestPb that = (DeleteInstallationRequestPb) o; + return Objects.equals(installationId, that.installationId) + && Objects.equals(listingId, that.listingId); + } 
+ + @Override + public int hashCode() { + return Objects.hash(installationId, listingId); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstallationRequestPb.class) + .add("installationId", installationId) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java index 0928712ff..e59f9ef4f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteInstallationResponse.DeleteInstallationResponseSerializer.class) +@JsonDeserialize(using = DeleteInstallationResponse.DeleteInstallationResponseDeserializer.class) public class DeleteInstallationResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteInstallationResponse.class).toString(); } + + DeleteInstallationResponsePb toPb() { + DeleteInstallationResponsePb pb = new DeleteInstallationResponsePb(); + + return pb; + } + + static 
DeleteInstallationResponse fromPb(DeleteInstallationResponsePb pb) { + DeleteInstallationResponse model = new DeleteInstallationResponse(); + + return model; + } + + public static class DeleteInstallationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteInstallationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteInstallationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteInstallationResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteInstallationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteInstallationResponsePb pb = mapper.readValue(p, DeleteInstallationResponsePb.class); + return DeleteInstallationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponsePb.java new file mode 100755 index 000000000..3f597ea4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteInstallationResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstallationResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java index 2fb9506c5..4171b0921 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a listing */ @Generated +@JsonSerialize(using = DeleteListingRequest.DeleteListingRequestSerializer.class) +@JsonDeserialize(using = DeleteListingRequest.DeleteListingRequestDeserializer.class) public class 
DeleteListingRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteListingRequest setId(String id) { this.id = id; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteListingRequest.class).add("id", id).toString(); } + + DeleteListingRequestPb toPb() { + DeleteListingRequestPb pb = new DeleteListingRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteListingRequest fromPb(DeleteListingRequestPb pb) { + DeleteListingRequest model = new DeleteListingRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteListingRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteListingRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteListingRequestPb pb = mapper.readValue(p, DeleteListingRequestPb.class); + return DeleteListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequestPb.java new file mode 100755 index 000000000..8ad189c0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a listing */ +@Generated +class DeleteListingRequestPb { + @JsonIgnore private String id; + + public DeleteListingRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteListingRequestPb that = (DeleteListingRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteListingRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java index 1250054c3..05a501c19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteListingResponse.DeleteListingResponseSerializer.class) +@JsonDeserialize(using = DeleteListingResponse.DeleteListingResponseDeserializer.class) public class DeleteListingResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteListingResponse.class).toString(); } + + DeleteListingResponsePb toPb() { + DeleteListingResponsePb pb = new DeleteListingResponsePb(); + + return pb; + } + + static DeleteListingResponse fromPb(DeleteListingResponsePb pb) { + DeleteListingResponse model = new DeleteListingResponse(); + + return model; + } + + public static class DeleteListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteListingResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteListingResponsePb pb = mapper.readValue(p, DeleteListingResponsePb.class); + return DeleteListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponsePb.java new file mode 100755 index 000000000..1fee3e95b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteListingResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteListingResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java index 337f31b2f..0da07f694 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete provider */ @Generated +@JsonSerialize(using = DeleteProviderRequest.DeleteProviderRequestSerializer.class) +@JsonDeserialize(using = DeleteProviderRequest.DeleteProviderRequestDeserializer.class) public class 
DeleteProviderRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteProviderRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteProviderRequest.class).add("id", id).toString(); } + + DeleteProviderRequestPb toPb() { + DeleteProviderRequestPb pb = new DeleteProviderRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteProviderRequest fromPb(DeleteProviderRequestPb pb) { + DeleteProviderRequest model = new DeleteProviderRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteProviderRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteProviderRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteProviderRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteProviderRequestPb pb = mapper.readValue(p, DeleteProviderRequestPb.class); + return DeleteProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequestPb.java new file mode 100755 index 000000000..ebc3764c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete provider */ +@Generated +class DeleteProviderRequestPb { + @JsonIgnore private String id; + + public DeleteProviderRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteProviderRequestPb that = (DeleteProviderRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteProviderRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java index 41f94447f..703b4da13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteProviderResponse.DeleteProviderResponseSerializer.class) +@JsonDeserialize(using = DeleteProviderResponse.DeleteProviderResponseDeserializer.class) public class DeleteProviderResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteProviderResponse.class).toString(); } + + DeleteProviderResponsePb toPb() { + DeleteProviderResponsePb pb = new DeleteProviderResponsePb(); + + return pb; + } + + static DeleteProviderResponse fromPb(DeleteProviderResponsePb pb) { + DeleteProviderResponse model = new DeleteProviderResponse(); + + return model; + } + + public static class DeleteProviderResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteProviderResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteProviderResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteProviderResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteProviderResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteProviderResponsePb pb = mapper.readValue(p, DeleteProviderResponsePb.class); + return DeleteProviderResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponsePb.java new file mode 100755 index 000000000..f04e8c19d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteProviderResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteProviderResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java index ff0fb00ad..f65128562 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java @@ -4,46 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Exchange.ExchangeSerializer.class) +@JsonDeserialize(using = Exchange.ExchangeDeserializer.class) public class Exchange { /** */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("created_at") 
private Long createdAt; /** */ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("filters") private Collection filters; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("linked_listings") private Collection linkedListings; /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("updated_at") private Long updatedAt; /** */ - @JsonProperty("updated_by") private String updatedBy; public Exchange setComment(String comment) { @@ -163,4 +165,53 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + ExchangePb toPb() { + ExchangePb pb = new ExchangePb(); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setFilters(filters); + pb.setId(id); + pb.setLinkedListings(linkedListings); + pb.setName(name); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static Exchange fromPb(ExchangePb pb) { + Exchange model = new Exchange(); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setFilters(pb.getFilters()); + model.setId(pb.getId()); + model.setLinkedListings(pb.getLinkedListings()); + model.setName(pb.getName()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class ExchangeSerializer extends JsonSerializer { + @Override + public void serialize(Exchange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeDeserializer extends JsonDeserializer { + @Override + public Exchange deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangePb pb = mapper.readValue(p, ExchangePb.class); + return Exchange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java index 85ddd4ac3..8e846f770 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java @@ -4,45 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExchangeFilter.ExchangeFilterSerializer.class) +@JsonDeserialize(using = ExchangeFilter.ExchangeFilterDeserializer.class) public class ExchangeFilter { /** */ - @JsonProperty("created_at") private Long createdAt; /** */ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("exchange_id") private String exchangeId; /** */ - @JsonProperty("filter_type") private ExchangeFilterType filterType; /** */ - @JsonProperty("filter_value") private String filterValue; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("updated_at") private Long updatedAt; 
/** */ - @JsonProperty("updated_by") private String updatedBy; public ExchangeFilter setCreatedAt(Long createdAt) { @@ -162,4 +164,54 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + ExchangeFilterPb toPb() { + ExchangeFilterPb pb = new ExchangeFilterPb(); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setExchangeId(exchangeId); + pb.setFilterType(filterType); + pb.setFilterValue(filterValue); + pb.setId(id); + pb.setName(name); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static ExchangeFilter fromPb(ExchangeFilterPb pb) { + ExchangeFilter model = new ExchangeFilter(); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setExchangeId(pb.getExchangeId()); + model.setFilterType(pb.getFilterType()); + model.setFilterValue(pb.getFilterValue()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class ExchangeFilterSerializer extends JsonSerializer { + @Override + public void serialize(ExchangeFilter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangeFilterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeFilterDeserializer extends JsonDeserializer { + @Override + public ExchangeFilter deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangeFilterPb pb = mapper.readValue(p, ExchangeFilterPb.class); + return ExchangeFilter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterPb.java new file mode 100755 index 000000000..5ad3136d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterPb.java @@ -0,0 +1,156 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExchangeFilterPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("exchange_id") + private String exchangeId; + + @JsonProperty("filter_type") + private ExchangeFilterType filterType; + + @JsonProperty("filter_value") + private String filterValue; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public ExchangeFilterPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExchangeFilterPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExchangeFilterPb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ExchangeFilterPb setFilterType(ExchangeFilterType 
filterType) { + this.filterType = filterType; + return this; + } + + public ExchangeFilterType getFilterType() { + return filterType; + } + + public ExchangeFilterPb setFilterValue(String filterValue) { + this.filterValue = filterValue; + return this; + } + + public String getFilterValue() { + return filterValue; + } + + public ExchangeFilterPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExchangeFilterPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExchangeFilterPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ExchangeFilterPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeFilterPb that = (ExchangeFilterPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(filterType, that.filterType) + && Objects.equals(filterValue, that.filterValue) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, createdBy, exchangeId, filterType, filterValue, id, name, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(ExchangeFilterPb.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("exchangeId", exchangeId) + .add("filterType", filterType) + .add("filterValue", filterValue) + .add("id", id) + .add("name", name) + .add("updatedAt", 
updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java index d149724ff..78cd02b12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExchangeListing.ExchangeListingSerializer.class) +@JsonDeserialize(using = ExchangeListing.ExchangeListingDeserializer.class) public class ExchangeListing { /** */ - @JsonProperty("created_at") private Long createdAt; /** */ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("exchange_id") private String exchangeId; /** */ - @JsonProperty("exchange_name") private String exchangeName; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("listing_id") private String listingId; /** */ - @JsonProperty("listing_name") private String listingName; public ExchangeListing setCreatedAt(Long createdAt) { @@ -131,4 +135,50 @@ public String toString() { .add("listingName", listingName) 
.toString(); } + + ExchangeListingPb toPb() { + ExchangeListingPb pb = new ExchangeListingPb(); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setExchangeId(exchangeId); + pb.setExchangeName(exchangeName); + pb.setId(id); + pb.setListingId(listingId); + pb.setListingName(listingName); + + return pb; + } + + static ExchangeListing fromPb(ExchangeListingPb pb) { + ExchangeListing model = new ExchangeListing(); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setExchangeId(pb.getExchangeId()); + model.setExchangeName(pb.getExchangeName()); + model.setId(pb.getId()); + model.setListingId(pb.getListingId()); + model.setListingName(pb.getListingName()); + + return model; + } + + public static class ExchangeListingSerializer extends JsonSerializer { + @Override + public void serialize(ExchangeListing value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangeListingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeListingDeserializer extends JsonDeserializer { + @Override + public ExchangeListing deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangeListingPb pb = mapper.readValue(p, ExchangeListingPb.class); + return ExchangeListing.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListingPb.java new file mode 100755 index 000000000..77775e37c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListingPb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExchangeListingPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("exchange_id") + private String exchangeId; + + @JsonProperty("exchange_name") + private String exchangeName; + + @JsonProperty("id") + private String id; + + @JsonProperty("listing_id") + private String listingId; + + @JsonProperty("listing_name") + private String listingName; + + public ExchangeListingPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExchangeListingPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExchangeListingPb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ExchangeListingPb setExchangeName(String exchangeName) { + this.exchangeName = exchangeName; + return this; + } + + public String getExchangeName() { + return exchangeName; + } + + public ExchangeListingPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExchangeListingPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ExchangeListingPb setListingName(String listingName) { + this.listingName = listingName; + return this; + } + + public String getListingName() { + return listingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + ExchangeListingPb that = (ExchangeListingPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(exchangeName, that.exchangeName) + && Objects.equals(id, that.id) + && Objects.equals(listingId, that.listingId) + && Objects.equals(listingName, that.listingName); + } + + @Override + public int hashCode() { + return Objects.hash(createdAt, createdBy, exchangeId, exchangeName, id, listingId, listingName); + } + + @Override + public String toString() { + return new ToStringer(ExchangeListingPb.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("exchangeId", exchangeId) + .add("exchangeName", exchangeName) + .add("id", id) + .add("listingId", listingId) + .add("listingName", listingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangePb.java new file mode 100755 index 000000000..53ef28fbf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangePb.java @@ -0,0 +1,157 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExchangePb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("filters") + private Collection filters; + + @JsonProperty("id") + private String id; + + @JsonProperty("linked_listings") + private Collection linkedListings; + + @JsonProperty("name") + private String name; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public ExchangePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ExchangePb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExchangePb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExchangePb setFilters(Collection filters) { + this.filters = filters; + return this; + } + + public Collection getFilters() { + return filters; + } + + public ExchangePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExchangePb setLinkedListings(Collection linkedListings) { + this.linkedListings = linkedListings; + return this; + } + + public Collection getLinkedListings() { + return linkedListings; + } + + public ExchangePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExchangePb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + 
+ public Long getUpdatedAt() { + return updatedAt; + } + + public ExchangePb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangePb that = (ExchangePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(filters, that.filters) + && Objects.equals(id, that.id) + && Objects.equals(linkedListings, that.linkedListings) + && Objects.equals(name, that.name) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, createdAt, createdBy, filters, id, linkedListings, name, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(ExchangePb.class) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("filters", filters) + .add("id", id) + .add("linkedListings", linkedListings) + .add("name", name) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java index 4c47c50e1..65cb1207e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java @@ -4,49 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FileInfo.FileInfoSerializer.class) +@JsonDeserialize(using = FileInfo.FileInfoDeserializer.class) public class FileInfo { /** */ - @JsonProperty("created_at") private Long createdAt; /** Name displayed to users for applicable files, e.g. embedded notebooks */ - @JsonProperty("display_name") private String displayName; /** */ - @JsonProperty("download_link") private String downloadLink; /** */ - @JsonProperty("file_parent") private FileParent fileParent; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("marketplace_file_type") private MarketplaceFileType marketplaceFileType; /** */ - @JsonProperty("mime_type") private String mimeType; /** */ - @JsonProperty("status") private FileStatus status; /** Populated if status is in a failed state with more information on reason for the failure. 
*/ - @JsonProperty("status_message") private String statusMessage; /** */ - @JsonProperty("updated_at") private Long updatedAt; public FileInfo setCreatedAt(Long createdAt) { @@ -186,4 +187,55 @@ public String toString() { .add("updatedAt", updatedAt) .toString(); } + + FileInfoPb toPb() { + FileInfoPb pb = new FileInfoPb(); + pb.setCreatedAt(createdAt); + pb.setDisplayName(displayName); + pb.setDownloadLink(downloadLink); + pb.setFileParent(fileParent); + pb.setId(id); + pb.setMarketplaceFileType(marketplaceFileType); + pb.setMimeType(mimeType); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + pb.setUpdatedAt(updatedAt); + + return pb; + } + + static FileInfo fromPb(FileInfoPb pb) { + FileInfo model = new FileInfo(); + model.setCreatedAt(pb.getCreatedAt()); + model.setDisplayName(pb.getDisplayName()); + model.setDownloadLink(pb.getDownloadLink()); + model.setFileParent(pb.getFileParent()); + model.setId(pb.getId()); + model.setMarketplaceFileType(pb.getMarketplaceFileType()); + model.setMimeType(pb.getMimeType()); + model.setStatus(pb.getStatus()); + model.setStatusMessage(pb.getStatusMessage()); + model.setUpdatedAt(pb.getUpdatedAt()); + + return model; + } + + public static class FileInfoSerializer extends JsonSerializer { + @Override + public void serialize(FileInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileInfoDeserializer extends JsonDeserializer { + @Override + public FileInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileInfoPb pb = mapper.readValue(p, FileInfoPb.class); + return FileInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfoPb.java new file mode 100755 index 000000000..7496dc9ca --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfoPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileInfoPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("download_link") + private String downloadLink; + + @JsonProperty("file_parent") + private FileParent fileParent; + + @JsonProperty("id") + private String id; + + @JsonProperty("marketplace_file_type") + private MarketplaceFileType marketplaceFileType; + + @JsonProperty("mime_type") + private String mimeType; + + @JsonProperty("status") + private FileStatus status; + + @JsonProperty("status_message") + private String statusMessage; + + @JsonProperty("updated_at") + private Long updatedAt; + + public FileInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public FileInfoPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public FileInfoPb setDownloadLink(String downloadLink) { + this.downloadLink = downloadLink; + return this; + } + + public String getDownloadLink() { + return 
downloadLink; + } + + public FileInfoPb setFileParent(FileParent fileParent) { + this.fileParent = fileParent; + return this; + } + + public FileParent getFileParent() { + return fileParent; + } + + public FileInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public FileInfoPb setMarketplaceFileType(MarketplaceFileType marketplaceFileType) { + this.marketplaceFileType = marketplaceFileType; + return this; + } + + public MarketplaceFileType getMarketplaceFileType() { + return marketplaceFileType; + } + + public FileInfoPb setMimeType(String mimeType) { + this.mimeType = mimeType; + return this; + } + + public String getMimeType() { + return mimeType; + } + + public FileInfoPb setStatus(FileStatus status) { + this.status = status; + return this; + } + + public FileStatus getStatus() { + return status; + } + + public FileInfoPb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public FileInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileInfoPb that = (FileInfoPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(displayName, that.displayName) + && Objects.equals(downloadLink, that.downloadLink) + && Objects.equals(fileParent, that.fileParent) + && Objects.equals(id, that.id) + && Objects.equals(marketplaceFileType, that.marketplaceFileType) + && Objects.equals(mimeType, that.mimeType) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, + displayName, + 
downloadLink, + fileParent, + id, + marketplaceFileType, + mimeType, + status, + statusMessage, + updatedAt); + } + + @Override + public String toString() { + return new ToStringer(FileInfoPb.class) + .add("createdAt", createdAt) + .add("displayName", displayName) + .add("downloadLink", downloadLink) + .add("fileParent", fileParent) + .add("id", id) + .add("marketplaceFileType", marketplaceFileType) + .add("mimeType", mimeType) + .add("status", status) + .add("statusMessage", statusMessage) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java index e2e2ddec5..4a1bdcacb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FileParent.FileParentSerializer.class) +@JsonDeserialize(using = FileParent.FileParentDeserializer.class) public class FileParent { /** */ - @JsonProperty("file_parent_type") private FileParentType fileParentType; /** TODO make the following fields required */ - 
@JsonProperty("parent_id") private String parentId; public FileParent setFileParentType(FileParentType fileParentType) { @@ -56,4 +65,39 @@ public String toString() { .add("parentId", parentId) .toString(); } + + FileParentPb toPb() { + FileParentPb pb = new FileParentPb(); + pb.setFileParentType(fileParentType); + pb.setParentId(parentId); + + return pb; + } + + static FileParent fromPb(FileParentPb pb) { + FileParent model = new FileParent(); + model.setFileParentType(pb.getFileParentType()); + model.setParentId(pb.getParentId()); + + return model; + } + + public static class FileParentSerializer extends JsonSerializer { + @Override + public void serialize(FileParent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileParentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileParentDeserializer extends JsonDeserializer { + @Override + public FileParent deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileParentPb pb = mapper.readValue(p, FileParentPb.class); + return FileParent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentPb.java new file mode 100755 index 000000000..6ee1681de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileParentPb { + @JsonProperty("file_parent_type") + private FileParentType fileParentType; + + @JsonProperty("parent_id") + private String parentId; + + public FileParentPb setFileParentType(FileParentType fileParentType) { + this.fileParentType = fileParentType; + return this; + } + + public FileParentType getFileParentType() { + return fileParentType; + } + + public FileParentPb setParentId(String parentId) { + this.parentId = parentId; + return this; + } + + public String getParentId() { + return parentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileParentPb that = (FileParentPb) o; + return Objects.equals(fileParentType, that.fileParentType) + && Objects.equals(parentId, that.parentId); + } + + @Override + public int hashCode() { + return Objects.hash(fileParentType, parentId); + } + + @Override + public String toString() { + return new ToStringer(FileParentPb.class) + .add("fileParentType", fileParentType) + .add("parentId", parentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java index e6e3903ae..51181d92d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an exchange */ @Generated +@JsonSerialize(using = GetExchangeRequest.GetExchangeRequestSerializer.class) +@JsonDeserialize(using = GetExchangeRequest.GetExchangeRequestDeserializer.class) public class GetExchangeRequest { /** */ - @JsonIgnore private String id; + private String id; public GetExchangeRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetExchangeRequest.class).add("id", id).toString(); } + + GetExchangeRequestPb toPb() { + GetExchangeRequestPb pb = new GetExchangeRequestPb(); + pb.setId(id); + + return pb; + } + + static GetExchangeRequest fromPb(GetExchangeRequestPb pb) { + GetExchangeRequest model = new GetExchangeRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetExchangeRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetExchangeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExchangeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExchangeRequestDeserializer extends JsonDeserializer { + @Override + public GetExchangeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExchangeRequestPb pb = mapper.readValue(p, GetExchangeRequestPb.class); + return GetExchangeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequestPb.java new file mode 100755 index 000000000..f17a01d97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an exchange */ +@Generated +class GetExchangeRequestPb { + @JsonIgnore private String id; + + public GetExchangeRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExchangeRequestPb that = (GetExchangeRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetExchangeRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java index bffa34862..24248a656 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetExchangeResponse.GetExchangeResponseSerializer.class) +@JsonDeserialize(using = GetExchangeResponse.GetExchangeResponseDeserializer.class) public class GetExchangeResponse { /** */ - @JsonProperty("exchange") private Exchange exchange; public GetExchangeResponse setExchange(Exchange exchange) { @@ -39,4 +49,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetExchangeResponse.class).add("exchange", exchange).toString(); } + + GetExchangeResponsePb toPb() { + GetExchangeResponsePb pb = new GetExchangeResponsePb(); + pb.setExchange(exchange); + + return pb; + } + + static GetExchangeResponse fromPb(GetExchangeResponsePb pb) { + GetExchangeResponse model = new GetExchangeResponse(); + model.setExchange(pb.getExchange()); + + return model; + } + + public static class GetExchangeResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetExchangeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExchangeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
GetExchangeResponseDeserializer + extends JsonDeserializer { + @Override + public GetExchangeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExchangeResponsePb pb = mapper.readValue(p, GetExchangeResponsePb.class); + return GetExchangeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponsePb.java new file mode 100755 index 000000000..7d804492d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetExchangeResponsePb { + @JsonProperty("exchange") + private Exchange exchange; + + public GetExchangeResponsePb setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExchangeResponsePb that = (GetExchangeResponsePb) o; + return Objects.equals(exchange, that.exchange); + } + + @Override + public int hashCode() { + return Objects.hash(exchange); + } + + @Override + public String toString() { + return new ToStringer(GetExchangeResponsePb.class).add("exchange", exchange).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java index d0da2329f..783e6cf3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a file */ @Generated +@JsonSerialize(using = GetFileRequest.GetFileRequestSerializer.class) +@JsonDeserialize(using = GetFileRequest.GetFileRequestDeserializer.class) public class GetFileRequest { /** */ - @JsonIgnore private String fileId; + private String fileId; public GetFileRequest setFileId(String fileId) { this.fileId = fileId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetFileRequest.class).add("fileId", fileId).toString(); } + + GetFileRequestPb toPb() { + GetFileRequestPb pb = new GetFileRequestPb(); + pb.setFileId(fileId); + + return pb; + } + + static GetFileRequest fromPb(GetFileRequestPb pb) { + GetFileRequest model = new GetFileRequest(); + model.setFileId(pb.getFileId()); + + return model; + } + + public static class GetFileRequestSerializer extends 
JsonSerializer { + @Override + public void serialize(GetFileRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetFileRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetFileRequestDeserializer extends JsonDeserializer { + @Override + public GetFileRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetFileRequestPb pb = mapper.readValue(p, GetFileRequestPb.class); + return GetFileRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequestPb.java new file mode 100755 index 000000000..2c0ec44a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a file */ +@Generated +class GetFileRequestPb { + @JsonIgnore private String fileId; + + public GetFileRequestPb setFileId(String fileId) { + this.fileId = fileId; + return this; + } + + public String getFileId() { + return fileId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFileRequestPb that = (GetFileRequestPb) o; + return Objects.equals(fileId, that.fileId); + } + + @Override + public int hashCode() { + return Objects.hash(fileId); + } + + @Override + public String toString() { + return new ToStringer(GetFileRequestPb.class).add("fileId", fileId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java index afcacadac..cd25bdf82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetFileResponse.GetFileResponseSerializer.class) +@JsonDeserialize(using = GetFileResponse.GetFileResponseDeserializer.class) public class GetFileResponse { /** */ - @JsonProperty("file_info") private FileInfo fileInfo; public GetFileResponse setFileInfo(FileInfo fileInfo) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetFileResponse.class).add("fileInfo", fileInfo).toString(); } + + GetFileResponsePb toPb() { + GetFileResponsePb pb = new GetFileResponsePb(); + pb.setFileInfo(fileInfo); + + return pb; + } + + static GetFileResponse fromPb(GetFileResponsePb pb) { + GetFileResponse model = new GetFileResponse(); + model.setFileInfo(pb.getFileInfo()); + + return model; + } + + public static class GetFileResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetFileResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetFileResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetFileResponseDeserializer extends JsonDeserializer { + @Override + public GetFileResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetFileResponsePb pb = mapper.readValue(p, GetFileResponsePb.class); + return GetFileResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponsePb.java new file mode 100755 index 000000000..846e27213 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetFileResponsePb { + @JsonProperty("file_info") + private FileInfo fileInfo; + + public GetFileResponsePb setFileInfo(FileInfo fileInfo) { + this.fileInfo = fileInfo; + return this; + } + + public FileInfo getFileInfo() { + return fileInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFileResponsePb that = (GetFileResponsePb) o; + return Objects.equals(fileInfo, that.fileInfo); + } + + @Override + public int hashCode() { + return Objects.hash(fileInfo); + } + + @Override + public String toString() { + return new ToStringer(GetFileResponsePb.class).add("fileInfo", fileInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java index 6ae01e89b..c313a66d2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java @@ -4,13 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetLatestVersionProviderAnalyticsDashboardResponse + .GetLatestVersionProviderAnalyticsDashboardResponseSerializer.class) +@JsonDeserialize( + using = + GetLatestVersionProviderAnalyticsDashboardResponse + .GetLatestVersionProviderAnalyticsDashboardResponseDeserializer.class) public class GetLatestVersionProviderAnalyticsDashboardResponse { /** version here is latest logical version of the dashboard template */ - @JsonProperty("version") private Long version; public GetLatestVersionProviderAnalyticsDashboardResponse setVersion(Long version) { @@ -42,4 +58,47 @@ public String toString() { .add("version", version) .toString(); } + + GetLatestVersionProviderAnalyticsDashboardResponsePb toPb() { + GetLatestVersionProviderAnalyticsDashboardResponsePb pb = + new GetLatestVersionProviderAnalyticsDashboardResponsePb(); + pb.setVersion(version); + + return pb; + } + + static GetLatestVersionProviderAnalyticsDashboardResponse fromPb( + 
GetLatestVersionProviderAnalyticsDashboardResponsePb pb) { + GetLatestVersionProviderAnalyticsDashboardResponse model = + new GetLatestVersionProviderAnalyticsDashboardResponse(); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class GetLatestVersionProviderAnalyticsDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLatestVersionProviderAnalyticsDashboardResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetLatestVersionProviderAnalyticsDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLatestVersionProviderAnalyticsDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public GetLatestVersionProviderAnalyticsDashboardResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLatestVersionProviderAnalyticsDashboardResponsePb pb = + mapper.readValue(p, GetLatestVersionProviderAnalyticsDashboardResponsePb.class); + return GetLatestVersionProviderAnalyticsDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponsePb.java new file mode 100755 index 000000000..04609d2bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetLatestVersionProviderAnalyticsDashboardResponsePb { + @JsonProperty("version") + private Long version; + + public GetLatestVersionProviderAnalyticsDashboardResponsePb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLatestVersionProviderAnalyticsDashboardResponsePb that = + (GetLatestVersionProviderAnalyticsDashboardResponsePb) o; + return Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(version); + } + + @Override + public String toString() { + return new ToStringer(GetLatestVersionProviderAnalyticsDashboardResponsePb.class) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java index 3201c3ea6..14c082efe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java @@ -3,25 +3,33 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get listing content metadata */ @Generated +@JsonSerialize( + using = GetListingContentMetadataRequest.GetListingContentMetadataRequestSerializer.class) +@JsonDeserialize( + using = GetListingContentMetadataRequest.GetListingContentMetadataRequestDeserializer.class) public class GetListingContentMetadataRequest { /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public GetListingContentMetadataRequest setListingId(String listingId) { @@ -74,4 +82,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + GetListingContentMetadataRequestPb toPb() { + GetListingContentMetadataRequestPb pb = new GetListingContentMetadataRequestPb(); + pb.setListingId(listingId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static GetListingContentMetadataRequest fromPb(GetListingContentMetadataRequestPb pb) { + GetListingContentMetadataRequest model = new GetListingContentMetadataRequest(); + model.setListingId(pb.getListingId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class GetListingContentMetadataRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetListingContentMetadataRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
GetListingContentMetadataRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingContentMetadataRequestDeserializer + extends JsonDeserializer { + @Override + public GetListingContentMetadataRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingContentMetadataRequestPb pb = + mapper.readValue(p, GetListingContentMetadataRequestPb.class); + return GetListingContentMetadataRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequestPb.java new file mode 100755 index 000000000..98ede15fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get listing content metadata */ +@Generated +class GetListingContentMetadataRequestPb { + @JsonIgnore private String listingId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public GetListingContentMetadataRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public GetListingContentMetadataRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GetListingContentMetadataRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingContentMetadataRequestPb that = (GetListingContentMetadataRequestPb) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GetListingContentMetadataRequestPb.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java index 37479bc73..6606b19c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetListingContentMetadataResponse.GetListingContentMetadataResponseSerializer.class) +@JsonDeserialize( + using = GetListingContentMetadataResponse.GetListingContentMetadataResponseDeserializer.class) public class GetListingContentMetadataResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("shared_data_objects") private Collection sharedDataObjects; public GetListingContentMetadataResponse setNextPageToken(String nextPageToken) { @@ -58,4 +69,44 @@ public String toString() { .add("sharedDataObjects", sharedDataObjects) .toString(); } + + GetListingContentMetadataResponsePb toPb() { + GetListingContentMetadataResponsePb pb = new GetListingContentMetadataResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSharedDataObjects(sharedDataObjects); 
+ + return pb; + } + + static GetListingContentMetadataResponse fromPb(GetListingContentMetadataResponsePb pb) { + GetListingContentMetadataResponse model = new GetListingContentMetadataResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSharedDataObjects(pb.getSharedDataObjects()); + + return model; + } + + public static class GetListingContentMetadataResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetListingContentMetadataResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetListingContentMetadataResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingContentMetadataResponseDeserializer + extends JsonDeserializer { + @Override + public GetListingContentMetadataResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingContentMetadataResponsePb pb = + mapper.readValue(p, GetListingContentMetadataResponsePb.class); + return GetListingContentMetadataResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponsePb.java new file mode 100755 index 000000000..3814a9987 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetListingContentMetadataResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("shared_data_objects") + private Collection sharedDataObjects; + + public GetListingContentMetadataResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetListingContentMetadataResponsePb setSharedDataObjects( + Collection sharedDataObjects) { + this.sharedDataObjects = sharedDataObjects; + return this; + } + + public Collection getSharedDataObjects() { + return sharedDataObjects; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingContentMetadataResponsePb that = (GetListingContentMetadataResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(sharedDataObjects, that.sharedDataObjects); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, sharedDataObjects); + } + + @Override + public String toString() { + return new ToStringer(GetListingContentMetadataResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("sharedDataObjects", sharedDataObjects) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java index e89f17ce2..38826427a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get listing */ @Generated +@JsonSerialize(using = GetListingRequest.GetListingRequestSerializer.class) +@JsonDeserialize(using = GetListingRequest.GetListingRequestDeserializer.class) public class GetListingRequest { /** */ - @JsonIgnore private String id; + private String id; public GetListingRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetListingRequest.class).add("id", id).toString(); } + + GetListingRequestPb toPb() { + GetListingRequestPb pb = new GetListingRequestPb(); + pb.setId(id); + + return pb; + } + + static GetListingRequest fromPb(GetListingRequestPb pb) { + GetListingRequest model = new GetListingRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetListingRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingRequestDeserializer extends JsonDeserializer { + @Override + 
public GetListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingRequestPb pb = mapper.readValue(p, GetListingRequestPb.class); + return GetListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequestPb.java new file mode 100755 index 000000000..9e1c56240 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get listing */ +@Generated +class GetListingRequestPb { + @JsonIgnore private String id; + + public GetListingRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingRequestPb that = (GetListingRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetListingRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java index 720e1f5dd..c2bd722a6 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetListingResponse.GetListingResponseSerializer.class) +@JsonDeserialize(using = GetListingResponse.GetListingResponseDeserializer.class) public class GetListingResponse { /** */ - @JsonProperty("listing") private Listing listing; public GetListingResponse setListing(Listing listing) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetListingResponse.class).add("listing", listing).toString(); } + + GetListingResponsePb toPb() { + GetListingResponsePb pb = new GetListingResponsePb(); + pb.setListing(listing); + + return pb; + } + + static GetListingResponse fromPb(GetListingResponsePb pb) { + GetListingResponse model = new GetListingResponse(); + model.setListing(pb.getListing()); + + return model; + } + + public static class GetListingResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetListingResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingResponseDeserializer extends JsonDeserializer { + @Override + public GetListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingResponsePb pb = mapper.readValue(p, GetListingResponsePb.class); + return GetListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponsePb.java new file mode 100755 index 000000000..462afc9c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetListingResponsePb { + @JsonProperty("listing") + private Listing listing; + + public GetListingResponsePb setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingResponsePb that = (GetListingResponsePb) o; + return Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(listing); + } + + @Override + public String toString() { + return new ToStringer(GetListingResponsePb.class).add("listing", listing).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java index dfc06799c..bb541bb86 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List listings */ @Generated +@JsonSerialize(using = GetListingsRequest.GetListingsRequestSerializer.class) +@JsonDeserialize(using = GetListingsRequest.GetListingsRequestDeserializer.class) public class GetListingsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public GetListingsRequest setPageSize(Long pageSize) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + GetListingsRequestPb toPb() { + GetListingsRequestPb pb = new GetListingsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static GetListingsRequest 
fromPb(GetListingsRequestPb pb) { + GetListingsRequest model = new GetListingsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class GetListingsRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetListingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetListingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingsRequestDeserializer extends JsonDeserializer { + @Override + public GetListingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingsRequestPb pb = mapper.readValue(p, GetListingsRequestPb.class); + return GetListingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequestPb.java new file mode 100755 index 000000000..9904900de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List listings */ +@Generated +class GetListingsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public GetListingsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GetListingsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingsRequestPb that = (GetListingsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GetListingsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java index d466edf57..1b2434d23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetListingsResponse.GetListingsResponseSerializer.class) +@JsonDeserialize(using = GetListingsResponse.GetListingsResponseDeserializer.class) public class GetListingsResponse { /** */ - @JsonProperty("listings") private Collection listings; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public GetListingsResponse setListings(Collection listings) { @@ -57,4 +66,41 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetListingsResponsePb toPb() { + GetListingsResponsePb pb = new GetListingsResponsePb(); + pb.setListings(listings); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetListingsResponse fromPb(GetListingsResponsePb pb) { + GetListingsResponse model = new GetListingsResponse(); + model.setListings(pb.getListings()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetListingsResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetListingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetListingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetListingsResponseDeserializer + extends JsonDeserializer { + @Override + public GetListingsResponse deserialize(JsonParser p, DeserializationContext ctxt) 
+ throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetListingsResponsePb pb = mapper.readValue(p, GetListingsResponsePb.class); + return GetListingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponsePb.java new file mode 100755 index 000000000..5db483ee1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetListingsResponsePb { + @JsonProperty("listings") + private Collection listings; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetListingsResponsePb setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public GetListingsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingsResponsePb that = (GetListingsResponsePb) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String 
toString() { + return new ToStringer(GetListingsResponsePb.class) + .add("listings", listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java index d6b1073c0..4d7eb2aff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the personalization request for a listing */ @Generated +@JsonSerialize( + using = GetPersonalizationRequestRequest.GetPersonalizationRequestRequestSerializer.class) +@JsonDeserialize( + using = GetPersonalizationRequestRequest.GetPersonalizationRequestRequestDeserializer.class) public class GetPersonalizationRequestRequest { /** */ - @JsonIgnore private String listingId; + private String listingId; public GetPersonalizationRequestRequest setListingId(String listingId) { this.listingId = listingId; @@ -41,4 +54,42 @@ public String toString() { .add("listingId", listingId) .toString(); } + + 
GetPersonalizationRequestRequestPb toPb() { + GetPersonalizationRequestRequestPb pb = new GetPersonalizationRequestRequestPb(); + pb.setListingId(listingId); + + return pb; + } + + static GetPersonalizationRequestRequest fromPb(GetPersonalizationRequestRequestPb pb) { + GetPersonalizationRequestRequest model = new GetPersonalizationRequestRequest(); + model.setListingId(pb.getListingId()); + + return model; + } + + public static class GetPersonalizationRequestRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPersonalizationRequestRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPersonalizationRequestRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPersonalizationRequestRequestDeserializer + extends JsonDeserializer { + @Override + public GetPersonalizationRequestRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPersonalizationRequestRequestPb pb = + mapper.readValue(p, GetPersonalizationRequestRequestPb.class); + return GetPersonalizationRequestRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequestPb.java new file mode 100755 index 000000000..aaead913d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the personalization request for a listing */ +@Generated +class GetPersonalizationRequestRequestPb { + @JsonIgnore private String listingId; + + public GetPersonalizationRequestRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPersonalizationRequestRequestPb that = (GetPersonalizationRequestRequestPb) o; + return Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(listingId); + } + + @Override + public String toString() { + return new ToStringer(GetPersonalizationRequestRequestPb.class) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java index 97c64cb26..35cda1ec6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetPersonalizationRequestResponse.GetPersonalizationRequestResponseSerializer.class) +@JsonDeserialize( + using = GetPersonalizationRequestResponse.GetPersonalizationRequestResponseDeserializer.class) public class GetPersonalizationRequestResponse { /** */ - @JsonProperty("personalization_requests") private Collection personalizationRequests; public GetPersonalizationRequestResponse setPersonalizationRequests( @@ -43,4 +55,42 @@ public String toString() { .add("personalizationRequests", personalizationRequests) .toString(); } + + GetPersonalizationRequestResponsePb toPb() { + GetPersonalizationRequestResponsePb pb = new GetPersonalizationRequestResponsePb(); + pb.setPersonalizationRequests(personalizationRequests); + + return pb; + } + + static GetPersonalizationRequestResponse fromPb(GetPersonalizationRequestResponsePb pb) { + GetPersonalizationRequestResponse model = new GetPersonalizationRequestResponse(); + model.setPersonalizationRequests(pb.getPersonalizationRequests()); + + return model; + } + + public static class GetPersonalizationRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPersonalizationRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPersonalizationRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPersonalizationRequestResponseDeserializer + extends JsonDeserializer { + @Override + public GetPersonalizationRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPersonalizationRequestResponsePb pb = + mapper.readValue(p, GetPersonalizationRequestResponsePb.class); + return GetPersonalizationRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponsePb.java new file mode 100755 index 000000000..703ddd0e9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPersonalizationRequestResponsePb { + @JsonProperty("personalization_requests") + private Collection personalizationRequests; + + public GetPersonalizationRequestResponsePb setPersonalizationRequests( + Collection personalizationRequests) { + this.personalizationRequests = personalizationRequests; + return this; + } + + public Collection getPersonalizationRequests() { + return personalizationRequests; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPersonalizationRequestResponsePb that = (GetPersonalizationRequestResponsePb) o; + return Objects.equals(personalizationRequests, that.personalizationRequests); + } + + @Override + public int hashCode() { + return Objects.hash(personalizationRequests); + } + + @Override + public String toString() { + return new 
ToStringer(GetPersonalizationRequestResponsePb.class) + .add("personalizationRequests", personalizationRequests) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java index 459501392..86733227a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a provider */ @Generated +@JsonSerialize(using = GetProviderRequest.GetProviderRequestSerializer.class) +@JsonDeserialize(using = GetProviderRequest.GetProviderRequestDeserializer.class) public class GetProviderRequest { /** */ - @JsonIgnore private String id; + private String id; public GetProviderRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetProviderRequest.class).add("id", id).toString(); } + + GetProviderRequestPb toPb() { + GetProviderRequestPb pb = new GetProviderRequestPb(); + pb.setId(id); + + return pb; + } + + static GetProviderRequest fromPb(GetProviderRequestPb pb) 
{ + GetProviderRequest model = new GetProviderRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetProviderRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetProviderRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetProviderRequestDeserializer extends JsonDeserializer { + @Override + public GetProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetProviderRequestPb pb = mapper.readValue(p, GetProviderRequestPb.class); + return GetProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequestPb.java new file mode 100755 index 000000000..fa1e3b02c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a provider */ +@Generated +class GetProviderRequestPb { + @JsonIgnore private String id; + + public GetProviderRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProviderRequestPb that = (GetProviderRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetProviderRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java index 0330f3f34..f300e2147 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetProviderResponse.GetProviderResponseSerializer.class) +@JsonDeserialize(using = GetProviderResponse.GetProviderResponseDeserializer.class) public class GetProviderResponse { /** */ - @JsonProperty("provider") private ProviderInfo provider; public GetProviderResponse setProvider(ProviderInfo provider) { @@ -39,4 +49,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetProviderResponse.class).add("provider", provider).toString(); } + + GetProviderResponsePb toPb() { + GetProviderResponsePb pb = new GetProviderResponsePb(); + pb.setProvider(provider); + + return pb; + } + + static GetProviderResponse fromPb(GetProviderResponsePb pb) { + GetProviderResponse model = new GetProviderResponse(); + model.setProvider(pb.getProvider()); + + return model; + } + + public static class GetProviderResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetProviderResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetProviderResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetProviderResponseDeserializer + extends JsonDeserializer { + @Override + public GetProviderResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetProviderResponsePb pb = mapper.readValue(p, GetProviderResponsePb.class); + return GetProviderResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponsePb.java new file mode 100755 index 000000000..07e4ca243 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetProviderResponsePb { + @JsonProperty("provider") + private ProviderInfo provider; + + public GetProviderResponsePb setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProviderResponsePb that = (GetProviderResponsePb) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(GetProviderResponsePb.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java index 3375b657d..2f8d08fa8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Installation.InstallationSerializer.class) +@JsonDeserialize(using = Installation.InstallationDeserializer.class) public class Installation { /** */ - @JsonProperty("installation") private InstallationDetail installation; public Installation setInstallation(InstallationDetail installation) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(Installation.class).add("installation", installation).toString(); } + + InstallationPb toPb() { + InstallationPb pb = new InstallationPb(); + pb.setInstallation(installation); + + return pb; + } + + static Installation fromPb(InstallationPb pb) { + Installation model = new Installation(); + model.setInstallation(pb.getInstallation()); + + return model; + } + + public static class InstallationSerializer extends JsonSerializer { + @Override + public void serialize(Installation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstallationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstallationDeserializer extends JsonDeserializer { + @Override + public 
Installation deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstallationPb pb = mapper.readValue(p, InstallationPb.class); + return Installation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java index c7a3574b9..c39e0a2da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java @@ -4,62 +4,60 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = InstallationDetail.InstallationDetailSerializer.class) +@JsonDeserialize(using = InstallationDetail.InstallationDetailDeserializer.class) public class InstallationDetail { /** */ - @JsonProperty("catalog_name") private String catalogName; /** */ - @JsonProperty("error_message") private String errorMessage; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("installed_on") private Long installedOn; 
/** */ - @JsonProperty("listing_id") private String listingId; /** */ - @JsonProperty("listing_name") private String listingName; /** */ - @JsonProperty("recipient_type") private DeltaSharingRecipientType recipientType; /** */ - @JsonProperty("repo_name") private String repoName; /** */ - @JsonProperty("repo_path") private String repoPath; /** */ - @JsonProperty("share_name") private String shareName; /** */ - @JsonProperty("status") private InstallationStatus status; /** */ - @JsonProperty("token_detail") private TokenDetail tokenDetail; /** */ - @JsonProperty("tokens") private Collection tokens; public InstallationDetail setCatalogName(String catalogName) { @@ -235,4 +233,62 @@ public String toString() { .add("tokens", tokens) .toString(); } + + InstallationDetailPb toPb() { + InstallationDetailPb pb = new InstallationDetailPb(); + pb.setCatalogName(catalogName); + pb.setErrorMessage(errorMessage); + pb.setId(id); + pb.setInstalledOn(installedOn); + pb.setListingId(listingId); + pb.setListingName(listingName); + pb.setRecipientType(recipientType); + pb.setRepoName(repoName); + pb.setRepoPath(repoPath); + pb.setShareName(shareName); + pb.setStatus(status); + pb.setTokenDetail(tokenDetail); + pb.setTokens(tokens); + + return pb; + } + + static InstallationDetail fromPb(InstallationDetailPb pb) { + InstallationDetail model = new InstallationDetail(); + model.setCatalogName(pb.getCatalogName()); + model.setErrorMessage(pb.getErrorMessage()); + model.setId(pb.getId()); + model.setInstalledOn(pb.getInstalledOn()); + model.setListingId(pb.getListingId()); + model.setListingName(pb.getListingName()); + model.setRecipientType(pb.getRecipientType()); + model.setRepoName(pb.getRepoName()); + model.setRepoPath(pb.getRepoPath()); + model.setShareName(pb.getShareName()); + model.setStatus(pb.getStatus()); + model.setTokenDetail(pb.getTokenDetail()); + model.setTokens(pb.getTokens()); + + return model; + } + + public static class InstallationDetailSerializer extends 
JsonSerializer { + @Override + public void serialize(InstallationDetail value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InstallationDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InstallationDetailDeserializer extends JsonDeserializer { + @Override + public InstallationDetail deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InstallationDetailPb pb = mapper.readValue(p, InstallationDetailPb.class); + return InstallationDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetailPb.java new file mode 100755 index 000000000..88b8c170c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetailPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class InstallationDetailPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("error_message") + private String errorMessage; + + @JsonProperty("id") + private String id; + + @JsonProperty("installed_on") + private Long installedOn; + + @JsonProperty("listing_id") + private String listingId; + + @JsonProperty("listing_name") + private String listingName; + + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + @JsonProperty("repo_name") + private String repoName; + + @JsonProperty("repo_path") + private String repoPath; + + @JsonProperty("share_name") + private String shareName; + + @JsonProperty("status") + private InstallationStatus status; + + @JsonProperty("token_detail") + private TokenDetail tokenDetail; + + @JsonProperty("tokens") + private Collection tokens; + + public InstallationDetailPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public InstallationDetailPb setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public InstallationDetailPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public InstallationDetailPb setInstalledOn(Long installedOn) { + this.installedOn = installedOn; + return this; + } + + public Long getInstalledOn() { + return installedOn; + } + + public InstallationDetailPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public 
InstallationDetailPb setListingName(String listingName) { + this.listingName = listingName; + return this; + } + + public String getListingName() { + return listingName; + } + + public InstallationDetailPb setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public InstallationDetailPb setRepoName(String repoName) { + this.repoName = repoName; + return this; + } + + public String getRepoName() { + return repoName; + } + + public InstallationDetailPb setRepoPath(String repoPath) { + this.repoPath = repoPath; + return this; + } + + public String getRepoPath() { + return repoPath; + } + + public InstallationDetailPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public InstallationDetailPb setStatus(InstallationStatus status) { + this.status = status; + return this; + } + + public InstallationStatus getStatus() { + return status; + } + + public InstallationDetailPb setTokenDetail(TokenDetail tokenDetail) { + this.tokenDetail = tokenDetail; + return this; + } + + public TokenDetail getTokenDetail() { + return tokenDetail; + } + + public InstallationDetailPb setTokens(Collection tokens) { + this.tokens = tokens; + return this; + } + + public Collection getTokens() { + return tokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstallationDetailPb that = (InstallationDetailPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(id, that.id) + && Objects.equals(installedOn, that.installedOn) + && Objects.equals(listingId, that.listingId) + && Objects.equals(listingName, that.listingName) + && Objects.equals(recipientType, that.recipientType) + && 
Objects.equals(repoName, that.repoName) + && Objects.equals(repoPath, that.repoPath) + && Objects.equals(shareName, that.shareName) + && Objects.equals(status, that.status) + && Objects.equals(tokenDetail, that.tokenDetail) + && Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash( + catalogName, + errorMessage, + id, + installedOn, + listingId, + listingName, + recipientType, + repoName, + repoPath, + shareName, + status, + tokenDetail, + tokens); + } + + @Override + public String toString() { + return new ToStringer(InstallationDetailPb.class) + .add("catalogName", catalogName) + .add("errorMessage", errorMessage) + .add("id", id) + .add("installedOn", installedOn) + .add("listingId", listingId) + .add("listingName", listingName) + .add("recipientType", recipientType) + .add("repoName", repoName) + .add("repoPath", repoPath) + .add("shareName", shareName) + .add("status", status) + .add("tokenDetail", tokenDetail) + .add("tokens", tokens) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationPb.java new file mode 100755 index 000000000..5cff1d679 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class InstallationPb { + @JsonProperty("installation") + private InstallationDetail installation; + + public InstallationPb setInstallation(InstallationDetail installation) { + this.installation = installation; + return this; + } + + public InstallationDetail getInstallation() { + return installation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstallationPb that = (InstallationPb) o; + return Objects.equals(installation, that.installation); + } + + @Override + public int hashCode() { + return Objects.hash(installation); + } + + @Override + public String toString() { + return new ToStringer(InstallationPb.class).add("installation", installation).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java index c8bd0b125..4f0c436e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all installations */ @Generated +@JsonSerialize(using = ListAllInstallationsRequest.ListAllInstallationsRequestSerializer.class) +@JsonDeserialize(using = ListAllInstallationsRequest.ListAllInstallationsRequestDeserializer.class) public class ListAllInstallationsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAllInstallationsRequest setPageSize(Long pageSize) { @@ -59,4 +65,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAllInstallationsRequestPb toPb() { + ListAllInstallationsRequestPb pb = new ListAllInstallationsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAllInstallationsRequest fromPb(ListAllInstallationsRequestPb pb) { + ListAllInstallationsRequest model = new ListAllInstallationsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAllInstallationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAllInstallationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAllInstallationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAllInstallationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAllInstallationsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAllInstallationsRequestPb pb = mapper.readValue(p, ListAllInstallationsRequestPb.class); + return ListAllInstallationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequestPb.java new file mode 100755 index 000000000..1de3e6433 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all installations */ +@Generated +class ListAllInstallationsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAllInstallationsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAllInstallationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllInstallationsRequestPb that = (ListAllInstallationsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public 
String toString() { + return new ToStringer(ListAllInstallationsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java index 9e19e2be8..f03e5c034 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java @@ -4,18 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAllInstallationsResponse.ListAllInstallationsResponseSerializer.class) +@JsonDeserialize( + using = ListAllInstallationsResponse.ListAllInstallationsResponseDeserializer.class) public class ListAllInstallationsResponse { /** */ - @JsonProperty("installations") private Collection installations; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListAllInstallationsResponse setInstallations( @@ -58,4 +68,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + 
ListAllInstallationsResponsePb toPb() { + ListAllInstallationsResponsePb pb = new ListAllInstallationsResponsePb(); + pb.setInstallations(installations); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListAllInstallationsResponse fromPb(ListAllInstallationsResponsePb pb) { + ListAllInstallationsResponse model = new ListAllInstallationsResponse(); + model.setInstallations(pb.getInstallations()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListAllInstallationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAllInstallationsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAllInstallationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAllInstallationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListAllInstallationsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAllInstallationsResponsePb pb = mapper.readValue(p, ListAllInstallationsResponsePb.class); + return ListAllInstallationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponsePb.java new file mode 100755 index 000000000..b48cd872d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAllInstallationsResponsePb { + @JsonProperty("installations") + private Collection installations; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListAllInstallationsResponsePb setInstallations( + Collection installations) { + this.installations = installations; + return this; + } + + public Collection getInstallations() { + return installations; + } + + public ListAllInstallationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllInstallationsResponsePb that = (ListAllInstallationsResponsePb) o; + return Objects.equals(installations, that.installations) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(installations, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAllInstallationsResponsePb.class) + .add("installations", installations) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java index 04aba4605..a00517dae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java @@ -3,22 +3,33 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all personalization requests */ @Generated +@JsonSerialize( + using = + ListAllPersonalizationRequestsRequest.ListAllPersonalizationRequestsRequestSerializer.class) +@JsonDeserialize( + using = + ListAllPersonalizationRequestsRequest.ListAllPersonalizationRequestsRequestDeserializer + .class) public class ListAllPersonalizationRequestsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAllPersonalizationRequestsRequest setPageSize(Long pageSize) { @@ -59,4 +70,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAllPersonalizationRequestsRequestPb toPb() { + ListAllPersonalizationRequestsRequestPb pb = new ListAllPersonalizationRequestsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAllPersonalizationRequestsRequest fromPb(ListAllPersonalizationRequestsRequestPb pb) { + ListAllPersonalizationRequestsRequest model = new 
ListAllPersonalizationRequestsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAllPersonalizationRequestsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAllPersonalizationRequestsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAllPersonalizationRequestsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAllPersonalizationRequestsRequestDeserializer + extends JsonDeserializer { + @Override + public ListAllPersonalizationRequestsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAllPersonalizationRequestsRequestPb pb = + mapper.readValue(p, ListAllPersonalizationRequestsRequestPb.class); + return ListAllPersonalizationRequestsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequestPb.java new file mode 100755 index 000000000..cbac4b1f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all personalization requests */ +@Generated +class ListAllPersonalizationRequestsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAllPersonalizationRequestsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAllPersonalizationRequestsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllPersonalizationRequestsRequestPb that = (ListAllPersonalizationRequestsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAllPersonalizationRequestsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java index 6ab6333f1..01bab74a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java @@ -4,18 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListAllPersonalizationRequestsResponse.ListAllPersonalizationRequestsResponseSerializer + .class) +@JsonDeserialize( + using = + ListAllPersonalizationRequestsResponse.ListAllPersonalizationRequestsResponseDeserializer + .class) public class ListAllPersonalizationRequestsResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("personalization_requests") private Collection personalizationRequests; public ListAllPersonalizationRequestsResponse setNextPageToken(String nextPageToken) { @@ -58,4 +73,47 @@ public String toString() { .add("personalizationRequests", personalizationRequests) .toString(); } + + ListAllPersonalizationRequestsResponsePb toPb() { + ListAllPersonalizationRequestsResponsePb pb = new ListAllPersonalizationRequestsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPersonalizationRequests(personalizationRequests); + + return pb; + } + + static ListAllPersonalizationRequestsResponse fromPb( + ListAllPersonalizationRequestsResponsePb pb) { + ListAllPersonalizationRequestsResponse model = 
new ListAllPersonalizationRequestsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPersonalizationRequests(pb.getPersonalizationRequests()); + + return model; + } + + public static class ListAllPersonalizationRequestsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAllPersonalizationRequestsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListAllPersonalizationRequestsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAllPersonalizationRequestsResponseDeserializer + extends JsonDeserializer { + @Override + public ListAllPersonalizationRequestsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAllPersonalizationRequestsResponsePb pb = + mapper.readValue(p, ListAllPersonalizationRequestsResponsePb.class); + return ListAllPersonalizationRequestsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponsePb.java new file mode 100755 index 000000000..13a260517 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAllPersonalizationRequestsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("personalization_requests") + private Collection personalizationRequests; + + public ListAllPersonalizationRequestsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListAllPersonalizationRequestsResponsePb setPersonalizationRequests( + Collection personalizationRequests) { + this.personalizationRequests = personalizationRequests; + return this; + } + + public Collection getPersonalizationRequests() { + return personalizationRequests; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllPersonalizationRequestsResponsePb that = (ListAllPersonalizationRequestsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(personalizationRequests, that.personalizationRequests); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, personalizationRequests); + } + + @Override + public String toString() { + return new ToStringer(ListAllPersonalizationRequestsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("personalizationRequests", personalizationRequests) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java index dfb0dffa6..6aff38206 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List exchange filters */ @Generated +@JsonSerialize(using = ListExchangeFiltersRequest.ListExchangeFiltersRequestSerializer.class) +@JsonDeserialize(using = ListExchangeFiltersRequest.ListExchangeFiltersRequestDeserializer.class) public class ListExchangeFiltersRequest { /** */ - @JsonIgnore - @QueryParam("exchange_id") private String exchangeId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListExchangeFiltersRequest setExchangeId(String exchangeId) { @@ -76,4 +80,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListExchangeFiltersRequestPb toPb() { + ListExchangeFiltersRequestPb pb = new ListExchangeFiltersRequestPb(); + pb.setExchangeId(exchangeId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListExchangeFiltersRequest 
fromPb(ListExchangeFiltersRequestPb pb) { + ListExchangeFiltersRequest model = new ListExchangeFiltersRequest(); + model.setExchangeId(pb.getExchangeId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListExchangeFiltersRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExchangeFiltersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangeFiltersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangeFiltersRequestDeserializer + extends JsonDeserializer { + @Override + public ListExchangeFiltersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangeFiltersRequestPb pb = mapper.readValue(p, ListExchangeFiltersRequestPb.class); + return ListExchangeFiltersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequestPb.java new file mode 100755 index 000000000..9b9a46fba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List exchange filters */ +@Generated +class ListExchangeFiltersRequestPb { + @JsonIgnore + @QueryParam("exchange_id") + private String exchangeId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExchangeFiltersRequestPb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ListExchangeFiltersRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangeFiltersRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangeFiltersRequestPb that = (ListExchangeFiltersRequestPb) o; + return Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangeFiltersRequestPb.class) + .add("exchangeId", exchangeId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java index f39d977e3..d7b85fd7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListExchangeFiltersResponse.ListExchangeFiltersResponseSerializer.class) +@JsonDeserialize(using = ListExchangeFiltersResponse.ListExchangeFiltersResponseDeserializer.class) public class ListExchangeFiltersResponse { /** */ - @JsonProperty("filters") private Collection filters; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListExchangeFiltersResponse setFilters(Collection filters) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListExchangeFiltersResponsePb toPb() { + ListExchangeFiltersResponsePb pb = new ListExchangeFiltersResponsePb(); + pb.setFilters(filters); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListExchangeFiltersResponse fromPb(ListExchangeFiltersResponsePb pb) { + ListExchangeFiltersResponse model 
= new ListExchangeFiltersResponse(); + model.setFilters(pb.getFilters()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListExchangeFiltersResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExchangeFiltersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangeFiltersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangeFiltersResponseDeserializer + extends JsonDeserializer { + @Override + public ListExchangeFiltersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangeFiltersResponsePb pb = mapper.readValue(p, ListExchangeFiltersResponsePb.class); + return ListExchangeFiltersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponsePb.java new file mode 100755 index 000000000..107b29952 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListExchangeFiltersResponsePb { + @JsonProperty("filters") + private Collection filters; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangeFiltersResponsePb setFilters(Collection filters) { + this.filters = filters; + return this; + } + + public Collection getFilters() { + return filters; + } + + public ListExchangeFiltersResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangeFiltersResponsePb that = (ListExchangeFiltersResponsePb) o; + return Objects.equals(filters, that.filters) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filters, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangeFiltersResponsePb.class) + .add("filters", filters) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java index 40db7def5..3ce69ca1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java @@ -3,27 +3,33 @@ package com.databricks.sdk.service.marketplace; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List exchanges for listing */ @Generated +@JsonSerialize( + using = ListExchangesForListingRequest.ListExchangesForListingRequestSerializer.class) +@JsonDeserialize( + using = ListExchangesForListingRequest.ListExchangesForListingRequestDeserializer.class) public class ListExchangesForListingRequest { /** */ - @JsonIgnore - @QueryParam("listing_id") private String listingId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListExchangesForListingRequest setListingId(String listingId) { @@ -76,4 +82,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListExchangesForListingRequestPb toPb() { + ListExchangesForListingRequestPb pb = new ListExchangesForListingRequestPb(); + pb.setListingId(listingId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListExchangesForListingRequest fromPb(ListExchangesForListingRequestPb pb) { + ListExchangesForListingRequest model = new ListExchangesForListingRequest(); + model.setListingId(pb.getListingId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public 
static class ListExchangesForListingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExchangesForListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangesForListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangesForListingRequestDeserializer + extends JsonDeserializer { + @Override + public ListExchangesForListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangesForListingRequestPb pb = + mapper.readValue(p, ListExchangesForListingRequestPb.class); + return ListExchangesForListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequestPb.java new file mode 100755 index 000000000..e66d96fe6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List exchanges for listing */ +@Generated +class ListExchangesForListingRequestPb { + @JsonIgnore + @QueryParam("listing_id") + private String listingId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExchangesForListingRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListExchangesForListingRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangesForListingRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesForListingRequestPb that = (ListExchangesForListingRequestPb) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesForListingRequestPb.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java index 7b1fb1990..3e1a237c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListExchangesForListingResponse.ListExchangesForListingResponseSerializer.class) +@JsonDeserialize( + using = ListExchangesForListingResponse.ListExchangesForListingResponseDeserializer.class) public class ListExchangesForListingResponse { /** */ - @JsonProperty("exchange_listing") private Collection exchangeListing; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListExchangesForListingResponse setExchangeListing( @@ -58,4 +69,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListExchangesForListingResponsePb toPb() { + ListExchangesForListingResponsePb pb = new ListExchangesForListingResponsePb(); + pb.setExchangeListing(exchangeListing); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListExchangesForListingResponse 
fromPb(ListExchangesForListingResponsePb pb) { + ListExchangesForListingResponse model = new ListExchangesForListingResponse(); + model.setExchangeListing(pb.getExchangeListing()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListExchangesForListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExchangesForListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangesForListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangesForListingResponseDeserializer + extends JsonDeserializer { + @Override + public ListExchangesForListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangesForListingResponsePb pb = + mapper.readValue(p, ListExchangesForListingResponsePb.class); + return ListExchangesForListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponsePb.java new file mode 100755 index 000000000..d827a0b2c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListExchangesForListingResponsePb { + @JsonProperty("exchange_listing") + private Collection exchangeListing; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangesForListingResponsePb setExchangeListing( + Collection exchangeListing) { + this.exchangeListing = exchangeListing; + return this; + } + + public Collection getExchangeListing() { + return exchangeListing; + } + + public ListExchangesForListingResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesForListingResponsePb that = (ListExchangesForListingResponsePb) o; + return Objects.equals(exchangeListing, that.exchangeListing) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeListing, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesForListingResponsePb.class) + .add("exchangeListing", exchangeListing) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java index deeec6a78..ee567b8cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List exchanges */ @Generated +@JsonSerialize(using = ListExchangesRequest.ListExchangesRequestSerializer.class) +@JsonDeserialize(using = ListExchangesRequest.ListExchangesRequestDeserializer.class) public class ListExchangesRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListExchangesRequest setPageSize(Long pageSize) { @@ -59,4 +65,42 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListExchangesRequestPb toPb() { + ListExchangesRequestPb pb = new ListExchangesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListExchangesRequest fromPb(ListExchangesRequestPb pb) { + ListExchangesRequest model = new ListExchangesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListExchangesRequestSerializer extends JsonSerializer { + @Override + public void serialize( + 
ListExchangesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangesRequestDeserializer + extends JsonDeserializer { + @Override + public ListExchangesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangesRequestPb pb = mapper.readValue(p, ListExchangesRequestPb.class); + return ListExchangesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequestPb.java new file mode 100755 index 000000000..45f2b06b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List exchanges */ +@Generated +class ListExchangesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExchangesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesRequestPb that = (ListExchangesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java index a3340d21e..a5ab3e96b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListExchangesResponse.ListExchangesResponseSerializer.class) +@JsonDeserialize(using = ListExchangesResponse.ListExchangesResponseDeserializer.class) public class ListExchangesResponse { /** */ - @JsonProperty("exchanges") private Collection exchanges; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListExchangesResponse setExchanges(Collection exchanges) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListExchangesResponsePb toPb() { + ListExchangesResponsePb pb = new ListExchangesResponsePb(); + pb.setExchanges(exchanges); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListExchangesResponse fromPb(ListExchangesResponsePb pb) { + ListExchangesResponse model = new ListExchangesResponse(); + model.setExchanges(pb.getExchanges()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListExchangesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExchangesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExchangesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExchangesResponseDeserializer + extends 
JsonDeserializer { + @Override + public ListExchangesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExchangesResponsePb pb = mapper.readValue(p, ListExchangesResponsePb.class); + return ListExchangesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponsePb.java new file mode 100755 index 000000000..03ace3893 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListExchangesResponsePb { + @JsonProperty("exchanges") + private Collection exchanges; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangesResponsePb setExchanges(Collection exchanges) { + this.exchanges = exchanges; + return this; + } + + public Collection getExchanges() { + return exchanges; + } + + public ListExchangesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesResponsePb that = (ListExchangesResponsePb) o; + return Objects.equals(exchanges, that.exchanges) + && 
Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchanges, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesResponsePb.class) + .add("exchanges", exchanges) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java index a84fa947a..c51ba1ea2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List files */ @Generated +@JsonSerialize(using = ListFilesRequest.ListFilesRequestSerializer.class) +@JsonDeserialize(using = ListFilesRequest.ListFilesRequestDeserializer.class) public class ListFilesRequest { /** */ - @JsonIgnore - @QueryParam("file_parent") private FileParent fileParent; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - 
@QueryParam("page_token") private String pageToken; public ListFilesRequest setFileParent(FileParent fileParent) { @@ -76,4 +80,42 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListFilesRequestPb toPb() { + ListFilesRequestPb pb = new ListFilesRequestPb(); + pb.setFileParent(fileParent); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListFilesRequest fromPb(ListFilesRequestPb pb) { + ListFilesRequest model = new ListFilesRequest(); + model.setFileParent(pb.getFileParent()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListFilesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListFilesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFilesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFilesRequestDeserializer extends JsonDeserializer { + @Override + public ListFilesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFilesRequestPb pb = mapper.readValue(p, ListFilesRequestPb.class); + return ListFilesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequestPb.java new file mode 100755 index 000000000..4cd9c0fad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List files */ +@Generated +class ListFilesRequestPb { + @JsonIgnore + @QueryParam("file_parent") + private FileParent fileParent; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListFilesRequestPb setFileParent(FileParent fileParent) { + this.fileParent = fileParent; + return this; + } + + public FileParent getFileParent() { + return fileParent; + } + + public ListFilesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListFilesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFilesRequestPb that = (ListFilesRequestPb) o; + return Objects.equals(fileParent, that.fileParent) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fileParent, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFilesRequestPb.class) + .add("fileParent", fileParent) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java index cb90f3e58..d9d4cefa4 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListFilesResponse.ListFilesResponseSerializer.class) +@JsonDeserialize(using = ListFilesResponse.ListFilesResponseDeserializer.class) public class ListFilesResponse { /** */ - @JsonProperty("file_infos") private Collection fileInfos; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListFilesResponse setFileInfos(Collection fileInfos) { @@ -57,4 +66,40 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListFilesResponsePb toPb() { + ListFilesResponsePb pb = new ListFilesResponsePb(); + pb.setFileInfos(fileInfos); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListFilesResponse fromPb(ListFilesResponsePb pb) { + ListFilesResponse model = new ListFilesResponse(); + model.setFileInfos(pb.getFileInfos()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListFilesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListFilesResponse 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFilesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFilesResponseDeserializer extends JsonDeserializer { + @Override + public ListFilesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFilesResponsePb pb = mapper.readValue(p, ListFilesResponsePb.class); + return ListFilesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponsePb.java new file mode 100755 index 000000000..e92f1dda5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListFilesResponsePb { + @JsonProperty("file_infos") + private Collection fileInfos; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListFilesResponsePb setFileInfos(Collection fileInfos) { + this.fileInfos = fileInfos; + return this; + } + + public Collection getFileInfos() { + return fileInfos; + } + + public ListFilesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFilesResponsePb that = (ListFilesResponsePb) o; + return Objects.equals(fileInfos, that.fileInfos) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fileInfos, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFilesResponsePb.class) + .add("fileInfos", fileInfos) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java index 15c22e34b..e5f90feee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all listing fulfillments */ @Generated +@JsonSerialize(using = ListFulfillmentsRequest.ListFulfillmentsRequestSerializer.class) +@JsonDeserialize(using = ListFulfillmentsRequest.ListFulfillmentsRequestDeserializer.class) public class ListFulfillmentsRequest { /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListFulfillmentsRequest setListingId(String listingId) { @@ -74,4 +80,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListFulfillmentsRequestPb toPb() { + ListFulfillmentsRequestPb pb = new ListFulfillmentsRequestPb(); + pb.setListingId(listingId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListFulfillmentsRequest fromPb(ListFulfillmentsRequestPb pb) { + ListFulfillmentsRequest model = new ListFulfillmentsRequest(); + model.setListingId(pb.getListingId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListFulfillmentsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFulfillmentsRequest 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFulfillmentsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFulfillmentsRequestDeserializer + extends JsonDeserializer { + @Override + public ListFulfillmentsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFulfillmentsRequestPb pb = mapper.readValue(p, ListFulfillmentsRequestPb.class); + return ListFulfillmentsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequestPb.java new file mode 100755 index 000000000..0a0223fcf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all listing fulfillments */ +@Generated +class ListFulfillmentsRequestPb { + @JsonIgnore private String listingId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListFulfillmentsRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListFulfillmentsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListFulfillmentsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFulfillmentsRequestPb that = (ListFulfillmentsRequestPb) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFulfillmentsRequestPb.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java index ca31df7d8..66de019c3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListFulfillmentsResponse.ListFulfillmentsResponseSerializer.class) +@JsonDeserialize(using = ListFulfillmentsResponse.ListFulfillmentsResponseDeserializer.class) public class ListFulfillmentsResponse { /** */ - @JsonProperty("fulfillments") private Collection fulfillments; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListFulfillmentsResponse setFulfillments(Collection fulfillments) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListFulfillmentsResponsePb toPb() { + ListFulfillmentsResponsePb pb = new ListFulfillmentsResponsePb(); + pb.setFulfillments(fulfillments); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListFulfillmentsResponse fromPb(ListFulfillmentsResponsePb pb) { + ListFulfillmentsResponse model = new ListFulfillmentsResponse(); + model.setFulfillments(pb.getFulfillments()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + 
public static class ListFulfillmentsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFulfillmentsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFulfillmentsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFulfillmentsResponseDeserializer + extends JsonDeserializer { + @Override + public ListFulfillmentsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFulfillmentsResponsePb pb = mapper.readValue(p, ListFulfillmentsResponsePb.class); + return ListFulfillmentsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponsePb.java new file mode 100755 index 000000000..6bca519df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListFulfillmentsResponsePb { + @JsonProperty("fulfillments") + private Collection fulfillments; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListFulfillmentsResponsePb setFulfillments(Collection fulfillments) { + this.fulfillments = fulfillments; + return this; + } + + public Collection getFulfillments() { + return fulfillments; + } + + public ListFulfillmentsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFulfillmentsResponsePb that = (ListFulfillmentsResponsePb) o; + return Objects.equals(fulfillments, that.fulfillments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fulfillments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFulfillmentsResponsePb.class) + .add("fulfillments", fulfillments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java index 80d96fa64..840330c12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java @@ -3,25 +3,31 @@ package 
com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List installations for a listing */ @Generated +@JsonSerialize(using = ListInstallationsRequest.ListInstallationsRequestSerializer.class) +@JsonDeserialize(using = ListInstallationsRequest.ListInstallationsRequestDeserializer.class) public class ListInstallationsRequest { /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListInstallationsRequest setListingId(String listingId) { @@ -74,4 +80,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListInstallationsRequestPb toPb() { + ListInstallationsRequestPb pb = new ListInstallationsRequestPb(); + pb.setListingId(listingId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListInstallationsRequest fromPb(ListInstallationsRequestPb pb) { + ListInstallationsRequest model = new ListInstallationsRequest(); + model.setListingId(pb.getListingId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class 
ListInstallationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListInstallationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListInstallationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListInstallationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListInstallationsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListInstallationsRequestPb pb = mapper.readValue(p, ListInstallationsRequestPb.class); + return ListInstallationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequestPb.java new file mode 100755 index 000000000..dc0b9e921 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List installations for a listing */ +@Generated +class ListInstallationsRequestPb { + @JsonIgnore private String listingId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListInstallationsRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListInstallationsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListInstallationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstallationsRequestPb that = (ListInstallationsRequestPb) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListInstallationsRequestPb.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java index 3a4b12401..bf2128d14 100755 
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListInstallationsResponse.ListInstallationsResponseSerializer.class) +@JsonDeserialize(using = ListInstallationsResponse.ListInstallationsResponseDeserializer.class) public class ListInstallationsResponse { /** */ - @JsonProperty("installations") private Collection installations; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListInstallationsResponse setInstallations(Collection installations) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListInstallationsResponsePb toPb() { + ListInstallationsResponsePb pb = new ListInstallationsResponsePb(); + pb.setInstallations(installations); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListInstallationsResponse fromPb(ListInstallationsResponsePb pb) { + ListInstallationsResponse model = new ListInstallationsResponse(); + model.setInstallations(pb.getInstallations()); + model.setNextPageToken(pb.getNextPageToken()); 
+ + return model; + } + + public static class ListInstallationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListInstallationsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListInstallationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListInstallationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListInstallationsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListInstallationsResponsePb pb = mapper.readValue(p, ListInstallationsResponsePb.class); + return ListInstallationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponsePb.java new file mode 100755 index 000000000..813d0b8a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListInstallationsResponsePb { + @JsonProperty("installations") + private Collection installations; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListInstallationsResponsePb setInstallations( + Collection installations) { + this.installations = installations; + return this; + } + + public Collection getInstallations() { + return installations; + } + + public ListInstallationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstallationsResponsePb that = (ListInstallationsResponsePb) o; + return Objects.equals(installations, that.installations) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(installations, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListInstallationsResponsePb.class) + .add("installations", installations) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java index eb49ab129..0bb9170c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java @@ 
-3,27 +3,33 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List listings for exchange */ @Generated +@JsonSerialize( + using = ListListingsForExchangeRequest.ListListingsForExchangeRequestSerializer.class) +@JsonDeserialize( + using = ListListingsForExchangeRequest.ListListingsForExchangeRequestDeserializer.class) public class ListListingsForExchangeRequest { /** */ - @JsonIgnore - @QueryParam("exchange_id") private String exchangeId; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListListingsForExchangeRequest setExchangeId(String exchangeId) { @@ -76,4 +82,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListListingsForExchangeRequestPb toPb() { + ListListingsForExchangeRequestPb pb = new ListListingsForExchangeRequestPb(); + pb.setExchangeId(exchangeId); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListListingsForExchangeRequest fromPb(ListListingsForExchangeRequestPb pb) { + ListListingsForExchangeRequest model = new ListListingsForExchangeRequest(); + model.setExchangeId(pb.getExchangeId()); + 
model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListListingsForExchangeRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListListingsForExchangeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListListingsForExchangeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListListingsForExchangeRequestDeserializer + extends JsonDeserializer { + @Override + public ListListingsForExchangeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListListingsForExchangeRequestPb pb = + mapper.readValue(p, ListListingsForExchangeRequestPb.class); + return ListListingsForExchangeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequestPb.java new file mode 100755 index 000000000..e28d9d25f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List listings for exchange */ +@Generated +class ListListingsForExchangeRequestPb { + @JsonIgnore + @QueryParam("exchange_id") + private String exchangeId; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListListingsForExchangeRequestPb setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ListListingsForExchangeRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListListingsForExchangeRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsForExchangeRequestPb that = (ListListingsForExchangeRequestPb) o; + return Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsForExchangeRequestPb.class) + .add("exchangeId", exchangeId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java index b88884f8e..100c75e53 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListListingsForExchangeResponse.ListListingsForExchangeResponseSerializer.class) +@JsonDeserialize( + using = ListListingsForExchangeResponse.ListListingsForExchangeResponseDeserializer.class) public class ListListingsForExchangeResponse { /** */ - @JsonProperty("exchange_listings") private Collection exchangeListings; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListListingsForExchangeResponse setExchangeListings( @@ -58,4 +69,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListListingsForExchangeResponsePb toPb() { + ListListingsForExchangeResponsePb pb = new ListListingsForExchangeResponsePb(); + pb.setExchangeListings(exchangeListings); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static 
ListListingsForExchangeResponse fromPb(ListListingsForExchangeResponsePb pb) { + ListListingsForExchangeResponse model = new ListListingsForExchangeResponse(); + model.setExchangeListings(pb.getExchangeListings()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListListingsForExchangeResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListListingsForExchangeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListListingsForExchangeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListListingsForExchangeResponseDeserializer + extends JsonDeserializer { + @Override + public ListListingsForExchangeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListListingsForExchangeResponsePb pb = + mapper.readValue(p, ListListingsForExchangeResponsePb.class); + return ListListingsForExchangeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponsePb.java new file mode 100755 index 000000000..c162cfe04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListListingsForExchangeResponsePb { + @JsonProperty("exchange_listings") + private Collection exchangeListings; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListListingsForExchangeResponsePb setExchangeListings( + Collection exchangeListings) { + this.exchangeListings = exchangeListings; + return this; + } + + public Collection getExchangeListings() { + return exchangeListings; + } + + public ListListingsForExchangeResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsForExchangeResponsePb that = (ListListingsForExchangeResponsePb) o; + return Objects.equals(exchangeListings, that.exchangeListings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeListings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsForExchangeResponsePb.class) + .add("exchangeListings", exchangeListings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java index 7d4ca0de8..0015f04b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java @@ -3,58 +3,50 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List listings */ @Generated +@JsonSerialize(using = ListListingsRequest.ListListingsRequestSerializer.class) +@JsonDeserialize(using = ListListingsRequest.ListListingsRequestDeserializer.class) public class ListListingsRequest { /** Matches any of the following asset types */ - @JsonIgnore - @QueryParam("assets") private Collection assets; /** Matches any of the following categories */ - @JsonIgnore - @QueryParam("categories") private Collection categories; /** Filters each listing based on if it is free. */ - @JsonIgnore - @QueryParam("is_free") private Boolean isFree; /** Filters each listing based on if it is a private exchange. */ - @JsonIgnore - @QueryParam("is_private_exchange") private Boolean isPrivateExchange; /** Filters each listing based on whether it is a staff pick. 
*/ - @JsonIgnore - @QueryParam("is_staff_pick") private Boolean isStaffPick; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Matches any of the following provider ids */ - @JsonIgnore - @QueryParam("provider_ids") private Collection providerIds; /** Matches any of the following tags */ - @JsonIgnore - @QueryParam("tags") private Collection tags; public ListListingsRequest setAssets(Collection assets) { @@ -182,4 +174,55 @@ public String toString() { .add("tags", tags) .toString(); } + + ListListingsRequestPb toPb() { + ListListingsRequestPb pb = new ListListingsRequestPb(); + pb.setAssets(assets); + pb.setCategories(categories); + pb.setIsFree(isFree); + pb.setIsPrivateExchange(isPrivateExchange); + pb.setIsStaffPick(isStaffPick); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setProviderIds(providerIds); + pb.setTags(tags); + + return pb; + } + + static ListListingsRequest fromPb(ListListingsRequestPb pb) { + ListListingsRequest model = new ListListingsRequest(); + model.setAssets(pb.getAssets()); + model.setCategories(pb.getCategories()); + model.setIsFree(pb.getIsFree()); + model.setIsPrivateExchange(pb.getIsPrivateExchange()); + model.setIsStaffPick(pb.getIsStaffPick()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setProviderIds(pb.getProviderIds()); + model.setTags(pb.getTags()); + + return model; + } + + public static class ListListingsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListListingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListListingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListListingsRequestDeserializer + extends JsonDeserializer { + @Override + public ListListingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException 
{ + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListListingsRequestPb pb = mapper.readValue(p, ListListingsRequestPb.class); + return ListListingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequestPb.java new file mode 100755 index 000000000..c5c3a3937 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequestPb.java @@ -0,0 +1,176 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** List listings */ +@Generated +class ListListingsRequestPb { + @JsonIgnore + @QueryParam("assets") + private Collection assets; + + @JsonIgnore + @QueryParam("categories") + private Collection categories; + + @JsonIgnore + @QueryParam("is_free") + private Boolean isFree; + + @JsonIgnore + @QueryParam("is_private_exchange") + private Boolean isPrivateExchange; + + @JsonIgnore + @QueryParam("is_staff_pick") + private Boolean isStaffPick; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("provider_ids") + private Collection providerIds; + + @JsonIgnore + @QueryParam("tags") + private Collection tags; + + public ListListingsRequestPb setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public ListListingsRequestPb 
setCategories(Collection categories) { + this.categories = categories; + return this; + } + + public Collection getCategories() { + return categories; + } + + public ListListingsRequestPb setIsFree(Boolean isFree) { + this.isFree = isFree; + return this; + } + + public Boolean getIsFree() { + return isFree; + } + + public ListListingsRequestPb setIsPrivateExchange(Boolean isPrivateExchange) { + this.isPrivateExchange = isPrivateExchange; + return this; + } + + public Boolean getIsPrivateExchange() { + return isPrivateExchange; + } + + public ListListingsRequestPb setIsStaffPick(Boolean isStaffPick) { + this.isStaffPick = isStaffPick; + return this; + } + + public Boolean getIsStaffPick() { + return isStaffPick; + } + + public ListListingsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListListingsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListListingsRequestPb setProviderIds(Collection providerIds) { + this.providerIds = providerIds; + return this; + } + + public Collection getProviderIds() { + return providerIds; + } + + public ListListingsRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsRequestPb that = (ListListingsRequestPb) o; + return Objects.equals(assets, that.assets) + && Objects.equals(categories, that.categories) + && Objects.equals(isFree, that.isFree) + && Objects.equals(isPrivateExchange, that.isPrivateExchange) + && Objects.equals(isStaffPick, that.isStaffPick) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(providerIds, that.providerIds) 
+ && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, + categories, + isFree, + isPrivateExchange, + isStaffPick, + pageSize, + pageToken, + providerIds, + tags); + } + + @Override + public String toString() { + return new ToStringer(ListListingsRequestPb.class) + .add("assets", assets) + .add("categories", categories) + .add("isFree", isFree) + .add("isPrivateExchange", isPrivateExchange) + .add("isStaffPick", isStaffPick) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("providerIds", providerIds) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java index 1ec5cf42c..76f47c798 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListListingsResponse.ListListingsResponseSerializer.class) +@JsonDeserialize(using = 
ListListingsResponse.ListListingsResponseDeserializer.class) public class ListListingsResponse { /** */ - @JsonProperty("listings") private Collection listings; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public ListListingsResponse setListings(Collection listings) { @@ -57,4 +66,42 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListListingsResponsePb toPb() { + ListListingsResponsePb pb = new ListListingsResponsePb(); + pb.setListings(listings); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListListingsResponse fromPb(ListListingsResponsePb pb) { + ListListingsResponse model = new ListListingsResponse(); + model.setListings(pb.getListings()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListListingsResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListListingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListListingsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListListingsResponseDeserializer + extends JsonDeserializer { + @Override + public ListListingsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListListingsResponsePb pb = mapper.readValue(p, ListListingsResponsePb.class); + return ListListingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponsePb.java new file mode 100755 index 000000000..19079c4c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListListingsResponsePb { + @JsonProperty("listings") + private Collection listings; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListListingsResponsePb setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public ListListingsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsResponsePb that = (ListListingsResponsePb) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsResponsePb.class) + .add("listings", 
listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java index 43dc6cef5..c1ac820cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListProviderAnalyticsDashboardResponse.ListProviderAnalyticsDashboardResponseSerializer + .class) +@JsonDeserialize( + using = + ListProviderAnalyticsDashboardResponse.ListProviderAnalyticsDashboardResponseDeserializer + .class) public class ListProviderAnalyticsDashboardResponse { /** dashboard_id will be used to open Lakeview dashboard. 
*/ - @JsonProperty("dashboard_id") private String dashboardId; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("version") private Long version; public ListProviderAnalyticsDashboardResponse setDashboardId(String dashboardId) { @@ -71,4 +85,49 @@ public String toString() { .add("version", version) .toString(); } + + ListProviderAnalyticsDashboardResponsePb toPb() { + ListProviderAnalyticsDashboardResponsePb pb = new ListProviderAnalyticsDashboardResponsePb(); + pb.setDashboardId(dashboardId); + pb.setId(id); + pb.setVersion(version); + + return pb; + } + + static ListProviderAnalyticsDashboardResponse fromPb( + ListProviderAnalyticsDashboardResponsePb pb) { + ListProviderAnalyticsDashboardResponse model = new ListProviderAnalyticsDashboardResponse(); + model.setDashboardId(pb.getDashboardId()); + model.setId(pb.getId()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ListProviderAnalyticsDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProviderAnalyticsDashboardResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListProviderAnalyticsDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProviderAnalyticsDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public ListProviderAnalyticsDashboardResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProviderAnalyticsDashboardResponsePb pb = + mapper.readValue(p, ListProviderAnalyticsDashboardResponsePb.class); + return ListProviderAnalyticsDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponsePb.java new file mode 100755 index 000000000..f70db0345 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListProviderAnalyticsDashboardResponsePb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("id") + private String id; + + @JsonProperty("version") + private Long version; + + public ListProviderAnalyticsDashboardResponsePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public ListProviderAnalyticsDashboardResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListProviderAnalyticsDashboardResponsePb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderAnalyticsDashboardResponsePb that = (ListProviderAnalyticsDashboardResponsePb) 
o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(id, that.id) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, id, version); + } + + @Override + public String toString() { + return new ToStringer(ListProviderAnalyticsDashboardResponsePb.class) + .add("dashboardId", dashboardId) + .add("id", id) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java index a609ba7b1..c15718ff2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java @@ -3,27 +3,31 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List providers */ @Generated +@JsonSerialize(using = ListProvidersRequest.ListProvidersRequestSerializer.class) +@JsonDeserialize(using = ListProvidersRequest.ListProvidersRequestDeserializer.class) public class ListProvidersRequest { /** */ - @JsonIgnore - 
@QueryParam("is_featured") private Boolean isFeatured; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListProvidersRequest setIsFeatured(Boolean isFeatured) { @@ -76,4 +80,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListProvidersRequestPb toPb() { + ListProvidersRequestPb pb = new ListProvidersRequestPb(); + pb.setIsFeatured(isFeatured); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListProvidersRequest fromPb(ListProvidersRequestPb pb) { + ListProvidersRequest model = new ListProvidersRequest(); + model.setIsFeatured(pb.getIsFeatured()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListProvidersRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListProvidersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProvidersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProvidersRequestDeserializer + extends JsonDeserializer { + @Override + public ListProvidersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProvidersRequestPb pb = mapper.readValue(p, ListProvidersRequestPb.class); + return ListProvidersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequestPb.java new file mode 100755 index 000000000..30f0582d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List providers */ +@Generated +class ListProvidersRequestPb { + @JsonIgnore + @QueryParam("is_featured") + private Boolean isFeatured; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListProvidersRequestPb setIsFeatured(Boolean isFeatured) { + this.isFeatured = isFeatured; + return this; + } + + public Boolean getIsFeatured() { + return isFeatured; + } + + public ListProvidersRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListProvidersRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProvidersRequestPb that = (ListProvidersRequestPb) o; + return Objects.equals(isFeatured, 
that.isFeatured) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(isFeatured, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListProvidersRequestPb.class) + .add("isFeatured", isFeatured) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java index e91d16bd7..1787b89a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListProvidersResponse.ListProvidersResponseSerializer.class) +@JsonDeserialize(using = ListProvidersResponse.ListProvidersResponseDeserializer.class) public class ListProvidersResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("providers") private Collection 
providers; public ListProvidersResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,43 @@ public String toString() { .add("providers", providers) .toString(); } + + ListProvidersResponsePb toPb() { + ListProvidersResponsePb pb = new ListProvidersResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setProviders(providers); + + return pb; + } + + static ListProvidersResponse fromPb(ListProvidersResponsePb pb) { + ListProvidersResponse model = new ListProvidersResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setProviders(pb.getProviders()); + + return model; + } + + public static class ListProvidersResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProvidersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProvidersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProvidersResponseDeserializer + extends JsonDeserializer { + @Override + public ListProvidersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProvidersResponsePb pb = mapper.readValue(p, ListProvidersResponsePb.class); + return ListProvidersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponsePb.java new file mode 100755 index 000000000..8c97c2cfa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListProvidersResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("providers") + private Collection providers; + + public ListProvidersResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListProvidersResponsePb setProviders(Collection providers) { + this.providers = providers; + return this; + } + + public Collection getProviders() { + return providers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProvidersResponsePb that = (ListProvidersResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(providers, that.providers); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, providers); + } + + @Override + public String toString() { + return new ToStringer(ListProvidersResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("providers", providers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java index bf2edad5d..5268d1ae4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Listing.ListingSerializer.class) +@JsonDeserialize(using = Listing.ListingDeserializer.class) public class Listing { /** */ - @JsonProperty("detail") private ListingDetail detail; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("summary") private ListingSummary summary; public Listing setDetail(ListingDetail detail) { @@ -71,4 +79,41 @@ public String toString() { .add("summary", summary) .toString(); } + + ListingPb toPb() { + ListingPb pb = new ListingPb(); + pb.setDetail(detail); + pb.setId(id); + pb.setSummary(summary); + + return pb; + } + + static Listing fromPb(ListingPb pb) { + Listing model = new Listing(); + model.setDetail(pb.getDetail()); + model.setId(pb.getId()); + model.setSummary(pb.getSummary()); + + return model; + } + + public static class ListingSerializer extends JsonSerializer { + @Override + public void serialize(Listing value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingDeserializer extends JsonDeserializer { + @Override + public Listing deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingPb pb = mapper.readValue(p, ListingPb.class); + return Listing.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java index dcf891232..1a14771f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java @@ -4,80 +4,75 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListingDetail.ListingDetailSerializer.class) +@JsonDeserialize(using = ListingDetail.ListingDetailDeserializer.class) public class ListingDetail { /** Type of assets included in the listing. eg. 
GIT_REPO, DATA_TABLE, MODEL, NOTEBOOK */ - @JsonProperty("assets") private Collection assets; /** The ending date timestamp for when the data spans */ - @JsonProperty("collection_date_end") private Long collectionDateEnd; /** The starting date timestamp for when the data spans */ - @JsonProperty("collection_date_start") private Long collectionDateStart; /** Smallest unit of time in the dataset */ - @JsonProperty("collection_granularity") private DataRefreshInfo collectionGranularity; /** Whether the dataset is free or paid */ - @JsonProperty("cost") private Cost cost; /** Where/how the data is sourced */ - @JsonProperty("data_source") private String dataSource; /** */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("documentation_link") private String documentationLink; /** */ - @JsonProperty("embedded_notebook_file_infos") private Collection embeddedNotebookFileInfos; /** */ - @JsonProperty("file_ids") private Collection fileIds; /** Which geo region the listing data is collected from */ - @JsonProperty("geographical_coverage") private String geographicalCoverage; /** * ID 20, 21 removed don't use License of the data asset - Required for listings with model based * assets */ - @JsonProperty("license") private String license; /** * What the pricing model is (e.g. paid, subscription, paid upfront); should only be present if * cost is paid TODO: Not used yet, should deprecate if we will never use it */ - @JsonProperty("pricing_model") private String pricingModel; /** */ - @JsonProperty("privacy_policy_link") private String privacyPolicyLink; /** size of the dataset in GB */ - @JsonProperty("size") private Double size; /** */ - @JsonProperty("support_link") private String supportLink; /** @@ -88,15 +83,12 @@ public class ListingDetail { * fairly fixed, static and low cardinality (eg. enums). 3. The value won't be used in filters or * joins with other tables. 
*/ - @JsonProperty("tags") private Collection tags; /** */ - @JsonProperty("terms_of_service") private String termsOfService; /** How often data is updated */ - @JsonProperty("update_frequency") private DataRefreshInfo updateFrequency; public ListingDetail setAssets(Collection assets) { @@ -345,4 +337,73 @@ public String toString() { .add("updateFrequency", updateFrequency) .toString(); } + + ListingDetailPb toPb() { + ListingDetailPb pb = new ListingDetailPb(); + pb.setAssets(assets); + pb.setCollectionDateEnd(collectionDateEnd); + pb.setCollectionDateStart(collectionDateStart); + pb.setCollectionGranularity(collectionGranularity); + pb.setCost(cost); + pb.setDataSource(dataSource); + pb.setDescription(description); + pb.setDocumentationLink(documentationLink); + pb.setEmbeddedNotebookFileInfos(embeddedNotebookFileInfos); + pb.setFileIds(fileIds); + pb.setGeographicalCoverage(geographicalCoverage); + pb.setLicense(license); + pb.setPricingModel(pricingModel); + pb.setPrivacyPolicyLink(privacyPolicyLink); + pb.setSize(size); + pb.setSupportLink(supportLink); + pb.setTags(tags); + pb.setTermsOfService(termsOfService); + pb.setUpdateFrequency(updateFrequency); + + return pb; + } + + static ListingDetail fromPb(ListingDetailPb pb) { + ListingDetail model = new ListingDetail(); + model.setAssets(pb.getAssets()); + model.setCollectionDateEnd(pb.getCollectionDateEnd()); + model.setCollectionDateStart(pb.getCollectionDateStart()); + model.setCollectionGranularity(pb.getCollectionGranularity()); + model.setCost(pb.getCost()); + model.setDataSource(pb.getDataSource()); + model.setDescription(pb.getDescription()); + model.setDocumentationLink(pb.getDocumentationLink()); + model.setEmbeddedNotebookFileInfos(pb.getEmbeddedNotebookFileInfos()); + model.setFileIds(pb.getFileIds()); + model.setGeographicalCoverage(pb.getGeographicalCoverage()); + model.setLicense(pb.getLicense()); + model.setPricingModel(pb.getPricingModel()); + 
model.setPrivacyPolicyLink(pb.getPrivacyPolicyLink()); + model.setSize(pb.getSize()); + model.setSupportLink(pb.getSupportLink()); + model.setTags(pb.getTags()); + model.setTermsOfService(pb.getTermsOfService()); + model.setUpdateFrequency(pb.getUpdateFrequency()); + + return model; + } + + public static class ListingDetailSerializer extends JsonSerializer { + @Override + public void serialize(ListingDetail value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingDetailDeserializer extends JsonDeserializer { + @Override + public ListingDetail deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingDetailPb pb = mapper.readValue(p, ListingDetailPb.class); + return ListingDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetailPb.java new file mode 100755 index 000000000..28813d3d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetailPb.java @@ -0,0 +1,316 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListingDetailPb { + @JsonProperty("assets") + private Collection assets; + + @JsonProperty("collection_date_end") + private Long collectionDateEnd; + + @JsonProperty("collection_date_start") + private Long collectionDateStart; + + @JsonProperty("collection_granularity") + private DataRefreshInfo collectionGranularity; + + @JsonProperty("cost") + private Cost cost; + + @JsonProperty("data_source") + private String dataSource; + + @JsonProperty("description") + private String description; + + @JsonProperty("documentation_link") + private String documentationLink; + + @JsonProperty("embedded_notebook_file_infos") + private Collection embeddedNotebookFileInfos; + + @JsonProperty("file_ids") + private Collection fileIds; + + @JsonProperty("geographical_coverage") + private String geographicalCoverage; + + @JsonProperty("license") + private String license; + + @JsonProperty("pricing_model") + private String pricingModel; + + @JsonProperty("privacy_policy_link") + private String privacyPolicyLink; + + @JsonProperty("size") + private Double size; + + @JsonProperty("support_link") + private String supportLink; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("terms_of_service") + private String termsOfService; + + @JsonProperty("update_frequency") + private DataRefreshInfo updateFrequency; + + public ListingDetailPb setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public ListingDetailPb setCollectionDateEnd(Long collectionDateEnd) { + this.collectionDateEnd = collectionDateEnd; + return this; + } + + public Long getCollectionDateEnd() { + return collectionDateEnd; + } + + public ListingDetailPb 
setCollectionDateStart(Long collectionDateStart) { + this.collectionDateStart = collectionDateStart; + return this; + } + + public Long getCollectionDateStart() { + return collectionDateStart; + } + + public ListingDetailPb setCollectionGranularity(DataRefreshInfo collectionGranularity) { + this.collectionGranularity = collectionGranularity; + return this; + } + + public DataRefreshInfo getCollectionGranularity() { + return collectionGranularity; + } + + public ListingDetailPb setCost(Cost cost) { + this.cost = cost; + return this; + } + + public Cost getCost() { + return cost; + } + + public ListingDetailPb setDataSource(String dataSource) { + this.dataSource = dataSource; + return this; + } + + public String getDataSource() { + return dataSource; + } + + public ListingDetailPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ListingDetailPb setDocumentationLink(String documentationLink) { + this.documentationLink = documentationLink; + return this; + } + + public String getDocumentationLink() { + return documentationLink; + } + + public ListingDetailPb setEmbeddedNotebookFileInfos( + Collection embeddedNotebookFileInfos) { + this.embeddedNotebookFileInfos = embeddedNotebookFileInfos; + return this; + } + + public Collection getEmbeddedNotebookFileInfos() { + return embeddedNotebookFileInfos; + } + + public ListingDetailPb setFileIds(Collection fileIds) { + this.fileIds = fileIds; + return this; + } + + public Collection getFileIds() { + return fileIds; + } + + public ListingDetailPb setGeographicalCoverage(String geographicalCoverage) { + this.geographicalCoverage = geographicalCoverage; + return this; + } + + public String getGeographicalCoverage() { + return geographicalCoverage; + } + + public ListingDetailPb setLicense(String license) { + this.license = license; + return this; + } + + public String getLicense() { + return license; + } + + public 
ListingDetailPb setPricingModel(String pricingModel) { + this.pricingModel = pricingModel; + return this; + } + + public String getPricingModel() { + return pricingModel; + } + + public ListingDetailPb setPrivacyPolicyLink(String privacyPolicyLink) { + this.privacyPolicyLink = privacyPolicyLink; + return this; + } + + public String getPrivacyPolicyLink() { + return privacyPolicyLink; + } + + public ListingDetailPb setSize(Double size) { + this.size = size; + return this; + } + + public Double getSize() { + return size; + } + + public ListingDetailPb setSupportLink(String supportLink) { + this.supportLink = supportLink; + return this; + } + + public String getSupportLink() { + return supportLink; + } + + public ListingDetailPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ListingDetailPb setTermsOfService(String termsOfService) { + this.termsOfService = termsOfService; + return this; + } + + public String getTermsOfService() { + return termsOfService; + } + + public ListingDetailPb setUpdateFrequency(DataRefreshInfo updateFrequency) { + this.updateFrequency = updateFrequency; + return this; + } + + public DataRefreshInfo getUpdateFrequency() { + return updateFrequency; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingDetailPb that = (ListingDetailPb) o; + return Objects.equals(assets, that.assets) + && Objects.equals(collectionDateEnd, that.collectionDateEnd) + && Objects.equals(collectionDateStart, that.collectionDateStart) + && Objects.equals(collectionGranularity, that.collectionGranularity) + && Objects.equals(cost, that.cost) + && Objects.equals(dataSource, that.dataSource) + && Objects.equals(description, that.description) + && Objects.equals(documentationLink, that.documentationLink) + && Objects.equals(embeddedNotebookFileInfos, that.embeddedNotebookFileInfos) + && 
Objects.equals(fileIds, that.fileIds) + && Objects.equals(geographicalCoverage, that.geographicalCoverage) + && Objects.equals(license, that.license) + && Objects.equals(pricingModel, that.pricingModel) + && Objects.equals(privacyPolicyLink, that.privacyPolicyLink) + && Objects.equals(size, that.size) + && Objects.equals(supportLink, that.supportLink) + && Objects.equals(tags, that.tags) + && Objects.equals(termsOfService, that.termsOfService) + && Objects.equals(updateFrequency, that.updateFrequency); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, + collectionDateEnd, + collectionDateStart, + collectionGranularity, + cost, + dataSource, + description, + documentationLink, + embeddedNotebookFileInfos, + fileIds, + geographicalCoverage, + license, + pricingModel, + privacyPolicyLink, + size, + supportLink, + tags, + termsOfService, + updateFrequency); + } + + @Override + public String toString() { + return new ToStringer(ListingDetailPb.class) + .add("assets", assets) + .add("collectionDateEnd", collectionDateEnd) + .add("collectionDateStart", collectionDateStart) + .add("collectionGranularity", collectionGranularity) + .add("cost", cost) + .add("dataSource", dataSource) + .add("description", description) + .add("documentationLink", documentationLink) + .add("embeddedNotebookFileInfos", embeddedNotebookFileInfos) + .add("fileIds", fileIds) + .add("geographicalCoverage", geographicalCoverage) + .add("license", license) + .add("pricingModel", pricingModel) + .add("privacyPolicyLink", privacyPolicyLink) + .add("size", size) + .add("supportLink", supportLink) + .add("tags", tags) + .add("termsOfService", termsOfService) + .add("updateFrequency", updateFrequency) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java index 2fc0506cd..72b79b1cc 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ListingFulfillment.ListingFulfillmentSerializer.class) +@JsonDeserialize(using = ListingFulfillment.ListingFulfillmentDeserializer.class) public class ListingFulfillment { /** */ - @JsonProperty("fulfillment_type") private FulfillmentType fulfillmentType; /** */ - @JsonProperty("listing_id") private String listingId; /** */ - @JsonProperty("recipient_type") private DeltaSharingRecipientType recipientType; /** */ - @JsonProperty("repo_info") private RepoInfo repoInfo; /** */ - @JsonProperty("share_info") private ShareInfo shareInfo; public ListingFulfillment setFulfillmentType(FulfillmentType fulfillmentType) { @@ -101,4 +107,46 @@ public String toString() { .add("shareInfo", shareInfo) .toString(); } + + ListingFulfillmentPb toPb() { + ListingFulfillmentPb pb = new ListingFulfillmentPb(); + pb.setFulfillmentType(fulfillmentType); + pb.setListingId(listingId); + pb.setRecipientType(recipientType); + pb.setRepoInfo(repoInfo); + pb.setShareInfo(shareInfo); + + return pb; + } + + static ListingFulfillment 
fromPb(ListingFulfillmentPb pb) { + ListingFulfillment model = new ListingFulfillment(); + model.setFulfillmentType(pb.getFulfillmentType()); + model.setListingId(pb.getListingId()); + model.setRecipientType(pb.getRecipientType()); + model.setRepoInfo(pb.getRepoInfo()); + model.setShareInfo(pb.getShareInfo()); + + return model; + } + + public static class ListingFulfillmentSerializer extends JsonSerializer { + @Override + public void serialize(ListingFulfillment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingFulfillmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingFulfillmentDeserializer extends JsonDeserializer { + @Override + public ListingFulfillment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingFulfillmentPb pb = mapper.readValue(p, ListingFulfillmentPb.class); + return ListingFulfillment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillmentPb.java new file mode 100755 index 000000000..af94c8788 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillmentPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListingFulfillmentPb { + @JsonProperty("fulfillment_type") + private FulfillmentType fulfillmentType; + + @JsonProperty("listing_id") + private String listingId; + + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + @JsonProperty("repo_info") + private RepoInfo repoInfo; + + @JsonProperty("share_info") + private ShareInfo shareInfo; + + public ListingFulfillmentPb setFulfillmentType(FulfillmentType fulfillmentType) { + this.fulfillmentType = fulfillmentType; + return this; + } + + public FulfillmentType getFulfillmentType() { + return fulfillmentType; + } + + public ListingFulfillmentPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListingFulfillmentPb setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public ListingFulfillmentPb setRepoInfo(RepoInfo repoInfo) { + this.repoInfo = repoInfo; + return this; + } + + public RepoInfo getRepoInfo() { + return repoInfo; + } + + public ListingFulfillmentPb setShareInfo(ShareInfo shareInfo) { + this.shareInfo = shareInfo; + return this; + } + + public ShareInfo getShareInfo() { + return shareInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingFulfillmentPb that = (ListingFulfillmentPb) o; + return Objects.equals(fulfillmentType, that.fulfillmentType) + && Objects.equals(listingId, that.listingId) + && Objects.equals(recipientType, that.recipientType) + && Objects.equals(repoInfo, 
that.repoInfo) + && Objects.equals(shareInfo, that.shareInfo); + } + + @Override + public int hashCode() { + return Objects.hash(fulfillmentType, listingId, recipientType, repoInfo, shareInfo); + } + + @Override + public String toString() { + return new ToStringer(ListingFulfillmentPb.class) + .add("fulfillmentType", fulfillmentType) + .add("listingId", listingId) + .add("recipientType", recipientType) + .add("repoInfo", repoInfo) + .add("shareInfo", shareInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingPb.java new file mode 100755 index 000000000..50b53f50a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListingPb { + @JsonProperty("detail") + private ListingDetail detail; + + @JsonProperty("id") + private String id; + + @JsonProperty("summary") + private ListingSummary summary; + + public ListingPb setDetail(ListingDetail detail) { + this.detail = detail; + return this; + } + + public ListingDetail getDetail() { + return detail; + } + + public ListingPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListingPb setSummary(ListingSummary summary) { + this.summary = summary; + return this; + } + + public ListingSummary getSummary() { + return summary; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingPb that = (ListingPb) o; + return 
Objects.equals(detail, that.detail) + && Objects.equals(id, that.id) + && Objects.equals(summary, that.summary); + } + + @Override + public int hashCode() { + return Objects.hash(detail, id, summary); + } + + @Override + public String toString() { + return new ToStringer(ListingPb.class) + .add("detail", detail) + .add("id", id) + .add("summary", summary) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java index 8013a0cc7..27675800e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ListingSetting.ListingSettingSerializer.class) +@JsonDeserialize(using = ListingSetting.ListingSettingDeserializer.class) public class ListingSetting { /** */ - @JsonProperty("visibility") private Visibility visibility; public ListingSetting setVisibility(Visibility visibility) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListingSetting.class).add("visibility", 
visibility).toString(); } + + ListingSettingPb toPb() { + ListingSettingPb pb = new ListingSettingPb(); + pb.setVisibility(visibility); + + return pb; + } + + static ListingSetting fromPb(ListingSettingPb pb) { + ListingSetting model = new ListingSetting(); + model.setVisibility(pb.getVisibility()); + + return model; + } + + public static class ListingSettingSerializer extends JsonSerializer { + @Override + public void serialize(ListingSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingSettingDeserializer extends JsonDeserializer { + @Override + public ListingSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingSettingPb pb = mapper.readValue(p, ListingSettingPb.class); + return ListingSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSettingPb.java new file mode 100755 index 000000000..436829fa5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSettingPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListingSettingPb { + @JsonProperty("visibility") + private Visibility visibility; + + public ListingSettingPb setVisibility(Visibility visibility) { + this.visibility = visibility; + return this; + } + + public Visibility getVisibility() { + return visibility; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingSettingPb that = (ListingSettingPb) o; + return Objects.equals(visibility, that.visibility); + } + + @Override + public int hashCode() { + return Objects.hash(visibility); + } + + @Override + public String toString() { + return new ToStringer(ListingSettingPb.class).add("visibility", visibility).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java index 60e960e72..65cb5679d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java @@ -4,89 +4,81 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListingSummary.ListingSummarySerializer.class) +@JsonDeserialize(using = ListingSummary.ListingSummaryDeserializer.class) public class ListingSummary { /** */ - @JsonProperty("categories") private Collection categories; /** */ - @JsonProperty("created_at") private Long createdAt; /** */ - @JsonProperty("created_by") private String createdBy; /** */ - @JsonProperty("created_by_id") private Long createdById; /** */ - @JsonProperty("exchange_ids") private Collection exchangeIds; /** * if a git repo is being created, a listing will be initialized with this field as opposed to a * share */ - @JsonProperty("git_repo") private RepoInfo gitRepo; /** */ - @JsonProperty("listingType") private ListingType listingType; /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("provider_id") private String providerId; /** */ - @JsonProperty("provider_region") private RegionInfo providerRegion; /** */ - @JsonProperty("published_at") private Long publishedAt; /** */ - @JsonProperty("published_by") private String publishedBy; /** */ - @JsonProperty("setting") private ListingSetting setting; /** */ - @JsonProperty("share") private ShareInfo share; /** Enums */ - @JsonProperty("status") private ListingStatus status; /** */ - @JsonProperty("subtitle") private String subtitle; /** */ - @JsonProperty("updated_at") private Long updatedAt; /** */ - @JsonProperty("updated_by") private String updatedBy; /** */ - @JsonProperty("updated_by_id") private Long updatedById; public ListingSummary setCategories(Collection categories) { @@ -334,4 +326,74 @@ public String toString() { .add("updatedById", updatedById) .toString(); } + + ListingSummaryPb toPb() { + ListingSummaryPb pb = new ListingSummaryPb(); + pb.setCategories(categories); + 
pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setCreatedById(createdById); + pb.setExchangeIds(exchangeIds); + pb.setGitRepo(gitRepo); + pb.setListingType(listingType); + pb.setName(name); + pb.setProviderId(providerId); + pb.setProviderRegion(providerRegion); + pb.setPublishedAt(publishedAt); + pb.setPublishedBy(publishedBy); + pb.setSetting(setting); + pb.setShare(share); + pb.setStatus(status); + pb.setSubtitle(subtitle); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + pb.setUpdatedById(updatedById); + + return pb; + } + + static ListingSummary fromPb(ListingSummaryPb pb) { + ListingSummary model = new ListingSummary(); + model.setCategories(pb.getCategories()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setCreatedById(pb.getCreatedById()); + model.setExchangeIds(pb.getExchangeIds()); + model.setGitRepo(pb.getGitRepo()); + model.setListingType(pb.getListingType()); + model.setName(pb.getName()); + model.setProviderId(pb.getProviderId()); + model.setProviderRegion(pb.getProviderRegion()); + model.setPublishedAt(pb.getPublishedAt()); + model.setPublishedBy(pb.getPublishedBy()); + model.setSetting(pb.getSetting()); + model.setShare(pb.getShare()); + model.setStatus(pb.getStatus()); + model.setSubtitle(pb.getSubtitle()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + model.setUpdatedById(pb.getUpdatedById()); + + return model; + } + + public static class ListingSummarySerializer extends JsonSerializer { + @Override + public void serialize(ListingSummary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingSummaryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingSummaryDeserializer extends JsonDeserializer { + @Override + public ListingSummary deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingSummaryPb pb = mapper.readValue(p, ListingSummaryPb.class); + return ListingSummary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummaryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummaryPb.java new file mode 100755 index 000000000..c492ffdfe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummaryPb.java @@ -0,0 +1,315 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListingSummaryPb { + @JsonProperty("categories") + private Collection categories; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("created_by_id") + private Long createdById; + + @JsonProperty("exchange_ids") + private Collection exchangeIds; + + @JsonProperty("git_repo") + private RepoInfo gitRepo; + + @JsonProperty("listingType") + private ListingType listingType; + + @JsonProperty("name") + private String name; + + @JsonProperty("provider_id") + private String providerId; + + @JsonProperty("provider_region") + private RegionInfo providerRegion; + + @JsonProperty("published_at") + private Long publishedAt; + + @JsonProperty("published_by") + private String publishedBy; + + @JsonProperty("setting") + private ListingSetting setting; + + @JsonProperty("share") + private ShareInfo share; + + @JsonProperty("status") + private ListingStatus status; + + @JsonProperty("subtitle") + private String subtitle; + + @JsonProperty("updated_at") + 
private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + @JsonProperty("updated_by_id") + private Long updatedById; + + public ListingSummaryPb setCategories(Collection categories) { + this.categories = categories; + return this; + } + + public Collection getCategories() { + return categories; + } + + public ListingSummaryPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ListingSummaryPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ListingSummaryPb setCreatedById(Long createdById) { + this.createdById = createdById; + return this; + } + + public Long getCreatedById() { + return createdById; + } + + public ListingSummaryPb setExchangeIds(Collection exchangeIds) { + this.exchangeIds = exchangeIds; + return this; + } + + public Collection getExchangeIds() { + return exchangeIds; + } + + public ListingSummaryPb setGitRepo(RepoInfo gitRepo) { + this.gitRepo = gitRepo; + return this; + } + + public RepoInfo getGitRepo() { + return gitRepo; + } + + public ListingSummaryPb setListingType(ListingType listingType) { + this.listingType = listingType; + return this; + } + + public ListingType getListingType() { + return listingType; + } + + public ListingSummaryPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ListingSummaryPb setProviderId(String providerId) { + this.providerId = providerId; + return this; + } + + public String getProviderId() { + return providerId; + } + + public ListingSummaryPb setProviderRegion(RegionInfo providerRegion) { + this.providerRegion = providerRegion; + return this; + } + + public RegionInfo getProviderRegion() { + return providerRegion; + } + + public ListingSummaryPb setPublishedAt(Long publishedAt) { + this.publishedAt = publishedAt; 
+ return this; + } + + public Long getPublishedAt() { + return publishedAt; + } + + public ListingSummaryPb setPublishedBy(String publishedBy) { + this.publishedBy = publishedBy; + return this; + } + + public String getPublishedBy() { + return publishedBy; + } + + public ListingSummaryPb setSetting(ListingSetting setting) { + this.setting = setting; + return this; + } + + public ListingSetting getSetting() { + return setting; + } + + public ListingSummaryPb setShare(ShareInfo share) { + this.share = share; + return this; + } + + public ShareInfo getShare() { + return share; + } + + public ListingSummaryPb setStatus(ListingStatus status) { + this.status = status; + return this; + } + + public ListingStatus getStatus() { + return status; + } + + public ListingSummaryPb setSubtitle(String subtitle) { + this.subtitle = subtitle; + return this; + } + + public String getSubtitle() { + return subtitle; + } + + public ListingSummaryPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ListingSummaryPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public ListingSummaryPb setUpdatedById(Long updatedById) { + this.updatedById = updatedById; + return this; + } + + public Long getUpdatedById() { + return updatedById; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingSummaryPb that = (ListingSummaryPb) o; + return Objects.equals(categories, that.categories) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(createdById, that.createdById) + && Objects.equals(exchangeIds, that.exchangeIds) + && Objects.equals(gitRepo, that.gitRepo) + && Objects.equals(listingType, that.listingType) + && Objects.equals(name, that.name) + && 
Objects.equals(providerId, that.providerId) + && Objects.equals(providerRegion, that.providerRegion) + && Objects.equals(publishedAt, that.publishedAt) + && Objects.equals(publishedBy, that.publishedBy) + && Objects.equals(setting, that.setting) + && Objects.equals(share, that.share) + && Objects.equals(status, that.status) + && Objects.equals(subtitle, that.subtitle) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(updatedById, that.updatedById); + } + + @Override + public int hashCode() { + return Objects.hash( + categories, + createdAt, + createdBy, + createdById, + exchangeIds, + gitRepo, + listingType, + name, + providerId, + providerRegion, + publishedAt, + publishedBy, + setting, + share, + status, + subtitle, + updatedAt, + updatedBy, + updatedById); + } + + @Override + public String toString() { + return new ToStringer(ListingSummaryPb.class) + .add("categories", categories) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("createdById", createdById) + .add("exchangeIds", exchangeIds) + .add("gitRepo", gitRepo) + .add("listingType", listingType) + .add("name", name) + .add("providerId", providerId) + .add("providerRegion", providerRegion) + .add("publishedAt", publishedAt) + .add("publishedBy", publishedBy) + .add("setting", setting) + .add("share", share) + .add("status", status) + .add("subtitle", subtitle) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .add("updatedById", updatedById) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTag.java index c1bfefb78..9be44d1cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTag.java @@ -4,18 +4,27 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListingTag.ListingTagSerializer.class) +@JsonDeserialize(using = ListingTag.ListingTagDeserializer.class) public class ListingTag { /** Tag name (enum) */ - @JsonProperty("tag_name") private ListingTagType tagName; /** String representation of the tag value. 
Values should be string literals (no complex types) */ - @JsonProperty("tag_values") private Collection tagValues; public ListingTag setTagName(ListingTagType tagName) { @@ -56,4 +65,39 @@ public String toString() { .add("tagValues", tagValues) .toString(); } + + ListingTagPb toPb() { + ListingTagPb pb = new ListingTagPb(); + pb.setTagName(tagName); + pb.setTagValues(tagValues); + + return pb; + } + + static ListingTag fromPb(ListingTagPb pb) { + ListingTag model = new ListingTag(); + model.setTagName(pb.getTagName()); + model.setTagValues(pb.getTagValues()); + + return model; + } + + public static class ListingTagSerializer extends JsonSerializer { + @Override + public void serialize(ListingTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListingTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListingTagDeserializer extends JsonDeserializer { + @Override + public ListingTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListingTagPb pb = mapper.readValue(p, ListingTagPb.class); + return ListingTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTagPb.java new file mode 100755 index 000000000..fc79091fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTagPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListingTagPb { + @JsonProperty("tag_name") + private ListingTagType tagName; + + @JsonProperty("tag_values") + private Collection tagValues; + + public ListingTagPb setTagName(ListingTagType tagName) { + this.tagName = tagName; + return this; + } + + public ListingTagType getTagName() { + return tagName; + } + + public ListingTagPb setTagValues(Collection tagValues) { + this.tagValues = tagValues; + return this; + } + + public Collection getTagValues() { + return tagValues; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingTagPb that = (ListingTagPb) o; + return Objects.equals(tagName, that.tagName) && Objects.equals(tagValues, that.tagValues); + } + + @Override + public int hashCode() { + return Objects.hash(tagName, tagValues); + } + + @Override + public String toString() { + return new ToStringer(ListingTagPb.class) + .add("tagName", tagName) + .add("tagValues", tagValues) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java index 8765d123b..3eabd88cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java @@ -4,73 +4,68 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PersonalizationRequest.PersonalizationRequestSerializer.class) +@JsonDeserialize(using = PersonalizationRequest.PersonalizationRequestDeserializer.class) public class PersonalizationRequest { /** */ - @JsonProperty("comment") private String comment; /** */ - @JsonProperty("consumer_region") private RegionInfo consumerRegion; /** contact info for the consumer requesting data or performing a listing installation */ - @JsonProperty("contact_info") private ContactInfo contactInfo; /** */ - @JsonProperty("created_at") private Long createdAt; /** */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("intended_use") private String intendedUse; /** */ - @JsonProperty("is_from_lighthouse") private Boolean isFromLighthouse; /** */ - @JsonProperty("listing_id") private String listingId; /** */ - @JsonProperty("listing_name") private String listingName; /** */ - @JsonProperty("metastore_id") private String metastoreId; /** */ - @JsonProperty("provider_id") private String providerId; /** */ - @JsonProperty("recipient_type") private DeltaSharingRecipientType recipientType; /** */ - @JsonProperty("share") private ShareInfo share; /** */ - @JsonProperty("status") private PersonalizationRequestStatus status; /** */ - @JsonProperty("status_message") private String statusMessage; /** */ - @JsonProperty("updated_at") private Long updatedAt; public PersonalizationRequest setComment(String comment) { @@ -282,4 +277,71 @@ public String 
toString() { .add("updatedAt", updatedAt) .toString(); } + + PersonalizationRequestPb toPb() { + PersonalizationRequestPb pb = new PersonalizationRequestPb(); + pb.setComment(comment); + pb.setConsumerRegion(consumerRegion); + pb.setContactInfo(contactInfo); + pb.setCreatedAt(createdAt); + pb.setId(id); + pb.setIntendedUse(intendedUse); + pb.setIsFromLighthouse(isFromLighthouse); + pb.setListingId(listingId); + pb.setListingName(listingName); + pb.setMetastoreId(metastoreId); + pb.setProviderId(providerId); + pb.setRecipientType(recipientType); + pb.setShare(share); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + pb.setUpdatedAt(updatedAt); + + return pb; + } + + static PersonalizationRequest fromPb(PersonalizationRequestPb pb) { + PersonalizationRequest model = new PersonalizationRequest(); + model.setComment(pb.getComment()); + model.setConsumerRegion(pb.getConsumerRegion()); + model.setContactInfo(pb.getContactInfo()); + model.setCreatedAt(pb.getCreatedAt()); + model.setId(pb.getId()); + model.setIntendedUse(pb.getIntendedUse()); + model.setIsFromLighthouse(pb.getIsFromLighthouse()); + model.setListingId(pb.getListingId()); + model.setListingName(pb.getListingName()); + model.setMetastoreId(pb.getMetastoreId()); + model.setProviderId(pb.getProviderId()); + model.setRecipientType(pb.getRecipientType()); + model.setShare(pb.getShare()); + model.setStatus(pb.getStatus()); + model.setStatusMessage(pb.getStatusMessage()); + model.setUpdatedAt(pb.getUpdatedAt()); + + return model; + } + + public static class PersonalizationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + PersonalizationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PersonalizationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PersonalizationRequestDeserializer + extends JsonDeserializer { + @Override + public PersonalizationRequest deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PersonalizationRequestPb pb = mapper.readValue(p, PersonalizationRequestPb.class); + return PersonalizationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequestPb.java new file mode 100755 index 000000000..22514a7c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequestPb.java @@ -0,0 +1,269 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PersonalizationRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("consumer_region") + private RegionInfo consumerRegion; + + @JsonProperty("contact_info") + private ContactInfo contactInfo; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("id") + private String id; + + @JsonProperty("intended_use") + private String intendedUse; + + @JsonProperty("is_from_lighthouse") + private Boolean isFromLighthouse; + + @JsonProperty("listing_id") + private String listingId; + + @JsonProperty("listing_name") + private String listingName; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("provider_id") + private String providerId; + + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + @JsonProperty("share") + private ShareInfo share; + + @JsonProperty("status") + private PersonalizationRequestStatus status; + 
+ @JsonProperty("status_message") + private String statusMessage; + + @JsonProperty("updated_at") + private Long updatedAt; + + public PersonalizationRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public PersonalizationRequestPb setConsumerRegion(RegionInfo consumerRegion) { + this.consumerRegion = consumerRegion; + return this; + } + + public RegionInfo getConsumerRegion() { + return consumerRegion; + } + + public PersonalizationRequestPb setContactInfo(ContactInfo contactInfo) { + this.contactInfo = contactInfo; + return this; + } + + public ContactInfo getContactInfo() { + return contactInfo; + } + + public PersonalizationRequestPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public PersonalizationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public PersonalizationRequestPb setIntendedUse(String intendedUse) { + this.intendedUse = intendedUse; + return this; + } + + public String getIntendedUse() { + return intendedUse; + } + + public PersonalizationRequestPb setIsFromLighthouse(Boolean isFromLighthouse) { + this.isFromLighthouse = isFromLighthouse; + return this; + } + + public Boolean getIsFromLighthouse() { + return isFromLighthouse; + } + + public PersonalizationRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public PersonalizationRequestPb setListingName(String listingName) { + this.listingName = listingName; + return this; + } + + public String getListingName() { + return listingName; + } + + public PersonalizationRequestPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public 
PersonalizationRequestPb setProviderId(String providerId) { + this.providerId = providerId; + return this; + } + + public String getProviderId() { + return providerId; + } + + public PersonalizationRequestPb setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public PersonalizationRequestPb setShare(ShareInfo share) { + this.share = share; + return this; + } + + public ShareInfo getShare() { + return share; + } + + public PersonalizationRequestPb setStatus(PersonalizationRequestStatus status) { + this.status = status; + return this; + } + + public PersonalizationRequestStatus getStatus() { + return status; + } + + public PersonalizationRequestPb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public PersonalizationRequestPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PersonalizationRequestPb that = (PersonalizationRequestPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(consumerRegion, that.consumerRegion) + && Objects.equals(contactInfo, that.contactInfo) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(id, that.id) + && Objects.equals(intendedUse, that.intendedUse) + && Objects.equals(isFromLighthouse, that.isFromLighthouse) + && Objects.equals(listingId, that.listingId) + && Objects.equals(listingName, that.listingName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(providerId, that.providerId) + && Objects.equals(recipientType, that.recipientType) + && Objects.equals(share, that.share) + && 
Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + consumerRegion, + contactInfo, + createdAt, + id, + intendedUse, + isFromLighthouse, + listingId, + listingName, + metastoreId, + providerId, + recipientType, + share, + status, + statusMessage, + updatedAt); + } + + @Override + public String toString() { + return new ToStringer(PersonalizationRequestPb.class) + .add("comment", comment) + .add("consumerRegion", consumerRegion) + .add("contactInfo", contactInfo) + .add("createdAt", createdAt) + .add("id", id) + .add("intendedUse", intendedUse) + .add("isFromLighthouse", isFromLighthouse) + .add("listingId", listingId) + .add("listingName", listingName) + .add("metastoreId", metastoreId) + .add("providerId", providerId) + .add("recipientType", recipientType) + .add("share", share) + .add("status", status) + .add("statusMessage", statusMessage) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboard.java index a44796fae..3f37ce2d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboard.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ProviderAnalyticsDashboard.ProviderAnalyticsDashboardSerializer.class) +@JsonDeserialize(using = ProviderAnalyticsDashboard.ProviderAnalyticsDashboardDeserializer.class) public class ProviderAnalyticsDashboard { /** */ - @JsonProperty("id") private String id; public ProviderAnalyticsDashboard setId(String id) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(ProviderAnalyticsDashboard.class).add("id", id).toString(); } + + ProviderAnalyticsDashboardPb toPb() { + ProviderAnalyticsDashboardPb pb = new ProviderAnalyticsDashboardPb(); + pb.setId(id); + + return pb; + } + + static ProviderAnalyticsDashboard fromPb(ProviderAnalyticsDashboardPb pb) { + ProviderAnalyticsDashboard model = new ProviderAnalyticsDashboard(); + model.setId(pb.getId()); + + return model; + } + + public static class ProviderAnalyticsDashboardSerializer + extends JsonSerializer { + @Override + public void serialize( + ProviderAnalyticsDashboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProviderAnalyticsDashboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProviderAnalyticsDashboardDeserializer + extends JsonDeserializer { + @Override + public ProviderAnalyticsDashboard deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProviderAnalyticsDashboardPb pb = mapper.readValue(p, ProviderAnalyticsDashboardPb.class); + return ProviderAnalyticsDashboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboardPb.java new file mode 100755 index 000000000..e11e1c5e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboardPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ProviderAnalyticsDashboardPb { + @JsonProperty("id") + private String id; + + public ProviderAnalyticsDashboardPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProviderAnalyticsDashboardPb that = (ProviderAnalyticsDashboardPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(ProviderAnalyticsDashboardPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java index 6baa7bce6..7235941af 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java @@ -21,7 +21,7 @@ public CreateExchangeFilterResponse create(CreateExchangeFilterRequest request) String path = "/api/2.0/marketplace-exchange/filters"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateExchangeFilterResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteExchangeFilterRequest request) { String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteExchangeFilterResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public ListExchangeFiltersResponse list(ListExchangeFiltersRequest request) { String path = "/api/2.0/marketplace-exchange/filters"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListExchangeFiltersResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest request) String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return 
apiClient.execute(req, UpdateExchangeFilterResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java index 5ac2520b3..95938edb6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java @@ -21,7 +21,7 @@ public AddExchangeForListingResponse addListingToExchange(AddExchangeForListingR String path = "/api/2.0/marketplace-exchange/exchanges-for-listing"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AddExchangeForListingResponse.class); @@ -35,7 +35,7 @@ public CreateExchangeResponse create(CreateExchangeRequest request) { String path = "/api/2.0/marketplace-exchange/exchanges"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateExchangeResponse.class); @@ -49,7 +49,7 @@ public void delete(DeleteExchangeRequest request) { String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteExchangeResponse.class); } catch (IOException e) { @@ -63,7 +63,7 @@ public void deleteListingFromExchange(RemoveExchangeForListingRequest request) { 
String.format("/api/2.0/marketplace-exchange/exchanges-for-listing/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, RemoveExchangeForListingResponse.class); } catch (IOException e) { @@ -76,7 +76,7 @@ public GetExchangeResponse get(GetExchangeRequest request) { String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetExchangeResponse.class); } catch (IOException e) { @@ -89,7 +89,7 @@ public ListExchangesResponse list(ListExchangesRequest request) { String path = "/api/2.0/marketplace-exchange/exchanges"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListExchangesResponse.class); } catch (IOException e) { @@ -103,7 +103,7 @@ public ListExchangesForListingResponse listExchangesForListing( String path = "/api/2.0/marketplace-exchange/exchanges-for-listing"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListExchangesForListingResponse.class); } catch (IOException e) { @@ -117,7 +117,7 @@ public ListListingsForExchangeResponse listListingsForExchange( String path = "/api/2.0/marketplace-exchange/listings-for-exchange"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListListingsForExchangeResponse.class); } catch 
(IOException e) { @@ -130,7 +130,7 @@ public UpdateExchangeResponse update(UpdateExchangeRequest request) { String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateExchangeResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java index e8fe25bac..adc5fe1b7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java @@ -21,7 +21,7 @@ public CreateFileResponse create(CreateFileRequest request) { String path = "/api/2.0/marketplace-provider/files"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateFileResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteFileRequest request) { String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteFileResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetFileResponse get(GetFileRequest request) { String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId()); try { Request req = new 
Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetFileResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListFilesResponse list(ListFilesRequest request) { String path = "/api/2.0/marketplace-provider/files"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFilesResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java index 4db7dd65e..9adbe567e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java @@ -4,65 +4,62 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ProviderInfo.ProviderInfoSerializer.class) +@JsonDeserialize(using = ProviderInfo.ProviderInfoDeserializer.class) public class ProviderInfo { /** */ - @JsonProperty("business_contact_email") 
private String businessContactEmail; /** */ - @JsonProperty("company_website_link") private String companyWebsiteLink; /** */ - @JsonProperty("dark_mode_icon_file_id") private String darkModeIconFileId; /** */ - @JsonProperty("dark_mode_icon_file_path") private String darkModeIconFilePath; /** */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("icon_file_id") private String iconFileId; /** */ - @JsonProperty("icon_file_path") private String iconFilePath; /** */ - @JsonProperty("id") private String id; /** is_featured is accessible by consumers only */ - @JsonProperty("is_featured") private Boolean isFeatured; /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("privacy_policy_link") private String privacyPolicyLink; /** published_by is only applicable to data aggregators (e.g. Crux) */ - @JsonProperty("published_by") private String publishedBy; /** */ - @JsonProperty("support_contact_email") private String supportContactEmail; /** */ - @JsonProperty("term_of_service_link") private String termOfServiceLink; public ProviderInfo setBusinessContactEmail(String businessContactEmail) { @@ -250,4 +247,63 @@ public String toString() { .add("termOfServiceLink", termOfServiceLink) .toString(); } + + ProviderInfoPb toPb() { + ProviderInfoPb pb = new ProviderInfoPb(); + pb.setBusinessContactEmail(businessContactEmail); + pb.setCompanyWebsiteLink(companyWebsiteLink); + pb.setDarkModeIconFileId(darkModeIconFileId); + pb.setDarkModeIconFilePath(darkModeIconFilePath); + pb.setDescription(description); + pb.setIconFileId(iconFileId); + pb.setIconFilePath(iconFilePath); + pb.setId(id); + pb.setIsFeatured(isFeatured); + pb.setName(name); + pb.setPrivacyPolicyLink(privacyPolicyLink); + pb.setPublishedBy(publishedBy); + pb.setSupportContactEmail(supportContactEmail); + pb.setTermOfServiceLink(termOfServiceLink); + + return pb; + } + + static ProviderInfo fromPb(ProviderInfoPb pb) { + ProviderInfo model = new ProviderInfo(); + 
model.setBusinessContactEmail(pb.getBusinessContactEmail()); + model.setCompanyWebsiteLink(pb.getCompanyWebsiteLink()); + model.setDarkModeIconFileId(pb.getDarkModeIconFileId()); + model.setDarkModeIconFilePath(pb.getDarkModeIconFilePath()); + model.setDescription(pb.getDescription()); + model.setIconFileId(pb.getIconFileId()); + model.setIconFilePath(pb.getIconFilePath()); + model.setId(pb.getId()); + model.setIsFeatured(pb.getIsFeatured()); + model.setName(pb.getName()); + model.setPrivacyPolicyLink(pb.getPrivacyPolicyLink()); + model.setPublishedBy(pb.getPublishedBy()); + model.setSupportContactEmail(pb.getSupportContactEmail()); + model.setTermOfServiceLink(pb.getTermOfServiceLink()); + + return model; + } + + public static class ProviderInfoSerializer extends JsonSerializer { + @Override + public void serialize(ProviderInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProviderInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProviderInfoDeserializer extends JsonDeserializer { + @Override + public ProviderInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProviderInfoPb pb = mapper.readValue(p, ProviderInfoPb.class); + return ProviderInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfoPb.java new file mode 100755 index 000000000..9b8209dad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfoPb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ProviderInfoPb { + @JsonProperty("business_contact_email") + private String businessContactEmail; + + @JsonProperty("company_website_link") + private String companyWebsiteLink; + + @JsonProperty("dark_mode_icon_file_id") + private String darkModeIconFileId; + + @JsonProperty("dark_mode_icon_file_path") + private String darkModeIconFilePath; + + @JsonProperty("description") + private String description; + + @JsonProperty("icon_file_id") + private String iconFileId; + + @JsonProperty("icon_file_path") + private String iconFilePath; + + @JsonProperty("id") + private String id; + + @JsonProperty("is_featured") + private Boolean isFeatured; + + @JsonProperty("name") + private String name; + + @JsonProperty("privacy_policy_link") + private String privacyPolicyLink; + + @JsonProperty("published_by") + private String publishedBy; + + @JsonProperty("support_contact_email") + private String supportContactEmail; + + @JsonProperty("term_of_service_link") + private String termOfServiceLink; + + public ProviderInfoPb setBusinessContactEmail(String businessContactEmail) { + this.businessContactEmail = businessContactEmail; + return this; + } + + public String getBusinessContactEmail() { + return businessContactEmail; + } + + public ProviderInfoPb setCompanyWebsiteLink(String companyWebsiteLink) { + this.companyWebsiteLink = companyWebsiteLink; + return this; + } + + public String getCompanyWebsiteLink() { + return companyWebsiteLink; + } + + public ProviderInfoPb setDarkModeIconFileId(String darkModeIconFileId) { + this.darkModeIconFileId = darkModeIconFileId; + return this; + } + + public String getDarkModeIconFileId() { + return darkModeIconFileId; + } + + public ProviderInfoPb setDarkModeIconFilePath(String darkModeIconFilePath) { + 
this.darkModeIconFilePath = darkModeIconFilePath; + return this; + } + + public String getDarkModeIconFilePath() { + return darkModeIconFilePath; + } + + public ProviderInfoPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ProviderInfoPb setIconFileId(String iconFileId) { + this.iconFileId = iconFileId; + return this; + } + + public String getIconFileId() { + return iconFileId; + } + + public ProviderInfoPb setIconFilePath(String iconFilePath) { + this.iconFilePath = iconFilePath; + return this; + } + + public String getIconFilePath() { + return iconFilePath; + } + + public ProviderInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ProviderInfoPb setIsFeatured(Boolean isFeatured) { + this.isFeatured = isFeatured; + return this; + } + + public Boolean getIsFeatured() { + return isFeatured; + } + + public ProviderInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ProviderInfoPb setPrivacyPolicyLink(String privacyPolicyLink) { + this.privacyPolicyLink = privacyPolicyLink; + return this; + } + + public String getPrivacyPolicyLink() { + return privacyPolicyLink; + } + + public ProviderInfoPb setPublishedBy(String publishedBy) { + this.publishedBy = publishedBy; + return this; + } + + public String getPublishedBy() { + return publishedBy; + } + + public ProviderInfoPb setSupportContactEmail(String supportContactEmail) { + this.supportContactEmail = supportContactEmail; + return this; + } + + public String getSupportContactEmail() { + return supportContactEmail; + } + + public ProviderInfoPb setTermOfServiceLink(String termOfServiceLink) { + this.termOfServiceLink = termOfServiceLink; + return this; + } + + public String getTermOfServiceLink() { + return termOfServiceLink; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProviderInfoPb that = (ProviderInfoPb) o; + return Objects.equals(businessContactEmail, that.businessContactEmail) + && Objects.equals(companyWebsiteLink, that.companyWebsiteLink) + && Objects.equals(darkModeIconFileId, that.darkModeIconFileId) + && Objects.equals(darkModeIconFilePath, that.darkModeIconFilePath) + && Objects.equals(description, that.description) + && Objects.equals(iconFileId, that.iconFileId) + && Objects.equals(iconFilePath, that.iconFilePath) + && Objects.equals(id, that.id) + && Objects.equals(isFeatured, that.isFeatured) + && Objects.equals(name, that.name) + && Objects.equals(privacyPolicyLink, that.privacyPolicyLink) + && Objects.equals(publishedBy, that.publishedBy) + && Objects.equals(supportContactEmail, that.supportContactEmail) + && Objects.equals(termOfServiceLink, that.termOfServiceLink); + } + + @Override + public int hashCode() { + return Objects.hash( + businessContactEmail, + companyWebsiteLink, + darkModeIconFileId, + darkModeIconFilePath, + description, + iconFileId, + iconFilePath, + id, + isFeatured, + name, + privacyPolicyLink, + publishedBy, + supportContactEmail, + termOfServiceLink); + } + + @Override + public String toString() { + return new ToStringer(ProviderInfoPb.class) + .add("businessContactEmail", businessContactEmail) + .add("companyWebsiteLink", companyWebsiteLink) + .add("darkModeIconFileId", darkModeIconFileId) + .add("darkModeIconFilePath", darkModeIconFilePath) + .add("description", description) + .add("iconFileId", iconFileId) + .add("iconFilePath", iconFilePath) + .add("id", id) + .add("isFeatured", isFeatured) + .add("name", name) + .add("privacyPolicyLink", privacyPolicyLink) + .add("publishedBy", publishedBy) + .add("supportContactEmail", supportContactEmail) + .add("termOfServiceLink", termOfServiceLink) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java index ed54330ad..9d1382446 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java @@ -21,7 +21,7 @@ public CreateListingResponse create(CreateListingRequest request) { String path = "/api/2.0/marketplace-provider/listing"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateListingResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteListingRequest request) { String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteListingResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetListingResponse get(GetListingRequest request) { String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetListingResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public GetListingsResponse list(GetListingsRequest request) { String path = "/api/2.0/marketplace-provider/listings"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetListingsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public UpdateListingResponse update(UpdateListingRequest request) { String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateListingResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java index a268b3199..21cfa5ab7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java @@ -22,7 +22,7 @@ public ListAllPersonalizationRequestsResponse list( String path = "/api/2.0/marketplace-provider/personalization-requests"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAllPersonalizationRequestsResponse.class); } catch (IOException e) { @@ -38,7 +38,7 @@ public UpdatePersonalizationRequestResponse update(UpdatePersonalizationRequestR request.getListingId(), request.getRequestId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, 
UpdatePersonalizationRequestResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java index 08bb2d3a6..0a0c82aa1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java @@ -60,7 +60,7 @@ public UpdateProviderAnalyticsDashboardResponse update( String.format("/api/2.0/marketplace-provider/analytics_dashboard/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateProviderAnalyticsDashboardResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java index 618decdf9..14e0d0139 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java @@ -21,7 +21,7 @@ public CreateProviderResponse create(CreateProviderRequest request) { String path = "/api/2.0/marketplace-provider/provider"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateProviderResponse.class); @@ 
-35,7 +35,7 @@ public void delete(DeleteProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteProviderResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetProviderResponse get(GetProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetProviderResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListProvidersResponse list(ListProvidersRequest request) { String path = "/api/2.0/marketplace-provider/providers"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListProvidersResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public UpdateProviderResponse update(UpdateProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateProviderResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java index 4e7cecaca..831135a6c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RegionInfo.RegionInfoSerializer.class) +@JsonDeserialize(using = RegionInfo.RegionInfoDeserializer.class) public class RegionInfo { /** */ - @JsonProperty("cloud") private String cloud; /** */ - @JsonProperty("region") private String region; public RegionInfo setCloud(String cloud) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(RegionInfo.class).add("cloud", cloud).add("region", region).toString(); } + + RegionInfoPb toPb() { + RegionInfoPb pb = new RegionInfoPb(); + pb.setCloud(cloud); + pb.setRegion(region); + + return pb; + } + + static RegionInfo fromPb(RegionInfoPb pb) { + RegionInfo model = new RegionInfo(); + model.setCloud(pb.getCloud()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class RegionInfoSerializer extends JsonSerializer { + @Override + public void serialize(RegionInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegionInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class RegionInfoDeserializer extends JsonDeserializer { + @Override + public RegionInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegionInfoPb pb = mapper.readValue(p, RegionInfoPb.class); + return RegionInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfoPb.java new file mode 100755 index 000000000..218094410 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfoPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegionInfoPb { + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("region") + private String region; + + public RegionInfoPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public RegionInfoPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegionInfoPb that = (RegionInfoPb) o; + return Objects.equals(cloud, that.cloud) && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash(cloud, region); + } + + @Override + public String toString() { + return new ToStringer(RegionInfoPb.class).add("cloud", cloud).add("region", 
region).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java index 47162e4f5..2b97843ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Remove an exchange for listing */ @Generated +@JsonSerialize( + using = RemoveExchangeForListingRequest.RemoveExchangeForListingRequestSerializer.class) +@JsonDeserialize( + using = RemoveExchangeForListingRequest.RemoveExchangeForListingRequestDeserializer.class) public class RemoveExchangeForListingRequest { /** */ - @JsonIgnore private String id; + private String id; public RemoveExchangeForListingRequest setId(String id) { this.id = id; @@ -39,4 +52,42 @@ public int hashCode() { public String toString() { return new ToStringer(RemoveExchangeForListingRequest.class).add("id", id).toString(); } + + RemoveExchangeForListingRequestPb toPb() { + RemoveExchangeForListingRequestPb pb = new RemoveExchangeForListingRequestPb(); + 
pb.setId(id); + + return pb; + } + + static RemoveExchangeForListingRequest fromPb(RemoveExchangeForListingRequestPb pb) { + RemoveExchangeForListingRequest model = new RemoveExchangeForListingRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class RemoveExchangeForListingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RemoveExchangeForListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RemoveExchangeForListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RemoveExchangeForListingRequestDeserializer + extends JsonDeserializer { + @Override + public RemoveExchangeForListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RemoveExchangeForListingRequestPb pb = + mapper.readValue(p, RemoveExchangeForListingRequestPb.class); + return RemoveExchangeForListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequestPb.java new file mode 100755 index 000000000..340028bfd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Remove an exchange for listing */ +@Generated +class RemoveExchangeForListingRequestPb { + @JsonIgnore private String id; + + public RemoveExchangeForListingRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RemoveExchangeForListingRequestPb that = (RemoveExchangeForListingRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(RemoveExchangeForListingRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java index 02a6e7364..a67d0a444 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = RemoveExchangeForListingResponse.RemoveExchangeForListingResponseSerializer.class) +@JsonDeserialize( + using = RemoveExchangeForListingResponse.RemoveExchangeForListingResponseDeserializer.class) public class RemoveExchangeForListingResponse { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(RemoveExchangeForListingResponse.class).toString(); } + + RemoveExchangeForListingResponsePb toPb() { + RemoveExchangeForListingResponsePb pb = new RemoveExchangeForListingResponsePb(); + + return pb; + } + + static RemoveExchangeForListingResponse fromPb(RemoveExchangeForListingResponsePb pb) { + RemoveExchangeForListingResponse model = new RemoveExchangeForListingResponse(); + + return model; + } + + public static class RemoveExchangeForListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RemoveExchangeForListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RemoveExchangeForListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RemoveExchangeForListingResponseDeserializer + extends JsonDeserializer { + @Override + public RemoveExchangeForListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RemoveExchangeForListingResponsePb pb = + mapper.readValue(p, RemoveExchangeForListingResponsePb.class); + return RemoveExchangeForListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponsePb.java new file mode 100755 index 000000000..bc99a0021 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RemoveExchangeForListingResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RemoveExchangeForListingResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java index 988491334..23854feda 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RepoInfo.RepoInfoSerializer.class) +@JsonDeserialize(using = RepoInfo.RepoInfoDeserializer.class) public class RepoInfo { /** the git repo url e.g. https://github.com/databrickslabs/dolly.git */ - @JsonProperty("git_repo_url") private String gitRepoUrl; public RepoInfo setGitRepoUrl(String gitRepoUrl) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(RepoInfo.class).add("gitRepoUrl", gitRepoUrl).toString(); } + + RepoInfoPb toPb() { + RepoInfoPb pb = new RepoInfoPb(); + pb.setGitRepoUrl(gitRepoUrl); + + return pb; + } + + static RepoInfo fromPb(RepoInfoPb pb) { + RepoInfo model = new RepoInfo(); + model.setGitRepoUrl(pb.getGitRepoUrl()); + + return model; + } + + public static class RepoInfoSerializer extends JsonSerializer { + @Override + public void serialize(RepoInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoInfoDeserializer extends JsonDeserializer { + @Override + public RepoInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoInfoPb pb = mapper.readValue(p, RepoInfoPb.class); + return RepoInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfoPb.java new file mode 100755 index 000000000..03d099824 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfoPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RepoInfoPb { + @JsonProperty("git_repo_url") + private String gitRepoUrl; + + public RepoInfoPb setGitRepoUrl(String gitRepoUrl) { + this.gitRepoUrl = gitRepoUrl; + return this; + } + + public String getGitRepoUrl() { + return gitRepoUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoInfoPb that = (RepoInfoPb) o; + return Objects.equals(gitRepoUrl, that.gitRepoUrl); + } + + @Override + public int hashCode() { + return Objects.hash(gitRepoUrl); + } + + @Override + public String toString() { + return new ToStringer(RepoInfoPb.class).add("gitRepoUrl", gitRepoUrl).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java index 33a4ad683..c259cb938 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java @@ -4,20 +4,29 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RepoInstallation.RepoInstallationSerializer.class) +@JsonDeserialize(using = RepoInstallation.RepoInstallationDeserializer.class) public class RepoInstallation { /** the user-specified repo name for their installed git repo listing */ - @JsonProperty("repo_name") private String repoName; /** * refers to the full url file path that navigates the user to the repo's entrypoint (e.g. 
a * README.md file, or the repo file view in the unified UI) should just be a relative path */ - @JsonProperty("repo_path") private String repoPath; public RepoInstallation setRepoName(String repoName) { @@ -58,4 +67,40 @@ public String toString() { .add("repoPath", repoPath) .toString(); } + + RepoInstallationPb toPb() { + RepoInstallationPb pb = new RepoInstallationPb(); + pb.setRepoName(repoName); + pb.setRepoPath(repoPath); + + return pb; + } + + static RepoInstallation fromPb(RepoInstallationPb pb) { + RepoInstallation model = new RepoInstallation(); + model.setRepoName(pb.getRepoName()); + model.setRepoPath(pb.getRepoPath()); + + return model; + } + + public static class RepoInstallationSerializer extends JsonSerializer { + @Override + public void serialize(RepoInstallation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoInstallationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoInstallationDeserializer extends JsonDeserializer { + @Override + public RepoInstallation deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoInstallationPb pb = mapper.readValue(p, RepoInstallationPb.class); + return RepoInstallation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallationPb.java new file mode 100755 index 000000000..0239453e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallationPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RepoInstallationPb { + @JsonProperty("repo_name") + private String repoName; + + @JsonProperty("repo_path") + private String repoPath; + + public RepoInstallationPb setRepoName(String repoName) { + this.repoName = repoName; + return this; + } + + public String getRepoName() { + return repoName; + } + + public RepoInstallationPb setRepoPath(String repoPath) { + this.repoPath = repoPath; + return this; + } + + public String getRepoPath() { + return repoPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoInstallationPb that = (RepoInstallationPb) o; + return Objects.equals(repoName, that.repoName) && Objects.equals(repoPath, that.repoPath); + } + + @Override + public int hashCode() { + return Objects.hash(repoName, repoPath); + } + + @Override + public String toString() { + return new ToStringer(RepoInstallationPb.class) + .add("repoName", repoName) + .add("repoPath", repoPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java index 5b8d3a47a..3f3ce300d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java @@ -3,53 +3,47 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Search listings */ @Generated +@JsonSerialize(using = SearchListingsRequest.SearchListingsRequestSerializer.class) +@JsonDeserialize(using = SearchListingsRequest.SearchListingsRequestDeserializer.class) public class SearchListingsRequest { /** Matches any of the following asset types */ - @JsonIgnore - @QueryParam("assets") private Collection assets; /** Matches any of the following categories */ - @JsonIgnore - @QueryParam("categories") private Collection categories; /** */ - @JsonIgnore - @QueryParam("is_free") private Boolean isFree; /** */ - @JsonIgnore - @QueryParam("is_private_exchange") private Boolean isPrivateExchange; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Matches any of the following provider ids */ - @JsonIgnore - @QueryParam("provider_ids") private Collection providerIds; /** Fuzzy matches query */ - @JsonIgnore - @QueryParam("query") private String query; public SearchListingsRequest setAssets(Collection assets) { @@ -158,4 +152,55 @@ public String toString() { .add("query", query) .toString(); } + + SearchListingsRequestPb toPb() { + SearchListingsRequestPb pb = new SearchListingsRequestPb(); + pb.setAssets(assets); + pb.setCategories(categories); + pb.setIsFree(isFree); + pb.setIsPrivateExchange(isPrivateExchange); + pb.setPageSize(pageSize); + 
pb.setPageToken(pageToken); + pb.setProviderIds(providerIds); + pb.setQuery(query); + + return pb; + } + + static SearchListingsRequest fromPb(SearchListingsRequestPb pb) { + SearchListingsRequest model = new SearchListingsRequest(); + model.setAssets(pb.getAssets()); + model.setCategories(pb.getCategories()); + model.setIsFree(pb.getIsFree()); + model.setIsPrivateExchange(pb.getIsPrivateExchange()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setProviderIds(pb.getProviderIds()); + model.setQuery(pb.getQuery()); + + return model; + } + + public static class SearchListingsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchListingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchListingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchListingsRequestDeserializer + extends JsonDeserializer { + @Override + public SearchListingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchListingsRequestPb pb = mapper.readValue(p, SearchListingsRequestPb.class); + return SearchListingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequestPb.java new file mode 100755 index 000000000..6aa5bfa8e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequestPb.java @@ -0,0 +1,153 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Search listings */ +@Generated +class SearchListingsRequestPb { + @JsonIgnore + @QueryParam("assets") + private Collection assets; + + @JsonIgnore + @QueryParam("categories") + private Collection categories; + + @JsonIgnore + @QueryParam("is_free") + private Boolean isFree; + + @JsonIgnore + @QueryParam("is_private_exchange") + private Boolean isPrivateExchange; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("provider_ids") + private Collection providerIds; + + @JsonIgnore + @QueryParam("query") + private String query; + + public SearchListingsRequestPb setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public SearchListingsRequestPb setCategories(Collection categories) { + this.categories = categories; + return this; + } + + public Collection getCategories() { + return categories; + } + + public SearchListingsRequestPb setIsFree(Boolean isFree) { + this.isFree = isFree; + return this; + } + + public Boolean getIsFree() { + return isFree; + } + + public SearchListingsRequestPb setIsPrivateExchange(Boolean isPrivateExchange) { + this.isPrivateExchange = isPrivateExchange; + return this; + } + + public Boolean getIsPrivateExchange() { + return isPrivateExchange; + } + + public SearchListingsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public SearchListingsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public 
String getPageToken() { + return pageToken; + } + + public SearchListingsRequestPb setProviderIds(Collection providerIds) { + this.providerIds = providerIds; + return this; + } + + public Collection getProviderIds() { + return providerIds; + } + + public SearchListingsRequestPb setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchListingsRequestPb that = (SearchListingsRequestPb) o; + return Objects.equals(assets, that.assets) + && Objects.equals(categories, that.categories) + && Objects.equals(isFree, that.isFree) + && Objects.equals(isPrivateExchange, that.isPrivateExchange) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(providerIds, that.providerIds) + && Objects.equals(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, categories, isFree, isPrivateExchange, pageSize, pageToken, providerIds, query); + } + + @Override + public String toString() { + return new ToStringer(SearchListingsRequestPb.class) + .add("assets", assets) + .add("categories", categories) + .add("isFree", isFree) + .add("isPrivateExchange", isPrivateExchange) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("providerIds", providerIds) + .add("query", query) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java index d0c714e34..826d4d020 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java @@ -4,18 +4,27 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchListingsResponse.SearchListingsResponseSerializer.class) +@JsonDeserialize(using = SearchListingsResponse.SearchListingsResponseDeserializer.class) public class SearchListingsResponse { /** */ - @JsonProperty("listings") private Collection listings; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public SearchListingsResponse setListings(Collection listings) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + SearchListingsResponsePb toPb() { + SearchListingsResponsePb pb = new SearchListingsResponsePb(); + pb.setListings(listings); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static SearchListingsResponse fromPb(SearchListingsResponsePb pb) { + SearchListingsResponse model = new SearchListingsResponse(); + model.setListings(pb.getListings()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class SearchListingsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchListingsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchListingsResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchListingsResponseDeserializer + extends JsonDeserializer { + @Override + public SearchListingsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchListingsResponsePb pb = mapper.readValue(p, SearchListingsResponsePb.class); + return SearchListingsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponsePb.java new file mode 100755 index 000000000..deddd9401 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchListingsResponsePb { + @JsonProperty("listings") + private Collection listings; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchListingsResponsePb setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public SearchListingsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchListingsResponsePb that = (SearchListingsResponsePb) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchListingsResponsePb.class) + .add("listings", listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java index 5b656447a..90836dd5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ShareInfo.ShareInfoSerializer.class) +@JsonDeserialize(using = ShareInfo.ShareInfoDeserializer.class) public class ShareInfo { /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("type") private ListingShareType typeValue; public ShareInfo setName(String name) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(ShareInfo.class).add("name", name).add("typeValue", typeValue).toString(); } + + ShareInfoPb toPb() { + ShareInfoPb pb = new ShareInfoPb(); + pb.setName(name); + pb.setType(typeValue); + + return pb; + } + + static ShareInfo fromPb(ShareInfoPb pb) { + ShareInfo model = new ShareInfo(); + model.setName(pb.getName()); + model.setType(pb.getType()); + + return model; + } + + public static class ShareInfoSerializer extends JsonSerializer { + @Override + public void serialize(ShareInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ShareInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ShareInfoDeserializer extends JsonDeserializer { + @Override + public ShareInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ShareInfoPb pb = mapper.readValue(p, ShareInfoPb.class); + return ShareInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfoPb.java new file mode 100755 index 000000000..7f178fe0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfoPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ShareInfoPb { + @JsonProperty("name") + private String name; + + @JsonProperty("type") + private ListingShareType typeValue; + + public ShareInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ShareInfoPb setType(ListingShareType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ListingShareType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShareInfoPb that = (ShareInfoPb) o; + return Objects.equals(name, that.name) && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(name, typeValue); + } + + @Override + public String toString() { + return new ToStringer(ShareInfoPb.class) + .add("name", name) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java index 6cedfa277..6c28014ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SharedDataObject.SharedDataObjectSerializer.class) +@JsonDeserialize(using = SharedDataObject.SharedDataObjectDeserializer.class) public class SharedDataObject { /** The type of the data object. 
Could be one of: TABLE, SCHEMA, NOTEBOOK_FILE, MODEL, VOLUME */ - @JsonProperty("data_object_type") private String dataObjectType; /** Name of the shared object */ - @JsonProperty("name") private String name; public SharedDataObject setDataObjectType(String dataObjectType) { @@ -55,4 +64,40 @@ public String toString() { .add("name", name) .toString(); } + + SharedDataObjectPb toPb() { + SharedDataObjectPb pb = new SharedDataObjectPb(); + pb.setDataObjectType(dataObjectType); + pb.setName(name); + + return pb; + } + + static SharedDataObject fromPb(SharedDataObjectPb pb) { + SharedDataObject model = new SharedDataObject(); + model.setDataObjectType(pb.getDataObjectType()); + model.setName(pb.getName()); + + return model; + } + + public static class SharedDataObjectSerializer extends JsonSerializer { + @Override + public void serialize(SharedDataObject value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SharedDataObjectPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SharedDataObjectDeserializer extends JsonDeserializer { + @Override + public SharedDataObject deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SharedDataObjectPb pb = mapper.readValue(p, SharedDataObjectPb.class); + return SharedDataObject.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObjectPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObjectPb.java new file mode 100755 index 000000000..10f7c7acc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObjectPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SharedDataObjectPb { + @JsonProperty("data_object_type") + private String dataObjectType; + + @JsonProperty("name") + private String name; + + public SharedDataObjectPb setDataObjectType(String dataObjectType) { + this.dataObjectType = dataObjectType; + return this; + } + + public String getDataObjectType() { + return dataObjectType; + } + + public SharedDataObjectPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharedDataObjectPb that = (SharedDataObjectPb) o; + return Objects.equals(dataObjectType, that.dataObjectType) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(dataObjectType, name); + } + + @Override + public String toString() { + return new ToStringer(SharedDataObjectPb.class) + .add("dataObjectType", dataObjectType) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java index 646ef5e65..2b0832266 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenDetail.TokenDetailSerializer.class) +@JsonDeserialize(using = TokenDetail.TokenDetailDeserializer.class) public class TokenDetail { /** */ - @JsonProperty("bearerToken") private String bearerToken; /** */ - @JsonProperty("endpoint") private String endpoint; /** */ - @JsonProperty("expirationTime") private String expirationTime; /** * These field names must follow the delta sharing protocol. Original message: * RetrieveToken.Response in managed-catalog/api/messages/recipient.proto */ - @JsonProperty("shareCredentialsVersion") private Long shareCredentialsVersion; public TokenDetail setBearerToken(String bearerToken) { @@ -89,4 +96,43 @@ public String toString() { .add("shareCredentialsVersion", shareCredentialsVersion) .toString(); } + + TokenDetailPb toPb() { + TokenDetailPb pb = new TokenDetailPb(); + pb.setBearerToken(bearerToken); + pb.setEndpoint(endpoint); + pb.setExpirationTime(expirationTime); + pb.setShareCredentialsVersion(shareCredentialsVersion); + + return pb; + } + + static TokenDetail fromPb(TokenDetailPb pb) { + TokenDetail model = new TokenDetail(); + model.setBearerToken(pb.getBearerToken()); + model.setEndpoint(pb.getEndpoint()); + model.setExpirationTime(pb.getExpirationTime()); + model.setShareCredentialsVersion(pb.getShareCredentialsVersion()); + + return model; + } + + public static class TokenDetailSerializer extends JsonSerializer { + @Override + public void serialize(TokenDetail value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + TokenDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenDetailDeserializer extends JsonDeserializer { + @Override + public TokenDetail deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenDetailPb pb = mapper.readValue(p, TokenDetailPb.class); + return TokenDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetailPb.java new file mode 100755 index 000000000..c4472048f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetailPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenDetailPb { + @JsonProperty("bearerToken") + private String bearerToken; + + @JsonProperty("endpoint") + private String endpoint; + + @JsonProperty("expirationTime") + private String expirationTime; + + @JsonProperty("shareCredentialsVersion") + private Long shareCredentialsVersion; + + public TokenDetailPb setBearerToken(String bearerToken) { + this.bearerToken = bearerToken; + return this; + } + + public String getBearerToken() { + return bearerToken; + } + + public TokenDetailPb setEndpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } + + public String getEndpoint() { + return endpoint; + } + + public TokenDetailPb setExpirationTime(String expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public String 
getExpirationTime() { + return expirationTime; + } + + public TokenDetailPb setShareCredentialsVersion(Long shareCredentialsVersion) { + this.shareCredentialsVersion = shareCredentialsVersion; + return this; + } + + public Long getShareCredentialsVersion() { + return shareCredentialsVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenDetailPb that = (TokenDetailPb) o; + return Objects.equals(bearerToken, that.bearerToken) + && Objects.equals(endpoint, that.endpoint) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(shareCredentialsVersion, that.shareCredentialsVersion); + } + + @Override + public int hashCode() { + return Objects.hash(bearerToken, endpoint, expirationTime, shareCredentialsVersion); + } + + @Override + public String toString() { + return new ToStringer(TokenDetailPb.class) + .add("bearerToken", bearerToken) + .add("endpoint", endpoint) + .add("expirationTime", expirationTime) + .add("shareCredentialsVersion", shareCredentialsVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfo.java index 42143c446..6920fca10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfo.java @@ -4,40 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenInfo.TokenInfoSerializer.class) +@JsonDeserialize(using = TokenInfo.TokenInfoDeserializer.class) public class TokenInfo { /** * Full activation url to retrieve the access token. It will be empty if the token is already * retrieved. */ - @JsonProperty("activation_url") private String activationUrl; /** Time at which this Recipient Token was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of Recipient Token creator. */ - @JsonProperty("created_by") private String createdBy; /** Expiration timestamp of the token in epoch milliseconds. */ - @JsonProperty("expiration_time") private Long expirationTime; /** Unique id of the Recipient Token. */ - @JsonProperty("id") private String id; /** Time at which this Recipient Token was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of Recipient Token updater. 
*/ - @JsonProperty("updated_by") private String updatedBy; public TokenInfo setActivationUrl(String activationUrl) { @@ -135,4 +139,49 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + TokenInfoPb toPb() { + TokenInfoPb pb = new TokenInfoPb(); + pb.setActivationUrl(activationUrl); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setExpirationTime(expirationTime); + pb.setId(id); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static TokenInfo fromPb(TokenInfoPb pb) { + TokenInfo model = new TokenInfo(); + model.setActivationUrl(pb.getActivationUrl()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setExpirationTime(pb.getExpirationTime()); + model.setId(pb.getId()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class TokenInfoSerializer extends JsonSerializer { + @Override + public void serialize(TokenInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenInfoDeserializer extends JsonDeserializer { + @Override + public TokenInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenInfoPb pb = mapper.readValue(p, TokenInfoPb.class); + return TokenInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfoPb.java new file mode 100755 index 000000000..7a7548d3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfoPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenInfoPb { + @JsonProperty("activation_url") + private String activationUrl; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("id") + private String id; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public TokenInfoPb setActivationUrl(String activationUrl) { + this.activationUrl = activationUrl; + return this; + } + + public String getActivationUrl() { + return activationUrl; + } + + public TokenInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public TokenInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public TokenInfoPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public 
TokenInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public TokenInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public TokenInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenInfoPb that = (TokenInfoPb) o; + return Objects.equals(activationUrl, that.activationUrl) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(id, that.id) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + activationUrl, createdAt, createdBy, expirationTime, id, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(TokenInfoPb.class) + .add("activationUrl", activationUrl) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("expirationTime", expirationTime) + .add("id", id) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequest.java index 88d88b7ca..3a3246b35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExchangeFilterRequest.UpdateExchangeFilterRequestSerializer.class) +@JsonDeserialize(using = UpdateExchangeFilterRequest.UpdateExchangeFilterRequestDeserializer.class) public class UpdateExchangeFilterRequest { /** */ - @JsonProperty("filter") private ExchangeFilter filter; /** */ - @JsonIgnore private String id; + private String id; public UpdateExchangeFilterRequest setFilter(ExchangeFilter filter) { this.filter = filter; @@ -55,4 +64,43 @@ public String toString() { .add("id", id) .toString(); } + + UpdateExchangeFilterRequestPb toPb() { + UpdateExchangeFilterRequestPb pb = new UpdateExchangeFilterRequestPb(); + pb.setFilter(filter); + pb.setId(id); + + return pb; + } + + static UpdateExchangeFilterRequest fromPb(UpdateExchangeFilterRequestPb pb) { + UpdateExchangeFilterRequest model = new UpdateExchangeFilterRequest(); + model.setFilter(pb.getFilter()); + model.setId(pb.getId()); + + return model; + } + + public static class UpdateExchangeFilterRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExchangeFilterRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExchangeFilterRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, 
gen); + } + } + + public static class UpdateExchangeFilterRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateExchangeFilterRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExchangeFilterRequestPb pb = mapper.readValue(p, UpdateExchangeFilterRequestPb.class); + return UpdateExchangeFilterRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequestPb.java new file mode 100755 index 000000000..1045946b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExchangeFilterRequestPb { + @JsonProperty("filter") + private ExchangeFilter filter; + + @JsonIgnore private String id; + + public UpdateExchangeFilterRequestPb setFilter(ExchangeFilter filter) { + this.filter = filter; + return this; + } + + public ExchangeFilter getFilter() { + return filter; + } + + public UpdateExchangeFilterRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExchangeFilterRequestPb that = (UpdateExchangeFilterRequestPb) o; + return Objects.equals(filter, that.filter) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(filter, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateExchangeFilterRequestPb.class) + .add("filter", filter) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponse.java index fd76d6eb7..1d39f9349 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponse.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExchangeFilterResponse.UpdateExchangeFilterResponseSerializer.class) +@JsonDeserialize( + using = UpdateExchangeFilterResponse.UpdateExchangeFilterResponseDeserializer.class) public class UpdateExchangeFilterResponse { /** */ - @JsonProperty("filter") private ExchangeFilter filter; public UpdateExchangeFilterResponse setFilter(ExchangeFilter filter) { @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateExchangeFilterResponse.class).add("filter", filter).toString(); } + + UpdateExchangeFilterResponsePb toPb() { + UpdateExchangeFilterResponsePb pb = new UpdateExchangeFilterResponsePb(); + pb.setFilter(filter); + + return pb; + } + + static UpdateExchangeFilterResponse fromPb(UpdateExchangeFilterResponsePb pb) { + UpdateExchangeFilterResponse model = new UpdateExchangeFilterResponse(); + model.setFilter(pb.getFilter()); + + return model; + } + + public static class UpdateExchangeFilterResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExchangeFilterResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExchangeFilterResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExchangeFilterResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateExchangeFilterResponse deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExchangeFilterResponsePb pb = mapper.readValue(p, UpdateExchangeFilterResponsePb.class); + return UpdateExchangeFilterResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponsePb.java new file mode 100755 index 000000000..7108b3f39 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExchangeFilterResponsePb { + @JsonProperty("filter") + private ExchangeFilter filter; + + public UpdateExchangeFilterResponsePb setFilter(ExchangeFilter filter) { + this.filter = filter; + return this; + } + + public ExchangeFilter getFilter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExchangeFilterResponsePb that = (UpdateExchangeFilterResponsePb) o; + return Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(filter); + } + + @Override + public String toString() { + return new ToStringer(UpdateExchangeFilterResponsePb.class).add("filter", filter).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequest.java index 3b61fe4dd..8b6888830 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExchangeRequest.UpdateExchangeRequestSerializer.class) +@JsonDeserialize(using = UpdateExchangeRequest.UpdateExchangeRequestDeserializer.class) public class UpdateExchangeRequest { /** */ - @JsonProperty("exchange") private Exchange exchange; /** */ - @JsonIgnore private String id; + private String id; public UpdateExchangeRequest setExchange(Exchange exchange) { this.exchange = exchange; @@ -55,4 +64,43 @@ public String toString() { .add("id", id) .toString(); } + + UpdateExchangeRequestPb toPb() { + UpdateExchangeRequestPb pb = new UpdateExchangeRequestPb(); + pb.setExchange(exchange); + pb.setId(id); + + return pb; + } + + static UpdateExchangeRequest fromPb(UpdateExchangeRequestPb pb) { + UpdateExchangeRequest model = new UpdateExchangeRequest(); + model.setExchange(pb.getExchange()); + 
model.setId(pb.getId()); + + return model; + } + + public static class UpdateExchangeRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExchangeRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExchangeRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExchangeRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateExchangeRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExchangeRequestPb pb = mapper.readValue(p, UpdateExchangeRequestPb.class); + return UpdateExchangeRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequestPb.java new file mode 100755 index 000000000..5ea4cabbb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExchangeRequestPb { + @JsonProperty("exchange") + private Exchange exchange; + + @JsonIgnore private String id; + + public UpdateExchangeRequestPb setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + public UpdateExchangeRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExchangeRequestPb that = (UpdateExchangeRequestPb) o; + return Objects.equals(exchange, that.exchange) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(exchange, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateExchangeRequestPb.class) + .add("exchange", exchange) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponse.java index d7d55e34f..f4af86aeb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExchangeResponse.UpdateExchangeResponseSerializer.class) +@JsonDeserialize(using = UpdateExchangeResponse.UpdateExchangeResponseDeserializer.class) public class UpdateExchangeResponse { /** */ - @JsonProperty("exchange") private Exchange exchange; public UpdateExchangeResponse setExchange(Exchange exchange) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateExchangeResponse.class).add("exchange", exchange).toString(); } + + UpdateExchangeResponsePb toPb() { + UpdateExchangeResponsePb pb = new UpdateExchangeResponsePb(); + pb.setExchange(exchange); + + return pb; + } + + static UpdateExchangeResponse fromPb(UpdateExchangeResponsePb pb) { + UpdateExchangeResponse model = new UpdateExchangeResponse(); + model.setExchange(pb.getExchange()); + + return model; + } + + public static class UpdateExchangeResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExchangeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExchangeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExchangeResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateExchangeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExchangeResponsePb pb = mapper.readValue(p, UpdateExchangeResponsePb.class); + return UpdateExchangeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponsePb.java new file mode 100755 index 000000000..bf4b50956 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExchangeResponsePb { + @JsonProperty("exchange") + private Exchange exchange; + + public UpdateExchangeResponsePb setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExchangeResponsePb that = (UpdateExchangeResponsePb) o; + return Objects.equals(exchange, that.exchange); + } + + @Override + public int hashCode() { + return Objects.hash(exchange); + } + + @Override + public String toString() { + return new ToStringer(UpdateExchangeResponsePb.class).add("exchange", exchange).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequest.java index 8f7c53335..1c452f9b2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequest.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateInstallationRequest.UpdateInstallationRequestSerializer.class) +@JsonDeserialize(using = UpdateInstallationRequest.UpdateInstallationRequestDeserializer.class) public class UpdateInstallationRequest { /** */ - @JsonProperty("installation") private InstallationDetail installation; /** */ - @JsonIgnore private String installationId; + private String installationId; /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonProperty("rotate_token") private Boolean rotateToken; public UpdateInstallationRequest setInstallation(InstallationDetail installation) { @@ -85,4 +93,47 @@ public String toString() { .add("rotateToken", rotateToken) .toString(); } + + UpdateInstallationRequestPb toPb() { + UpdateInstallationRequestPb pb = new UpdateInstallationRequestPb(); + pb.setInstallation(installation); + pb.setInstallationId(installationId); + pb.setListingId(listingId); + pb.setRotateToken(rotateToken); + + return pb; + 
} + + static UpdateInstallationRequest fromPb(UpdateInstallationRequestPb pb) { + UpdateInstallationRequest model = new UpdateInstallationRequest(); + model.setInstallation(pb.getInstallation()); + model.setInstallationId(pb.getInstallationId()); + model.setListingId(pb.getListingId()); + model.setRotateToken(pb.getRotateToken()); + + return model; + } + + public static class UpdateInstallationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateInstallationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateInstallationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateInstallationRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateInstallationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateInstallationRequestPb pb = mapper.readValue(p, UpdateInstallationRequestPb.class); + return UpdateInstallationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequestPb.java new file mode 100755 index 000000000..3dc26e45a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequestPb.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateInstallationRequestPb { + @JsonProperty("installation") + private InstallationDetail installation; + + @JsonIgnore private String installationId; + + @JsonIgnore private String listingId; + + @JsonProperty("rotate_token") + private Boolean rotateToken; + + public UpdateInstallationRequestPb setInstallation(InstallationDetail installation) { + this.installation = installation; + return this; + } + + public InstallationDetail getInstallation() { + return installation; + } + + public UpdateInstallationRequestPb setInstallationId(String installationId) { + this.installationId = installationId; + return this; + } + + public String getInstallationId() { + return installationId; + } + + public UpdateInstallationRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public UpdateInstallationRequestPb setRotateToken(Boolean rotateToken) { + this.rotateToken = rotateToken; + return this; + } + + public Boolean getRotateToken() { + return rotateToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateInstallationRequestPb that = (UpdateInstallationRequestPb) o; + return Objects.equals(installation, that.installation) + && Objects.equals(installationId, that.installationId) + && Objects.equals(listingId, that.listingId) + && Objects.equals(rotateToken, that.rotateToken); + } + + @Override + public int hashCode() { + return Objects.hash(installation, installationId, listingId, rotateToken); + } + + @Override + public String toString() { + return new 
ToStringer(UpdateInstallationRequestPb.class) + .add("installation", installation) + .add("installationId", installationId) + .add("listingId", listingId) + .add("rotateToken", rotateToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponse.java index df52570a6..acb209d3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateInstallationResponse.UpdateInstallationResponseSerializer.class) +@JsonDeserialize(using = UpdateInstallationResponse.UpdateInstallationResponseDeserializer.class) public class UpdateInstallationResponse { /** */ - @JsonProperty("installation") private InstallationDetail installation; public UpdateInstallationResponse setInstallation(InstallationDetail installation) { @@ -41,4 +51,41 @@ public String toString() { .add("installation", installation) .toString(); } + + UpdateInstallationResponsePb toPb() { + 
UpdateInstallationResponsePb pb = new UpdateInstallationResponsePb(); + pb.setInstallation(installation); + + return pb; + } + + static UpdateInstallationResponse fromPb(UpdateInstallationResponsePb pb) { + UpdateInstallationResponse model = new UpdateInstallationResponse(); + model.setInstallation(pb.getInstallation()); + + return model; + } + + public static class UpdateInstallationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateInstallationResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateInstallationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateInstallationResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateInstallationResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateInstallationResponsePb pb = mapper.readValue(p, UpdateInstallationResponsePb.class); + return UpdateInstallationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponsePb.java new file mode 100755 index 000000000..aed15ee0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateInstallationResponsePb { + @JsonProperty("installation") + private InstallationDetail installation; + + public UpdateInstallationResponsePb setInstallation(InstallationDetail installation) { + this.installation = installation; + return this; + } + + public InstallationDetail getInstallation() { + return installation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateInstallationResponsePb that = (UpdateInstallationResponsePb) o; + return Objects.equals(installation, that.installation); + } + + @Override + public int hashCode() { + return Objects.hash(installation); + } + + @Override + public String toString() { + return new ToStringer(UpdateInstallationResponsePb.class) + .add("installation", installation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequest.java index 8d0ca9f25..af07b1492 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateListingRequest.UpdateListingRequestSerializer.class) +@JsonDeserialize(using = UpdateListingRequest.UpdateListingRequestDeserializer.class) public class UpdateListingRequest { /** */ - @JsonIgnore private String id; + private String id; /** */ - @JsonProperty("listing") private Listing listing; public UpdateListingRequest setId(String id) { @@ -55,4 +64,42 @@ public String toString() { .add("listing", listing) .toString(); } + + UpdateListingRequestPb toPb() { + UpdateListingRequestPb pb = new UpdateListingRequestPb(); + pb.setId(id); + pb.setListing(listing); + + return pb; + } + + static UpdateListingRequest fromPb(UpdateListingRequestPb pb) { + UpdateListingRequest model = new UpdateListingRequest(); + model.setId(pb.getId()); + model.setListing(pb.getListing()); + + return model; + } + + public static class UpdateListingRequestSerializer extends JsonSerializer { + @Override + public void serialize( + UpdateListingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateListingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateListingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateListingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateListingRequestPb pb = mapper.readValue(p, UpdateListingRequestPb.class); + return UpdateListingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequestPb.java new file mode 100755 index 000000000..c3ccab5ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateListingRequestPb { + @JsonIgnore private String id; + + @JsonProperty("listing") + private Listing listing; + + public UpdateListingRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateListingRequestPb setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateListingRequestPb that = (UpdateListingRequestPb) o; + return Objects.equals(id, that.id) && Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(id, listing); + } + + @Override + public String toString() { + return new ToStringer(UpdateListingRequestPb.class) + .add("id", id) + .add("listing", listing) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponse.java index 10c05956d..1522f2284 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateListingResponse.UpdateListingResponseSerializer.class) +@JsonDeserialize(using = UpdateListingResponse.UpdateListingResponseDeserializer.class) public class UpdateListingResponse { /** */ - @JsonProperty("listing") private Listing listing; public UpdateListingResponse setListing(Listing listing) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateListingResponse.class).add("listing", listing).toString(); } + + UpdateListingResponsePb toPb() { + UpdateListingResponsePb pb = new UpdateListingResponsePb(); + pb.setListing(listing); + + return pb; + } + + static UpdateListingResponse fromPb(UpdateListingResponsePb pb) { + UpdateListingResponse model = new UpdateListingResponse(); + model.setListing(pb.getListing()); + + 
return model; + } + + public static class UpdateListingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateListingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateListingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateListingResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateListingResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateListingResponsePb pb = mapper.readValue(p, UpdateListingResponsePb.class); + return UpdateListingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponsePb.java new file mode 100755 index 000000000..12eea5a78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateListingResponsePb { + @JsonProperty("listing") + private Listing listing; + + public UpdateListingResponsePb setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateListingResponsePb that = (UpdateListingResponsePb) o; + return Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(listing); + } + + @Override + public String toString() { + return new ToStringer(UpdateListingResponsePb.class).add("listing", listing).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequest.java index c0fa00f0c..01e63d8a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequest.java @@ -4,28 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdatePersonalizationRequestRequest.UpdatePersonalizationRequestRequestSerializer.class) +@JsonDeserialize( + using = + UpdatePersonalizationRequestRequest.UpdatePersonalizationRequestRequestDeserializer.class) public class UpdatePersonalizationRequestRequest { /** */ - @JsonIgnore private String listingId; + private String listingId; /** */ - @JsonProperty("reason") private String reason; /** */ - @JsonIgnore private String requestId; + private String requestId; /** */ - @JsonProperty("share") private ShareInfo share; /** */ - @JsonProperty("status") private PersonalizationRequestStatus status; public UpdatePersonalizationRequestRequest setListingId(String listingId) { @@ -100,4 +110,50 @@ public String toString() { .add("status", status) .toString(); } + + UpdatePersonalizationRequestRequestPb toPb() { + UpdatePersonalizationRequestRequestPb pb = new UpdatePersonalizationRequestRequestPb(); + pb.setListingId(listingId); + pb.setReason(reason); + pb.setRequestId(requestId); + pb.setShare(share); + pb.setStatus(status); + + return pb; + } + + static UpdatePersonalizationRequestRequest fromPb(UpdatePersonalizationRequestRequestPb pb) { + UpdatePersonalizationRequestRequest model = new UpdatePersonalizationRequestRequest(); + model.setListingId(pb.getListingId()); + model.setReason(pb.getReason()); + model.setRequestId(pb.getRequestId()); + model.setShare(pb.getShare()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class UpdatePersonalizationRequestRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePersonalizationRequestRequest value, JsonGenerator gen, SerializerProvider 
provider) + throws IOException { + UpdatePersonalizationRequestRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePersonalizationRequestRequestDeserializer + extends JsonDeserializer { + @Override + public UpdatePersonalizationRequestRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePersonalizationRequestRequestPb pb = + mapper.readValue(p, UpdatePersonalizationRequestRequestPb.class); + return UpdatePersonalizationRequestRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequestPb.java new file mode 100755 index 000000000..304b9fb90 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequestPb.java @@ -0,0 +1,98 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdatePersonalizationRequestRequestPb { + @JsonIgnore private String listingId; + + @JsonProperty("reason") + private String reason; + + @JsonIgnore private String requestId; + + @JsonProperty("share") + private ShareInfo share; + + @JsonProperty("status") + private PersonalizationRequestStatus status; + + public UpdatePersonalizationRequestRequestPb setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public UpdatePersonalizationRequestRequestPb setReason(String reason) { + this.reason = reason; + return this; + } + + public String getReason() { + return reason; + } + + public UpdatePersonalizationRequestRequestPb setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + public UpdatePersonalizationRequestRequestPb setShare(ShareInfo share) { + this.share = share; + return this; + } + + public ShareInfo getShare() { + return share; + } + + public UpdatePersonalizationRequestRequestPb setStatus(PersonalizationRequestStatus status) { + this.status = status; + return this; + } + + public PersonalizationRequestStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePersonalizationRequestRequestPb that = (UpdatePersonalizationRequestRequestPb) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(reason, that.reason) + && Objects.equals(requestId, that.requestId) + && Objects.equals(share, that.share) + && Objects.equals(status, that.status); + 
} + + @Override + public int hashCode() { + return Objects.hash(listingId, reason, requestId, share, status); + } + + @Override + public String toString() { + return new ToStringer(UpdatePersonalizationRequestRequestPb.class) + .add("listingId", listingId) + .add("reason", reason) + .add("requestId", requestId) + .add("share", share) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponse.java index 8a17caf4a..d6162c59e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponse.java @@ -4,13 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdatePersonalizationRequestResponse.UpdatePersonalizationRequestResponseSerializer.class) +@JsonDeserialize( + using = + UpdatePersonalizationRequestResponse.UpdatePersonalizationRequestResponseDeserializer.class) public class UpdatePersonalizationRequestResponse { /** */ - @JsonProperty("request") private 
PersonalizationRequest request; public UpdatePersonalizationRequestResponse setRequest(PersonalizationRequest request) { @@ -41,4 +55,42 @@ public String toString() { .add("request", request) .toString(); } + + UpdatePersonalizationRequestResponsePb toPb() { + UpdatePersonalizationRequestResponsePb pb = new UpdatePersonalizationRequestResponsePb(); + pb.setRequest(request); + + return pb; + } + + static UpdatePersonalizationRequestResponse fromPb(UpdatePersonalizationRequestResponsePb pb) { + UpdatePersonalizationRequestResponse model = new UpdatePersonalizationRequestResponse(); + model.setRequest(pb.getRequest()); + + return model; + } + + public static class UpdatePersonalizationRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePersonalizationRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePersonalizationRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePersonalizationRequestResponseDeserializer + extends JsonDeserializer { + @Override + public UpdatePersonalizationRequestResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePersonalizationRequestResponsePb pb = + mapper.readValue(p, UpdatePersonalizationRequestResponsePb.class); + return UpdatePersonalizationRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponsePb.java new file mode 100755 index 000000000..82ec0d3e5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdatePersonalizationRequestResponsePb { + @JsonProperty("request") + private PersonalizationRequest request; + + public UpdatePersonalizationRequestResponsePb setRequest(PersonalizationRequest request) { + this.request = request; + return this; + } + + public PersonalizationRequest getRequest() { + return request; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePersonalizationRequestResponsePb that = (UpdatePersonalizationRequestResponsePb) o; + return Objects.equals(request, that.request); + } + + @Override + public int hashCode() { + return Objects.hash(request); + } + + @Override + public String toString() { + return new ToStringer(UpdatePersonalizationRequestResponsePb.class) + .add("request", request) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequest.java index a82442990..a3a4e42fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequest.java @@ -4,20 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateProviderAnalyticsDashboardRequest.UpdateProviderAnalyticsDashboardRequestSerializer + .class) +@JsonDeserialize( + using = + UpdateProviderAnalyticsDashboardRequest.UpdateProviderAnalyticsDashboardRequestDeserializer + .class) public class UpdateProviderAnalyticsDashboardRequest { /** id is immutable property and can't be updated. 
*/ - @JsonIgnore private String id; + private String id; /** * this is the version of the dashboard template we want to update our user to current expectation * is that it should be equal to latest version of the dashboard template */ - @JsonProperty("version") private Long version; public UpdateProviderAnalyticsDashboardRequest setId(String id) { @@ -58,4 +73,47 @@ public String toString() { .add("version", version) .toString(); } + + UpdateProviderAnalyticsDashboardRequestPb toPb() { + UpdateProviderAnalyticsDashboardRequestPb pb = new UpdateProviderAnalyticsDashboardRequestPb(); + pb.setId(id); + pb.setVersion(version); + + return pb; + } + + static UpdateProviderAnalyticsDashboardRequest fromPb( + UpdateProviderAnalyticsDashboardRequestPb pb) { + UpdateProviderAnalyticsDashboardRequest model = new UpdateProviderAnalyticsDashboardRequest(); + model.setId(pb.getId()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class UpdateProviderAnalyticsDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateProviderAnalyticsDashboardRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateProviderAnalyticsDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProviderAnalyticsDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateProviderAnalyticsDashboardRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProviderAnalyticsDashboardRequestPb pb = + mapper.readValue(p, UpdateProviderAnalyticsDashboardRequestPb.class); + return UpdateProviderAnalyticsDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequestPb.java new file mode 100755 index 000000000..068e8c75b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProviderAnalyticsDashboardRequestPb { + @JsonIgnore private String id; + + @JsonProperty("version") + private Long version; + + public UpdateProviderAnalyticsDashboardRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateProviderAnalyticsDashboardRequestPb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderAnalyticsDashboardRequestPb that = (UpdateProviderAnalyticsDashboardRequestPb) o; + return Objects.equals(id, that.id) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(id, version); + } + + @Override + public String toString() { + 
return new ToStringer(UpdateProviderAnalyticsDashboardRequestPb.class) + .add("id", id) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponse.java index e679a5aa9..9a6f71d4a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponse.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateProviderAnalyticsDashboardResponse.UpdateProviderAnalyticsDashboardResponseSerializer + .class) +@JsonDeserialize( + using = + UpdateProviderAnalyticsDashboardResponse + .UpdateProviderAnalyticsDashboardResponseDeserializer.class) public class UpdateProviderAnalyticsDashboardResponse { /** this is newly created Lakeview dashboard for the user */ - @JsonProperty("dashboard_id") private String dashboardId; /** id & version should be the same as the request */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("version") private 
Long version; public UpdateProviderAnalyticsDashboardResponse setDashboardId(String dashboardId) { @@ -71,4 +85,50 @@ public String toString() { .add("version", version) .toString(); } + + UpdateProviderAnalyticsDashboardResponsePb toPb() { + UpdateProviderAnalyticsDashboardResponsePb pb = + new UpdateProviderAnalyticsDashboardResponsePb(); + pb.setDashboardId(dashboardId); + pb.setId(id); + pb.setVersion(version); + + return pb; + } + + static UpdateProviderAnalyticsDashboardResponse fromPb( + UpdateProviderAnalyticsDashboardResponsePb pb) { + UpdateProviderAnalyticsDashboardResponse model = new UpdateProviderAnalyticsDashboardResponse(); + model.setDashboardId(pb.getDashboardId()); + model.setId(pb.getId()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class UpdateProviderAnalyticsDashboardResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateProviderAnalyticsDashboardResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateProviderAnalyticsDashboardResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProviderAnalyticsDashboardResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateProviderAnalyticsDashboardResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProviderAnalyticsDashboardResponsePb pb = + mapper.readValue(p, UpdateProviderAnalyticsDashboardResponsePb.class); + return UpdateProviderAnalyticsDashboardResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponsePb.java new file mode 100755 index 000000000..e2f8b37ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProviderAnalyticsDashboardResponsePb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("id") + private String id; + + @JsonProperty("version") + private Long version; + + public UpdateProviderAnalyticsDashboardResponsePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public UpdateProviderAnalyticsDashboardResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateProviderAnalyticsDashboardResponsePb setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderAnalyticsDashboardResponsePb that = + 
(UpdateProviderAnalyticsDashboardResponsePb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(id, that.id) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, id, version); + } + + @Override + public String toString() { + return new ToStringer(UpdateProviderAnalyticsDashboardResponsePb.class) + .add("dashboardId", dashboardId) + .add("id", id) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequest.java index 19bac59a2..d9d3545a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateProviderRequest.UpdateProviderRequestSerializer.class) +@JsonDeserialize(using = UpdateProviderRequest.UpdateProviderRequestDeserializer.class) public class UpdateProviderRequest { /** */ - @JsonIgnore private 
String id; + private String id; /** */ - @JsonProperty("provider") private ProviderInfo provider; public UpdateProviderRequest setId(String id) { @@ -55,4 +64,43 @@ public String toString() { .add("provider", provider) .toString(); } + + UpdateProviderRequestPb toPb() { + UpdateProviderRequestPb pb = new UpdateProviderRequestPb(); + pb.setId(id); + pb.setProvider(provider); + + return pb; + } + + static UpdateProviderRequest fromPb(UpdateProviderRequestPb pb) { + UpdateProviderRequest model = new UpdateProviderRequest(); + model.setId(pb.getId()); + model.setProvider(pb.getProvider()); + + return model; + } + + public static class UpdateProviderRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateProviderRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProviderRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProviderRequestPb pb = mapper.readValue(p, UpdateProviderRequestPb.class); + return UpdateProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequestPb.java new file mode 100755 index 000000000..6b5e09822 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProviderRequestPb { + @JsonIgnore private String id; + + @JsonProperty("provider") + private ProviderInfo provider; + + public UpdateProviderRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateProviderRequestPb setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderRequestPb that = (UpdateProviderRequestPb) o; + return Objects.equals(id, that.id) && Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(id, provider); + } + + @Override + public String toString() { + return new ToStringer(UpdateProviderRequestPb.class) + .add("id", id) + .add("provider", provider) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java index 81323258a..d33b5d657 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateProviderResponse.UpdateProviderResponseSerializer.class) +@JsonDeserialize(using = UpdateProviderResponse.UpdateProviderResponseDeserializer.class) public class UpdateProviderResponse { /** */ - @JsonProperty("provider") private ProviderInfo provider; public UpdateProviderResponse setProvider(ProviderInfo provider) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateProviderResponse.class).add("provider", provider).toString(); } + + UpdateProviderResponsePb toPb() { + UpdateProviderResponsePb pb = new UpdateProviderResponsePb(); + pb.setProvider(provider); + + return pb; + } + + static UpdateProviderResponse fromPb(UpdateProviderResponsePb pb) { + UpdateProviderResponse model = new UpdateProviderResponse(); + model.setProvider(pb.getProvider()); + + return model; + } + + public static class UpdateProviderResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateProviderResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateProviderResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProviderResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateProviderResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProviderResponsePb pb = mapper.readValue(p, UpdateProviderResponsePb.class); + return UpdateProviderResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponsePb.java new file mode 100755 index 000000000..480f43aa4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProviderResponsePb { + @JsonProperty("provider") + private ProviderInfo provider; + + public UpdateProviderResponsePb setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderResponsePb that = (UpdateProviderResponsePb) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(UpdateProviderResponsePb.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java index 5a370dd43..0b993de73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Activity recorded for the action. */ @Generated +@JsonSerialize(using = Activity.ActivitySerializer.class) +@JsonDeserialize(using = Activity.ActivityDeserializer.class) public class Activity { /** * Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding @@ -25,15 +36,12 @@ public class Activity { *

* `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing * model versions in a stage. */ - @JsonProperty("activity_type") private ActivityType activityType; /** User-provided comment associated with the activity. */ - @JsonProperty("comment") private String comment; /** Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** @@ -47,15 +55,12 @@ public class Activity { * *

* `Archived`: Archived stage. */ - @JsonProperty("from_stage") private Stage fromStage; /** Unique identifier for the object. */ - @JsonProperty("id") private String id; /** Time of the object at last update, as a Unix timestamp in milliseconds. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** @@ -63,7 +68,6 @@ public class Activity { * usually describes a side effect, such as a version being archived as part of another version's * stage transition, and may not be returned for some activity types. */ - @JsonProperty("system_comment") private String systemComment; /** @@ -77,11 +81,9 @@ public class Activity { * *

* `Archived`: Archived stage. */ - @JsonProperty("to_stage") private Stage toStage; /** The username of the user that created the object. */ - @JsonProperty("user_id") private String userId; public Activity setActivityType(ActivityType activityType) { @@ -209,4 +211,53 @@ public String toString() { .add("userId", userId) .toString(); } + + ActivityPb toPb() { + ActivityPb pb = new ActivityPb(); + pb.setActivityType(activityType); + pb.setComment(comment); + pb.setCreationTimestamp(creationTimestamp); + pb.setFromStage(fromStage); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setSystemComment(systemComment); + pb.setToStage(toStage); + pb.setUserId(userId); + + return pb; + } + + static Activity fromPb(ActivityPb pb) { + Activity model = new Activity(); + model.setActivityType(pb.getActivityType()); + model.setComment(pb.getComment()); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setFromStage(pb.getFromStage()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setSystemComment(pb.getSystemComment()); + model.setToStage(pb.getToStage()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class ActivitySerializer extends JsonSerializer { + @Override + public void serialize(Activity value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ActivityPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ActivityDeserializer extends JsonDeserializer { + @Override + public Activity deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ActivityPb pb = mapper.readValue(p, ActivityPb.class); + return Activity.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityPb.java new file mode 100755 index 000000000..f7b9c7733 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Activity recorded for the action. */ +@Generated +class ActivityPb { + @JsonProperty("activity_type") + private ActivityType activityType; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("from_stage") + private Stage fromStage; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("system_comment") + private String systemComment; + + @JsonProperty("to_stage") + private Stage toStage; + + @JsonProperty("user_id") + private String userId; + + public ActivityPb setActivityType(ActivityType activityType) { + this.activityType = activityType; + return this; + } + + public ActivityType getActivityType() { + return activityType; + } + + public ActivityPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ActivityPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ActivityPb 
setFromStage(Stage fromStage) { + this.fromStage = fromStage; + return this; + } + + public Stage getFromStage() { + return fromStage; + } + + public ActivityPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ActivityPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ActivityPb setSystemComment(String systemComment) { + this.systemComment = systemComment; + return this; + } + + public String getSystemComment() { + return systemComment; + } + + public ActivityPb setToStage(Stage toStage) { + this.toStage = toStage; + return this; + } + + public Stage getToStage() { + return toStage; + } + + public ActivityPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ActivityPb that = (ActivityPb) o; + return Objects.equals(activityType, that.activityType) + && Objects.equals(comment, that.comment) + && Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(fromStage, that.fromStage) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(systemComment, that.systemComment) + && Objects.equals(toStage, that.toStage) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + activityType, + comment, + creationTimestamp, + fromStage, + id, + lastUpdatedTimestamp, + systemComment, + toStage, + userId); + } + + @Override + public String toString() { + return new ToStringer(ActivityPb.class) + .add("activityType", activityType) + .add("comment", comment) + .add("creationTimestamp", creationTimestamp) + 
.add("fromStage", fromStage) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("systemComment", systemComment) + .add("toStage", toStage) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java index 2f1b3780c..da35f8513 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ApproveTransitionRequest.ApproveTransitionRequestSerializer.class) +@JsonDeserialize(using = ApproveTransitionRequest.ApproveTransitionRequestDeserializer.class) public class ApproveTransitionRequest { /** Specifies whether to archive all current model versions in the target stage. */ - @JsonProperty("archive_existing_versions") private Boolean archiveExistingVersions; /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Name of the model. 
*/ - @JsonProperty("name") private String name; /** @@ -32,11 +40,9 @@ public class ApproveTransitionRequest { * *

* `Archived`: Archived stage. */ - @JsonProperty("stage") private Stage stage; /** Version of the model. */ - @JsonProperty("version") private String version; public ApproveTransitionRequest setArchiveExistingVersions(Boolean archiveExistingVersions) { @@ -111,4 +117,49 @@ public String toString() { .add("version", version) .toString(); } + + ApproveTransitionRequestPb toPb() { + ApproveTransitionRequestPb pb = new ApproveTransitionRequestPb(); + pb.setArchiveExistingVersions(archiveExistingVersions); + pb.setComment(comment); + pb.setName(name); + pb.setStage(stage); + pb.setVersion(version); + + return pb; + } + + static ApproveTransitionRequest fromPb(ApproveTransitionRequestPb pb) { + ApproveTransitionRequest model = new ApproveTransitionRequest(); + model.setArchiveExistingVersions(pb.getArchiveExistingVersions()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setStage(pb.getStage()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ApproveTransitionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ApproveTransitionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ApproveTransitionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ApproveTransitionRequestDeserializer + extends JsonDeserializer { + @Override + public ApproveTransitionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ApproveTransitionRequestPb pb = mapper.readValue(p, ApproveTransitionRequestPb.class); + return ApproveTransitionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestPb.java new file mode 100755 index 000000000..ec1223215 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ApproveTransitionRequestPb { + @JsonProperty("archive_existing_versions") + private Boolean archiveExistingVersions; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("stage") + private Stage stage; + + @JsonProperty("version") + private String version; + + public ApproveTransitionRequestPb setArchiveExistingVersions(Boolean archiveExistingVersions) { + this.archiveExistingVersions = archiveExistingVersions; + return this; + } + + public Boolean getArchiveExistingVersions() { + return archiveExistingVersions; + } + + public ApproveTransitionRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ApproveTransitionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ApproveTransitionRequestPb setStage(Stage stage) { + this.stage = stage; + return this; + } + + public Stage getStage() { + return stage; + } + + public 
ApproveTransitionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApproveTransitionRequestPb that = (ApproveTransitionRequestPb) o; + return Objects.equals(archiveExistingVersions, that.archiveExistingVersions) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(stage, that.stage) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(archiveExistingVersions, comment, name, stage, version); + } + + @Override + public String toString() { + return new ToStringer(ApproveTransitionRequestPb.class) + .add("archiveExistingVersions", archiveExistingVersions) + .add("comment", comment) + .add("name", name) + .add("stage", stage) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java index 3bf4ef6b8..b66f18869 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ApproveTransitionRequestResponse.ApproveTransitionRequestResponseSerializer.class) +@JsonDeserialize( + using = ApproveTransitionRequestResponse.ApproveTransitionRequestResponseDeserializer.class) public class ApproveTransitionRequestResponse { /** Activity recorded for the action. */ - @JsonProperty("activity") private Activity activity; public ApproveTransitionRequestResponse setActivity(Activity activity) { @@ -41,4 +53,42 @@ public String toString() { .add("activity", activity) .toString(); } + + ApproveTransitionRequestResponsePb toPb() { + ApproveTransitionRequestResponsePb pb = new ApproveTransitionRequestResponsePb(); + pb.setActivity(activity); + + return pb; + } + + static ApproveTransitionRequestResponse fromPb(ApproveTransitionRequestResponsePb pb) { + ApproveTransitionRequestResponse model = new ApproveTransitionRequestResponse(); + model.setActivity(pb.getActivity()); + + return model; + } + + public static class ApproveTransitionRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ApproveTransitionRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ApproveTransitionRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ApproveTransitionRequestResponseDeserializer + extends JsonDeserializer { + @Override + public ApproveTransitionRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ApproveTransitionRequestResponsePb pb = + mapper.readValue(p, ApproveTransitionRequestResponsePb.class); + return ApproveTransitionRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponsePb.java new file mode 100755 index 000000000..b5d600337 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ApproveTransitionRequestResponsePb { + @JsonProperty("activity") + private Activity activity; + + public ApproveTransitionRequestResponsePb setActivity(Activity activity) { + this.activity = activity; + return this; + } + + public Activity getActivity() { + return activity; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApproveTransitionRequestResponsePb that = (ApproveTransitionRequestResponsePb) o; + return Objects.equals(activity, that.activity); + } + + @Override + public int hashCode() { + return Objects.hash(activity); + } + + @Override + public String toString() { + return new ToStringer(ApproveTransitionRequestResponsePb.class) + .add("activity", activity) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java deleted file mode 100755 index 
7f57da157..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ArtifactCredentialInfo { - /** - * A collection of HTTP headers that should be specified when uploading to or downloading from the - * specified `signed_uri`. - */ - @JsonProperty("headers") - private Collection headers; - - /** - * The path, relative to the Run's artifact root location, of the artifact that can be accessed - * with the credential. - */ - @JsonProperty("path") - private String path; - - /** The ID of the MLflow Run containing the artifact that can be accessed with the credential. */ - @JsonProperty("run_id") - private String runId; - - /** The signed URI credential that provides access to the artifact. */ - @JsonProperty("signed_uri") - private String signedUri; - - /** - * The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access - * Signature URI). 
- */ - @JsonProperty("type") - private ArtifactCredentialType typeValue; - - public ArtifactCredentialInfo setHeaders(Collection headers) { - this.headers = headers; - return this; - } - - public Collection getHeaders() { - return headers; - } - - public ArtifactCredentialInfo setPath(String path) { - this.path = path; - return this; - } - - public String getPath() { - return path; - } - - public ArtifactCredentialInfo setRunId(String runId) { - this.runId = runId; - return this; - } - - public String getRunId() { - return runId; - } - - public ArtifactCredentialInfo setSignedUri(String signedUri) { - this.signedUri = signedUri; - return this; - } - - public String getSignedUri() { - return signedUri; - } - - public ArtifactCredentialInfo setType(ArtifactCredentialType typeValue) { - this.typeValue = typeValue; - return this; - } - - public ArtifactCredentialType getType() { - return typeValue; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfo that = (ArtifactCredentialInfo) o; - return Objects.equals(headers, that.headers) - && Objects.equals(path, that.path) - && Objects.equals(runId, that.runId) - && Objects.equals(signedUri, that.signedUri) - && Objects.equals(typeValue, that.typeValue); - } - - @Override - public int hashCode() { - return Objects.hash(headers, path, runId, signedUri, typeValue); - } - - @Override - public String toString() { - return new ToStringer(ArtifactCredentialInfo.class) - .add("headers", headers) - .add("path", path) - .add("runId", runId) - .add("signedUri", signedUri) - .add("typeValue", typeValue) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java deleted file mode 100755 index ec4cf4370..000000000 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; - -/** The type of a given artifact access credential */ -@Generated -public enum ArtifactCredentialType { - AWS_PRESIGNED_URL, - AZURE_ADLS_GEN2_SAS_URI, - AZURE_SAS_URI, - GCP_SIGNED_URL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java index bc0cac89e..5d7c579c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java @@ -4,35 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Comment details. */ @Generated +@JsonSerialize(using = CommentObject.CommentObjectSerializer.class) +@JsonDeserialize(using = CommentObject.CommentObjectDeserializer.class) public class CommentObject { /** Array of actions on the activity allowed for the current viewer. 
*/ - @JsonProperty("available_actions") private Collection availableActions; /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** Comment ID */ - @JsonProperty("id") private String id; /** Time of the object at last update, as a Unix timestamp in milliseconds. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** The username of the user that created the object. */ - @JsonProperty("user_id") private String userId; public CommentObject setAvailableActions(Collection availableActions) { @@ -119,4 +124,47 @@ public String toString() { .add("userId", userId) .toString(); } + + CommentObjectPb toPb() { + CommentObjectPb pb = new CommentObjectPb(); + pb.setAvailableActions(availableActions); + pb.setComment(comment); + pb.setCreationTimestamp(creationTimestamp); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setUserId(userId); + + return pb; + } + + static CommentObject fromPb(CommentObjectPb pb) { + CommentObject model = new CommentObject(); + model.setAvailableActions(pb.getAvailableActions()); + model.setComment(pb.getComment()); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class CommentObjectSerializer extends JsonSerializer { + @Override + public void serialize(CommentObject value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CommentObjectPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CommentObjectDeserializer extends JsonDeserializer { + @Override + public CommentObject deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in 
the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CommentObjectPb pb = mapper.readValue(p, CommentObjectPb.class); + return CommentObject.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObjectPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObjectPb.java new file mode 100755 index 000000000..2cf91a85e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObjectPb.java @@ -0,0 +1,116 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Comment details. */ +@Generated +class CommentObjectPb { + @JsonProperty("available_actions") + private Collection availableActions; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("user_id") + private String userId; + + public CommentObjectPb setAvailableActions(Collection availableActions) { + this.availableActions = availableActions; + return this; + } + + public Collection getAvailableActions() { + return availableActions; + } + + public CommentObjectPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CommentObjectPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public CommentObjectPb setId(String id) { + this.id = id; + 
return this; + } + + public String getId() { + return id; + } + + public CommentObjectPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public CommentObjectPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CommentObjectPb that = (CommentObjectPb) o; + return Objects.equals(availableActions, that.availableActions) + && Objects.equals(comment, that.comment) + && Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + availableActions, comment, creationTimestamp, id, lastUpdatedTimestamp, userId); + } + + @Override + public String toString() { + return new ToStringer(CommentObjectPb.class) + .add("availableActions", availableActions) + .add("comment", comment) + .add("creationTimestamp", creationTimestamp) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Converters.java new file mode 100755 index 000000000..bc7995be4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.ml; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class 
Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java index fd7f66eb3..5b6e78502 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateComment.CreateCommentSerializer.class) +@JsonDeserialize(using = CreateComment.CreateCommentDeserializer.class) public class CreateComment { /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Name of the model. */ - @JsonProperty("name") private String name; /** Version of the model. 
*/ - @JsonProperty("version") private String version; public CreateComment setComment(String comment) { @@ -71,4 +79,41 @@ public String toString() { .add("version", version) .toString(); } + + CreateCommentPb toPb() { + CreateCommentPb pb = new CreateCommentPb(); + pb.setComment(comment); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static CreateComment fromPb(CreateCommentPb pb) { + CreateComment model = new CreateComment(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class CreateCommentSerializer extends JsonSerializer { + @Override + public void serialize(CreateComment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCommentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCommentDeserializer extends JsonDeserializer { + @Override + public CreateComment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCommentPb pb = mapper.readValue(p, CreateCommentPb.class); + return CreateComment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentPb.java new file mode 100755 index 000000000..4a08ccde7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCommentPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("version") + private String version; + + public CreateCommentPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateCommentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCommentPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCommentPb that = (CreateCommentPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, version); + } + + @Override + public String toString() { + return new ToStringer(CreateCommentPb.class) + .add("comment", comment) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java index 6387f9817..8172f3562 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCommentResponse.CreateCommentResponseSerializer.class) +@JsonDeserialize(using = CreateCommentResponse.CreateCommentResponseDeserializer.class) public class CreateCommentResponse { /** Comment details. */ - @JsonProperty("comment") private CommentObject comment; public CreateCommentResponse setComment(CommentObject comment) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateCommentResponse.class).add("comment", comment).toString(); } + + CreateCommentResponsePb toPb() { + CreateCommentResponsePb pb = new CreateCommentResponsePb(); + pb.setComment(comment); + + return pb; + } + + static CreateCommentResponse fromPb(CreateCommentResponsePb pb) { + CreateCommentResponse model = new CreateCommentResponse(); + model.setComment(pb.getComment()); + + return model; + } + + public static class CreateCommentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCommentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCommentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCommentResponseDeserializer + extends JsonDeserializer { + @Override + public CreateCommentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCommentResponsePb pb = mapper.readValue(p, CreateCommentResponsePb.class); + return CreateCommentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponsePb.java new file mode 100755 index 000000000..70bb9c609 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCommentResponsePb { + @JsonProperty("comment") + private CommentObject comment; + + public CreateCommentResponsePb setComment(CommentObject comment) { + this.comment = comment; + return this; + } + + public CommentObject getComment() { + return comment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCommentResponsePb that = (CreateCommentResponsePb) o; + return Objects.equals(comment, that.comment); + } + + @Override + public int hashCode() { + return Objects.hash(comment); + } + + @Override + public String toString() { + return new ToStringer(CreateCommentResponsePb.class).add("comment", comment).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java index dfc04e7da..b9ee99bf0 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExperiment.CreateExperimentSerializer.class) +@JsonDeserialize(using = CreateExperiment.CreateExperimentDeserializer.class) public class CreateExperiment { /** * Location where all artifacts for the experiment are stored. If not provided, the remote server * will select an appropriate default. */ - @JsonProperty("artifact_location") private String artifactLocation; /** Experiment name. */ - @JsonProperty("name") private String name; /** @@ -27,7 +36,6 @@ public class CreateExperiment { * 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also * guaranteed to support up to 20 tags per request. 
*/ - @JsonProperty("tags") private Collection tags; public CreateExperiment setArtifactLocation(String artifactLocation) { @@ -80,4 +88,42 @@ public String toString() { .add("tags", tags) .toString(); } + + CreateExperimentPb toPb() { + CreateExperimentPb pb = new CreateExperimentPb(); + pb.setArtifactLocation(artifactLocation); + pb.setName(name); + pb.setTags(tags); + + return pb; + } + + static CreateExperiment fromPb(CreateExperimentPb pb) { + CreateExperiment model = new CreateExperiment(); + model.setArtifactLocation(pb.getArtifactLocation()); + model.setName(pb.getName()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreateExperimentSerializer extends JsonSerializer { + @Override + public void serialize(CreateExperiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExperimentDeserializer extends JsonDeserializer { + @Override + public CreateExperiment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExperimentPb pb = mapper.readValue(p, CreateExperimentPb.class); + return CreateExperiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentPb.java new file mode 100755 index 000000000..ae7271094 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateExperimentPb { + @JsonProperty("artifact_location") + private String artifactLocation; + + @JsonProperty("name") + private String name; + + @JsonProperty("tags") + private Collection tags; + + public CreateExperimentPb setArtifactLocation(String artifactLocation) { + this.artifactLocation = artifactLocation; + return this; + } + + public String getArtifactLocation() { + return artifactLocation; + } + + public CreateExperimentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateExperimentPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExperimentPb that = (CreateExperimentPb) o; + return Objects.equals(artifactLocation, that.artifactLocation) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(artifactLocation, name, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateExperimentPb.class) + .add("artifactLocation", artifactLocation) + .add("name", name) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java index e56f99604..19e7552a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateExperimentResponse.CreateExperimentResponseSerializer.class) +@JsonDeserialize(using = CreateExperimentResponse.CreateExperimentResponseDeserializer.class) public class CreateExperimentResponse { /** Unique identifier for the experiment. 
*/ - @JsonProperty("experiment_id") private String experimentId; public CreateExperimentResponse setExperimentId(String experimentId) { @@ -41,4 +51,41 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + CreateExperimentResponsePb toPb() { + CreateExperimentResponsePb pb = new CreateExperimentResponsePb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static CreateExperimentResponse fromPb(CreateExperimentResponsePb pb) { + CreateExperimentResponse model = new CreateExperimentResponse(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class CreateExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public CreateExperimentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateExperimentResponsePb pb = mapper.readValue(p, CreateExperimentResponsePb.class); + return CreateExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponsePb.java new file mode 100755 index 000000000..deea8a4df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateExperimentResponsePb { + @JsonProperty("experiment_id") + private String experimentId; + + public CreateExperimentResponsePb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExperimentResponsePb that = (CreateExperimentResponsePb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(CreateExperimentResponsePb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java index 6d05acda5..32b65b1e8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateForecastingExperimentRequest.CreateForecastingExperimentRequestSerializer.class) +@JsonDeserialize( + using = CreateForecastingExperimentRequest.CreateForecastingExperimentRequestDeserializer.class) public class CreateForecastingExperimentRequest { /** The column in the training table used to customize weights for each time series. */ - @JsonProperty("custom_weights_column") private String customWeightsColumn; /** The path in the workspace to store the created experiment. */ - @JsonProperty("experiment_path") private String experimentPath; /** @@ -23,27 +34,23 @@ public class CreateForecastingExperimentRequest { * second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly', 'Daily', * 'Weekly', 'Monthly', 'Quarterly', 'Yearly'. */ - @JsonProperty("forecast_granularity") private String forecastGranularity; /** * The number of time steps into the future to make predictions, calculated as a multiple of * forecast_granularity. This value represents how far ahead the model should forecast. */ - @JsonProperty("forecast_horizon") private Long forecastHorizon; /** * The fully qualified path of a Unity Catalog table, formatted as * catalog_name.schema_name.table_name, used to store future feature data for predictions. */ - @JsonProperty("future_feature_data_path") private String futureFeatureDataPath; /** * The region code(s) to automatically add holiday features. Currently supports only one region. 
*/ - @JsonProperty("holiday_regions") private Collection holidayRegions; /** @@ -53,71 +60,60 @@ public class CreateForecastingExperimentRequest { * excluded: split_column, target_column, custom_weights_column. - Automatically included: * time_column. */ - @JsonProperty("include_features") private Collection includeFeatures; /** * The maximum duration for the experiment in minutes. The experiment stops automatically if it * exceeds this limit. */ - @JsonProperty("max_runtime") private Long maxRuntime; /** * The fully qualified path of a Unity Catalog table, formatted as * catalog_name.schema_name.table_name, used to store predictions. */ - @JsonProperty("prediction_data_path") private String predictionDataPath; /** The evaluation metric used to optimize the forecasting model. */ - @JsonProperty("primary_metric") private String primaryMetric; /** * The fully qualified path of a Unity Catalog model, formatted as * catalog_name.schema_name.model_name, used to store the best model. */ - @JsonProperty("register_to") private String registerTo; /** * // The column in the training table used for custom data splits. Values must be 'train', * 'validate', or 'test'. */ - @JsonProperty("split_column") private String splitColumn; /** * The column in the input training table used as the prediction target for model training. The * values in this column are used as the ground truth for model training. */ - @JsonProperty("target_column") private String targetColumn; /** The column in the input training table that represents each row's timestamp. */ - @JsonProperty("time_column") private String timeColumn; /** * The column in the training table used to group the dataset for predicting individual time * series. */ - @JsonProperty("timeseries_identifier_columns") private Collection timeseriesIdentifierColumns; /** * The fully qualified path of a Unity Catalog table, formatted as * catalog_name.schema_name.table_name, used as training data for the forecasting model. 
*/ - @JsonProperty("train_data_path") private String trainDataPath; /** * List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', * 'DeepAR'. An empty list includes all supported frameworks. */ - @JsonProperty("training_frameworks") private Collection trainingFrameworks; public CreateForecastingExperimentRequest setCustomWeightsColumn(String customWeightsColumn) { @@ -343,4 +339,74 @@ public String toString() { .add("trainingFrameworks", trainingFrameworks) .toString(); } + + CreateForecastingExperimentRequestPb toPb() { + CreateForecastingExperimentRequestPb pb = new CreateForecastingExperimentRequestPb(); + pb.setCustomWeightsColumn(customWeightsColumn); + pb.setExperimentPath(experimentPath); + pb.setForecastGranularity(forecastGranularity); + pb.setForecastHorizon(forecastHorizon); + pb.setFutureFeatureDataPath(futureFeatureDataPath); + pb.setHolidayRegions(holidayRegions); + pb.setIncludeFeatures(includeFeatures); + pb.setMaxRuntime(maxRuntime); + pb.setPredictionDataPath(predictionDataPath); + pb.setPrimaryMetric(primaryMetric); + pb.setRegisterTo(registerTo); + pb.setSplitColumn(splitColumn); + pb.setTargetColumn(targetColumn); + pb.setTimeColumn(timeColumn); + pb.setTimeseriesIdentifierColumns(timeseriesIdentifierColumns); + pb.setTrainDataPath(trainDataPath); + pb.setTrainingFrameworks(trainingFrameworks); + + return pb; + } + + static CreateForecastingExperimentRequest fromPb(CreateForecastingExperimentRequestPb pb) { + CreateForecastingExperimentRequest model = new CreateForecastingExperimentRequest(); + model.setCustomWeightsColumn(pb.getCustomWeightsColumn()); + model.setExperimentPath(pb.getExperimentPath()); + model.setForecastGranularity(pb.getForecastGranularity()); + model.setForecastHorizon(pb.getForecastHorizon()); + model.setFutureFeatureDataPath(pb.getFutureFeatureDataPath()); + model.setHolidayRegions(pb.getHolidayRegions()); + model.setIncludeFeatures(pb.getIncludeFeatures()); + 
model.setMaxRuntime(pb.getMaxRuntime()); + model.setPredictionDataPath(pb.getPredictionDataPath()); + model.setPrimaryMetric(pb.getPrimaryMetric()); + model.setRegisterTo(pb.getRegisterTo()); + model.setSplitColumn(pb.getSplitColumn()); + model.setTargetColumn(pb.getTargetColumn()); + model.setTimeColumn(pb.getTimeColumn()); + model.setTimeseriesIdentifierColumns(pb.getTimeseriesIdentifierColumns()); + model.setTrainDataPath(pb.getTrainDataPath()); + model.setTrainingFrameworks(pb.getTrainingFrameworks()); + + return model; + } + + public static class CreateForecastingExperimentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateForecastingExperimentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateForecastingExperimentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateForecastingExperimentRequestDeserializer + extends JsonDeserializer { + @Override + public CreateForecastingExperimentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateForecastingExperimentRequestPb pb = + mapper.readValue(p, CreateForecastingExperimentRequestPb.class); + return CreateForecastingExperimentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequestPb.java new file mode 100755 index 000000000..d13e5686d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequestPb.java @@ -0,0 +1,289 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateForecastingExperimentRequestPb { + @JsonProperty("custom_weights_column") + private String customWeightsColumn; + + @JsonProperty("experiment_path") + private String experimentPath; + + @JsonProperty("forecast_granularity") + private String forecastGranularity; + + @JsonProperty("forecast_horizon") + private Long forecastHorizon; + + @JsonProperty("future_feature_data_path") + private String futureFeatureDataPath; + + @JsonProperty("holiday_regions") + private Collection holidayRegions; + + @JsonProperty("include_features") + private Collection includeFeatures; + + @JsonProperty("max_runtime") + private Long maxRuntime; + + @JsonProperty("prediction_data_path") + private String predictionDataPath; + + @JsonProperty("primary_metric") + private String primaryMetric; + + @JsonProperty("register_to") + private String registerTo; + + @JsonProperty("split_column") + private String splitColumn; + + @JsonProperty("target_column") + private String targetColumn; + + @JsonProperty("time_column") + private String timeColumn; + + @JsonProperty("timeseries_identifier_columns") + private Collection timeseriesIdentifierColumns; + + @JsonProperty("train_data_path") + private String trainDataPath; + + @JsonProperty("training_frameworks") + private Collection trainingFrameworks; + + public CreateForecastingExperimentRequestPb setCustomWeightsColumn(String customWeightsColumn) { + this.customWeightsColumn = customWeightsColumn; + return this; + } + + public String getCustomWeightsColumn() { + return customWeightsColumn; + } + + public CreateForecastingExperimentRequestPb setExperimentPath(String experimentPath) { + this.experimentPath = experimentPath; + return this; + } + + public String getExperimentPath() { + 
return experimentPath; + } + + public CreateForecastingExperimentRequestPb setForecastGranularity(String forecastGranularity) { + this.forecastGranularity = forecastGranularity; + return this; + } + + public String getForecastGranularity() { + return forecastGranularity; + } + + public CreateForecastingExperimentRequestPb setForecastHorizon(Long forecastHorizon) { + this.forecastHorizon = forecastHorizon; + return this; + } + + public Long getForecastHorizon() { + return forecastHorizon; + } + + public CreateForecastingExperimentRequestPb setFutureFeatureDataPath( + String futureFeatureDataPath) { + this.futureFeatureDataPath = futureFeatureDataPath; + return this; + } + + public String getFutureFeatureDataPath() { + return futureFeatureDataPath; + } + + public CreateForecastingExperimentRequestPb setHolidayRegions(Collection holidayRegions) { + this.holidayRegions = holidayRegions; + return this; + } + + public Collection getHolidayRegions() { + return holidayRegions; + } + + public CreateForecastingExperimentRequestPb setIncludeFeatures( + Collection includeFeatures) { + this.includeFeatures = includeFeatures; + return this; + } + + public Collection getIncludeFeatures() { + return includeFeatures; + } + + public CreateForecastingExperimentRequestPb setMaxRuntime(Long maxRuntime) { + this.maxRuntime = maxRuntime; + return this; + } + + public Long getMaxRuntime() { + return maxRuntime; + } + + public CreateForecastingExperimentRequestPb setPredictionDataPath(String predictionDataPath) { + this.predictionDataPath = predictionDataPath; + return this; + } + + public String getPredictionDataPath() { + return predictionDataPath; + } + + public CreateForecastingExperimentRequestPb setPrimaryMetric(String primaryMetric) { + this.primaryMetric = primaryMetric; + return this; + } + + public String getPrimaryMetric() { + return primaryMetric; + } + + public CreateForecastingExperimentRequestPb setRegisterTo(String registerTo) { + this.registerTo = registerTo; + return 
this; + } + + public String getRegisterTo() { + return registerTo; + } + + public CreateForecastingExperimentRequestPb setSplitColumn(String splitColumn) { + this.splitColumn = splitColumn; + return this; + } + + public String getSplitColumn() { + return splitColumn; + } + + public CreateForecastingExperimentRequestPb setTargetColumn(String targetColumn) { + this.targetColumn = targetColumn; + return this; + } + + public String getTargetColumn() { + return targetColumn; + } + + public CreateForecastingExperimentRequestPb setTimeColumn(String timeColumn) { + this.timeColumn = timeColumn; + return this; + } + + public String getTimeColumn() { + return timeColumn; + } + + public CreateForecastingExperimentRequestPb setTimeseriesIdentifierColumns( + Collection timeseriesIdentifierColumns) { + this.timeseriesIdentifierColumns = timeseriesIdentifierColumns; + return this; + } + + public Collection getTimeseriesIdentifierColumns() { + return timeseriesIdentifierColumns; + } + + public CreateForecastingExperimentRequestPb setTrainDataPath(String trainDataPath) { + this.trainDataPath = trainDataPath; + return this; + } + + public String getTrainDataPath() { + return trainDataPath; + } + + public CreateForecastingExperimentRequestPb setTrainingFrameworks( + Collection trainingFrameworks) { + this.trainingFrameworks = trainingFrameworks; + return this; + } + + public Collection getTrainingFrameworks() { + return trainingFrameworks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateForecastingExperimentRequestPb that = (CreateForecastingExperimentRequestPb) o; + return Objects.equals(customWeightsColumn, that.customWeightsColumn) + && Objects.equals(experimentPath, that.experimentPath) + && Objects.equals(forecastGranularity, that.forecastGranularity) + && Objects.equals(forecastHorizon, that.forecastHorizon) + && Objects.equals(futureFeatureDataPath, 
that.futureFeatureDataPath) + && Objects.equals(holidayRegions, that.holidayRegions) + && Objects.equals(includeFeatures, that.includeFeatures) + && Objects.equals(maxRuntime, that.maxRuntime) + && Objects.equals(predictionDataPath, that.predictionDataPath) + && Objects.equals(primaryMetric, that.primaryMetric) + && Objects.equals(registerTo, that.registerTo) + && Objects.equals(splitColumn, that.splitColumn) + && Objects.equals(targetColumn, that.targetColumn) + && Objects.equals(timeColumn, that.timeColumn) + && Objects.equals(timeseriesIdentifierColumns, that.timeseriesIdentifierColumns) + && Objects.equals(trainDataPath, that.trainDataPath) + && Objects.equals(trainingFrameworks, that.trainingFrameworks); + } + + @Override + public int hashCode() { + return Objects.hash( + customWeightsColumn, + experimentPath, + forecastGranularity, + forecastHorizon, + futureFeatureDataPath, + holidayRegions, + includeFeatures, + maxRuntime, + predictionDataPath, + primaryMetric, + registerTo, + splitColumn, + targetColumn, + timeColumn, + timeseriesIdentifierColumns, + trainDataPath, + trainingFrameworks); + } + + @Override + public String toString() { + return new ToStringer(CreateForecastingExperimentRequestPb.class) + .add("customWeightsColumn", customWeightsColumn) + .add("experimentPath", experimentPath) + .add("forecastGranularity", forecastGranularity) + .add("forecastHorizon", forecastHorizon) + .add("futureFeatureDataPath", futureFeatureDataPath) + .add("holidayRegions", holidayRegions) + .add("includeFeatures", includeFeatures) + .add("maxRuntime", maxRuntime) + .add("predictionDataPath", predictionDataPath) + .add("primaryMetric", primaryMetric) + .add("registerTo", registerTo) + .add("splitColumn", splitColumn) + .add("targetColumn", targetColumn) + .add("timeColumn", timeColumn) + .add("timeseriesIdentifierColumns", timeseriesIdentifierColumns) + .add("trainDataPath", trainDataPath) + .add("trainingFrameworks", trainingFrameworks) + .toString(); + } +} diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java index 08dc1960f..ee2411b38 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java @@ -4,13 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateForecastingExperimentResponse.CreateForecastingExperimentResponseSerializer.class) +@JsonDeserialize( + using = + CreateForecastingExperimentResponse.CreateForecastingExperimentResponseDeserializer.class) public class CreateForecastingExperimentResponse { /** The unique ID of the created forecasting experiment */ - @JsonProperty("experiment_id") private String experimentId; public CreateForecastingExperimentResponse setExperimentId(String experimentId) { @@ -41,4 +54,42 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + CreateForecastingExperimentResponsePb toPb() { + CreateForecastingExperimentResponsePb pb = new CreateForecastingExperimentResponsePb(); + pb.setExperimentId(experimentId); + + return 
pb; + } + + static CreateForecastingExperimentResponse fromPb(CreateForecastingExperimentResponsePb pb) { + CreateForecastingExperimentResponse model = new CreateForecastingExperimentResponse(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class CreateForecastingExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateForecastingExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateForecastingExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateForecastingExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public CreateForecastingExperimentResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateForecastingExperimentResponsePb pb = + mapper.readValue(p, CreateForecastingExperimentResponsePb.class); + return CreateForecastingExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponsePb.java new file mode 100755 index 000000000..e21e933c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateForecastingExperimentResponsePb { + @JsonProperty("experiment_id") + private String experimentId; + + public CreateForecastingExperimentResponsePb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateForecastingExperimentResponsePb that = (CreateForecastingExperimentResponsePb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(CreateForecastingExperimentResponsePb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java index 63308f48e..f7f9c40a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateLoggedModelRequest.CreateLoggedModelRequestSerializer.class) +@JsonDeserialize(using = CreateLoggedModelRequest.CreateLoggedModelRequestDeserializer.class) public class CreateLoggedModelRequest { /** The ID of the experiment that owns the model. */ - @JsonProperty("experiment_id") private String experimentId; /** The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``. */ - @JsonProperty("model_type") private String modelType; /** The name of the model (optional). If not specified one will be generated. */ - @JsonProperty("name") private String name; /** Parameters attached to the model. */ - @JsonProperty("params") private Collection params; /** The ID of the run that created the model. */ - @JsonProperty("source_run_id") private String sourceRunId; /** Tags attached to the model. 
*/ - @JsonProperty("tags") private Collection tags; public CreateLoggedModelRequest setExperimentId(String experimentId) { @@ -117,4 +122,51 @@ public String toString() { .add("tags", tags) .toString(); } + + CreateLoggedModelRequestPb toPb() { + CreateLoggedModelRequestPb pb = new CreateLoggedModelRequestPb(); + pb.setExperimentId(experimentId); + pb.setModelType(modelType); + pb.setName(name); + pb.setParams(params); + pb.setSourceRunId(sourceRunId); + pb.setTags(tags); + + return pb; + } + + static CreateLoggedModelRequest fromPb(CreateLoggedModelRequestPb pb) { + CreateLoggedModelRequest model = new CreateLoggedModelRequest(); + model.setExperimentId(pb.getExperimentId()); + model.setModelType(pb.getModelType()); + model.setName(pb.getName()); + model.setParams(pb.getParams()); + model.setSourceRunId(pb.getSourceRunId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreateLoggedModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateLoggedModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateLoggedModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateLoggedModelRequestDeserializer + extends JsonDeserializer { + @Override + public CreateLoggedModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateLoggedModelRequestPb pb = mapper.readValue(p, CreateLoggedModelRequestPb.class); + return CreateLoggedModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequestPb.java new file mode 100755 index 000000000..24ab61017 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequestPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateLoggedModelRequestPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("model_type") + private String modelType; + + @JsonProperty("name") + private String name; + + @JsonProperty("params") + private Collection params; + + @JsonProperty("source_run_id") + private String sourceRunId; + + @JsonProperty("tags") + private Collection tags; + + public CreateLoggedModelRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public CreateLoggedModelRequestPb setModelType(String modelType) { + this.modelType = modelType; + return this; + } + + public String getModelType() { + return modelType; + } + + public CreateLoggedModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateLoggedModelRequestPb setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return 
params; + } + + public CreateLoggedModelRequestPb setSourceRunId(String sourceRunId) { + this.sourceRunId = sourceRunId; + return this; + } + + public String getSourceRunId() { + return sourceRunId; + } + + public CreateLoggedModelRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateLoggedModelRequestPb that = (CreateLoggedModelRequestPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(modelType, that.modelType) + && Objects.equals(name, that.name) + && Objects.equals(params, that.params) + && Objects.equals(sourceRunId, that.sourceRunId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, modelType, name, params, sourceRunId, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateLoggedModelRequestPb.class) + .add("experimentId", experimentId) + .add("modelType", modelType) + .add("name", name) + .add("params", params) + .add("sourceRunId", sourceRunId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java index 4e74cc259..8ee4aeff6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateLoggedModelResponse.CreateLoggedModelResponseSerializer.class) +@JsonDeserialize(using = CreateLoggedModelResponse.CreateLoggedModelResponseDeserializer.class) public class CreateLoggedModelResponse { /** The newly created logged model. */ - @JsonProperty("model") private LoggedModel model; public CreateLoggedModelResponse setModel(LoggedModel model) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateLoggedModelResponse.class).add("model", model).toString(); } + + CreateLoggedModelResponsePb toPb() { + CreateLoggedModelResponsePb pb = new CreateLoggedModelResponsePb(); + pb.setModel(model); + + return pb; + } + + static CreateLoggedModelResponse fromPb(CreateLoggedModelResponsePb pb) { + CreateLoggedModelResponse model = new CreateLoggedModelResponse(); + model.setModel(pb.getModel()); + + return model; + } + + public static class CreateLoggedModelResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateLoggedModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateLoggedModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateLoggedModelResponseDeserializer + extends JsonDeserializer { + @Override + public CreateLoggedModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateLoggedModelResponsePb pb = mapper.readValue(p, CreateLoggedModelResponsePb.class); + return CreateLoggedModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponsePb.java new file mode 100755 index 000000000..04c2b3c47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateLoggedModelResponsePb { + @JsonProperty("model") + private LoggedModel model; + + public CreateLoggedModelResponsePb setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateLoggedModelResponsePb that = (CreateLoggedModelResponsePb) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(CreateLoggedModelResponsePb.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java index 636aa94b7..fa2003be3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateModelRequest.CreateModelRequestSerializer.class) +@JsonDeserialize(using = CreateModelRequest.CreateModelRequestDeserializer.class) public class CreateModelRequest { /** Optional description for registered model. */ - @JsonProperty("description") private String description; /** Register models under this name */ - @JsonProperty("name") private String name; /** Additional metadata for registered model. 
*/ - @JsonProperty("tags") private Collection tags; public CreateModelRequest setDescription(String description) { @@ -72,4 +80,42 @@ public String toString() { .add("tags", tags) .toString(); } + + CreateModelRequestPb toPb() { + CreateModelRequestPb pb = new CreateModelRequestPb(); + pb.setDescription(description); + pb.setName(name); + pb.setTags(tags); + + return pb; + } + + static CreateModelRequest fromPb(CreateModelRequestPb pb) { + CreateModelRequest model = new CreateModelRequest(); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreateModelRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateModelRequestDeserializer extends JsonDeserializer { + @Override + public CreateModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateModelRequestPb pb = mapper.readValue(p, CreateModelRequestPb.class); + return CreateModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequestPb.java new file mode 100755 index 000000000..048f4706c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateModelRequestPb { + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("tags") + private Collection tags; + + public CreateModelRequestPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateModelRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateModelRequestPb that = (CreateModelRequestPb) o; + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(description, name, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateModelRequestPb.class) + .add("description", description) + .add("name", name) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java index b354e3cf4..5c9289267 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java @@ -4,13 +4,23 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateModelResponse.CreateModelResponseSerializer.class) +@JsonDeserialize(using = CreateModelResponse.CreateModelResponseDeserializer.class) public class CreateModelResponse { /** */ - @JsonProperty("registered_model") private Model registeredModel; public CreateModelResponse setRegisteredModel(Model registeredModel) { @@ -41,4 +51,39 @@ public String toString() { .add("registeredModel", registeredModel) .toString(); } + + CreateModelResponsePb toPb() { + CreateModelResponsePb pb = new CreateModelResponsePb(); + pb.setRegisteredModel(registeredModel); + + return pb; + } + + static CreateModelResponse fromPb(CreateModelResponsePb pb) { + CreateModelResponse model = new CreateModelResponse(); + model.setRegisteredModel(pb.getRegisteredModel()); + + return model; + } + + public static class CreateModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateModelResponseDeserializer + extends JsonDeserializer { + @Override + public CreateModelResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateModelResponsePb pb = mapper.readValue(p, CreateModelResponsePb.class); + return CreateModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponsePb.java new file mode 100755 index 000000000..2abd84e9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateModelResponsePb { + @JsonProperty("registered_model") + private Model registeredModel; + + public CreateModelResponsePb setRegisteredModel(Model registeredModel) { + this.registeredModel = registeredModel; + return this; + } + + public Model getRegisteredModel() { + return registeredModel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateModelResponsePb that = (CreateModelResponsePb) o; + return Objects.equals(registeredModel, that.registeredModel); + } + + @Override + public int hashCode() { + return Objects.hash(registeredModel); + } + + @Override + public String toString() { + return new ToStringer(CreateModelResponsePb.class) + .add("registeredModel", registeredModel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java index 6e59fd63b..1143c6abb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java @@ -4,40 +4,45 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateModelVersionRequest.CreateModelVersionRequestSerializer.class) +@JsonDeserialize(using = CreateModelVersionRequest.CreateModelVersionRequestDeserializer.class) public class CreateModelVersionRequest { /** Optional description for model version. */ - @JsonProperty("description") private String description; /** Register model under this name */ - @JsonProperty("name") private String name; /** * MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow * tracking server */ - @JsonProperty("run_id") private String runId; /** * MLflow run link - this is the exact link of the run that generated this model version, * potentially hosted at another instance of MLflow. */ - @JsonProperty("run_link") private String runLink; /** URI indicating the location of the model artifacts. 
*/ - @JsonProperty("source") private String source; /** Additional metadata for model version. */ - @JsonProperty("tags") private Collection tags; public CreateModelVersionRequest setDescription(String description) { @@ -123,4 +128,51 @@ public String toString() { .add("tags", tags) .toString(); } + + CreateModelVersionRequestPb toPb() { + CreateModelVersionRequestPb pb = new CreateModelVersionRequestPb(); + pb.setDescription(description); + pb.setName(name); + pb.setRunId(runId); + pb.setRunLink(runLink); + pb.setSource(source); + pb.setTags(tags); + + return pb; + } + + static CreateModelVersionRequest fromPb(CreateModelVersionRequestPb pb) { + CreateModelVersionRequest model = new CreateModelVersionRequest(); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setRunId(pb.getRunId()); + model.setRunLink(pb.getRunLink()); + model.setSource(pb.getSource()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreateModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public CreateModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateModelVersionRequestPb pb = mapper.readValue(p, CreateModelVersionRequestPb.class); + return CreateModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequestPb.java new file mode 100755 index 000000000..d070b29a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequestPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateModelVersionRequestPb { + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_link") + private String runLink; + + @JsonProperty("source") + private String source; + + @JsonProperty("tags") + private Collection tags; + + public CreateModelVersionRequestPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateModelVersionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateModelVersionRequestPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public CreateModelVersionRequestPb setRunLink(String runLink) { + this.runLink = runLink; + return this; + } + + public String getRunLink() { + return runLink; + } + + public 
CreateModelVersionRequestPb setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public CreateModelVersionRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateModelVersionRequestPb that = (CreateModelVersionRequestPb) o; + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(runId, that.runId) + && Objects.equals(runLink, that.runLink) + && Objects.equals(source, that.source) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(description, name, runId, runLink, source, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateModelVersionRequestPb.class) + .add("description", description) + .add("name", name) + .add("runId", runId) + .add("runLink", runLink) + .add("source", source) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponse.java index 4f6749858..82ce90978 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateModelVersionResponse.CreateModelVersionResponseSerializer.class) +@JsonDeserialize(using = CreateModelVersionResponse.CreateModelVersionResponseDeserializer.class) public class CreateModelVersionResponse { /** Return new version number generated for this model in registry. */ - @JsonProperty("model_version") private ModelVersion modelVersion; public CreateModelVersionResponse setModelVersion(ModelVersion modelVersion) { @@ -41,4 +51,41 @@ public String toString() { .add("modelVersion", modelVersion) .toString(); } + + CreateModelVersionResponsePb toPb() { + CreateModelVersionResponsePb pb = new CreateModelVersionResponsePb(); + pb.setModelVersion(modelVersion); + + return pb; + } + + static CreateModelVersionResponse fromPb(CreateModelVersionResponsePb pb) { + CreateModelVersionResponse model = new CreateModelVersionResponse(); + model.setModelVersion(pb.getModelVersion()); + + return model; + } + + public static class CreateModelVersionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateModelVersionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateModelVersionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateModelVersionResponseDeserializer + extends JsonDeserializer { + @Override + public CreateModelVersionResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateModelVersionResponsePb pb = mapper.readValue(p, CreateModelVersionResponsePb.class); + return CreateModelVersionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponsePb.java new file mode 100755 index 000000000..d196bda7d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateModelVersionResponsePb { + @JsonProperty("model_version") + private ModelVersion modelVersion; + + public CreateModelVersionResponsePb setModelVersion(ModelVersion modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public ModelVersion getModelVersion() { + return modelVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateModelVersionResponsePb that = (CreateModelVersionResponsePb) o; + return Objects.equals(modelVersion, that.modelVersion); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersion); + } + + @Override + public String toString() { + return new ToStringer(CreateModelVersionResponsePb.class) + .add("modelVersion", modelVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java index 3d4f594b2..b935c6242 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRegistryWebhook.CreateRegistryWebhookSerializer.class) +@JsonDeserialize(using = CreateRegistryWebhook.CreateRegistryWebhookDeserializer.class) public class CreateRegistryWebhook { /** User-specified description for the webhook. */ - @JsonProperty("description") private String description; /** @@ -45,19 +55,18 @@ public class CreateRegistryWebhook { * *

* `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. */ - @JsonProperty("events") private Collection events; /** */ - @JsonProperty("http_url_spec") private HttpUrlSpec httpUrlSpec; /** */ - @JsonProperty("job_spec") private JobSpec jobSpec; - /** Name of the model whose events would trigger this webhook. */ - @JsonProperty("model_name") + /** + * If model name is not specified, a registry-wide webhook is created that listens for the + * specified events across all versions of all registered models. + */ private String modelName; /** @@ -69,7 +78,6 @@ public class CreateRegistryWebhook { *

* `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a * real event. */ - @JsonProperty("status") private RegistryWebhookStatus status; public CreateRegistryWebhook setDescription(String description) { @@ -155,4 +163,51 @@ public String toString() { .add("status", status) .toString(); } + + CreateRegistryWebhookPb toPb() { + CreateRegistryWebhookPb pb = new CreateRegistryWebhookPb(); + pb.setDescription(description); + pb.setEvents(events); + pb.setHttpUrlSpec(httpUrlSpec); + pb.setJobSpec(jobSpec); + pb.setModelName(modelName); + pb.setStatus(status); + + return pb; + } + + static CreateRegistryWebhook fromPb(CreateRegistryWebhookPb pb) { + CreateRegistryWebhook model = new CreateRegistryWebhook(); + model.setDescription(pb.getDescription()); + model.setEvents(pb.getEvents()); + model.setHttpUrlSpec(pb.getHttpUrlSpec()); + model.setJobSpec(pb.getJobSpec()); + model.setModelName(pb.getModelName()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class CreateRegistryWebhookSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateRegistryWebhook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRegistryWebhookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRegistryWebhookDeserializer + extends JsonDeserializer { + @Override + public CreateRegistryWebhook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRegistryWebhookPb pb = mapper.readValue(p, CreateRegistryWebhookPb.class); + return CreateRegistryWebhook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhookPb.java new file mode 100755 index 000000000..be1a32818 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhookPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateRegistryWebhookPb { + @JsonProperty("description") + private String description; + + @JsonProperty("events") + private Collection events; + + @JsonProperty("http_url_spec") + private HttpUrlSpec httpUrlSpec; + + @JsonProperty("job_spec") + private JobSpec jobSpec; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("status") + private RegistryWebhookStatus status; + + public CreateRegistryWebhookPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateRegistryWebhookPb setEvents(Collection events) { + this.events = events; + return this; + } + + public Collection getEvents() { + return events; + } + + public CreateRegistryWebhookPb setHttpUrlSpec(HttpUrlSpec httpUrlSpec) { + this.httpUrlSpec = httpUrlSpec; + return this; + } + + public HttpUrlSpec getHttpUrlSpec() { + return httpUrlSpec; + } + + public CreateRegistryWebhookPb setJobSpec(JobSpec jobSpec) { + this.jobSpec = jobSpec; + return this; + } + + public JobSpec 
getJobSpec() { + return jobSpec; + } + + public CreateRegistryWebhookPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public CreateRegistryWebhookPb setStatus(RegistryWebhookStatus status) { + this.status = status; + return this; + } + + public RegistryWebhookStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRegistryWebhookPb that = (CreateRegistryWebhookPb) o; + return Objects.equals(description, that.description) + && Objects.equals(events, that.events) + && Objects.equals(httpUrlSpec, that.httpUrlSpec) + && Objects.equals(jobSpec, that.jobSpec) + && Objects.equals(modelName, that.modelName) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(description, events, httpUrlSpec, jobSpec, modelName, status); + } + + @Override + public String toString() { + return new ToStringer(CreateRegistryWebhookPb.class) + .add("description", description) + .add("events", events) + .add("httpUrlSpec", httpUrlSpec) + .add("jobSpec", jobSpec) + .add("modelName", modelName) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java index f4bad4f58..0b26166b0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRun.CreateRunSerializer.class) +@JsonDeserialize(using = CreateRun.CreateRunDeserializer.class) public class CreateRun { /** ID of the associated experiment. */ - @JsonProperty("experiment_id") private String experimentId; /** The name of the run. */ - @JsonProperty("run_name") private String runName; /** Unix timestamp in milliseconds of when the run started. */ - @JsonProperty("start_time") private Long startTime; /** Additional metadata for run. */ - @JsonProperty("tags") private Collection tags; /** * ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be * removed in a future MLflow release. Use 'mlflow.user' tag instead. 
*/ - @JsonProperty("user_id") private String userId; public CreateRun setExperimentId(String experimentId) { @@ -105,4 +111,45 @@ public String toString() { .add("userId", userId) .toString(); } + + CreateRunPb toPb() { + CreateRunPb pb = new CreateRunPb(); + pb.setExperimentId(experimentId); + pb.setRunName(runName); + pb.setStartTime(startTime); + pb.setTags(tags); + pb.setUserId(userId); + + return pb; + } + + static CreateRun fromPb(CreateRunPb pb) { + CreateRun model = new CreateRun(); + model.setExperimentId(pb.getExperimentId()); + model.setRunName(pb.getRunName()); + model.setStartTime(pb.getStartTime()); + model.setTags(pb.getTags()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class CreateRunSerializer extends JsonSerializer { + @Override + public void serialize(CreateRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRunDeserializer extends JsonDeserializer { + @Override + public CreateRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRunPb pb = mapper.readValue(p, CreateRunPb.class); + return CreateRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunPb.java new file mode 100755 index 000000000..9c80394c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateRunPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("user_id") + private String userId; + + public CreateRunPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public CreateRunPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public CreateRunPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public CreateRunPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public CreateRunPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRunPb that = (CreateRunPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(runName, that.runName) + && Objects.equals(startTime, that.startTime) + && Objects.equals(tags, that.tags) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, runName, startTime, tags, userId); + } + + @Override + public String toString() { + return new ToStringer(CreateRunPb.class) + .add("experimentId", experimentId) + 
.add("runName", runName) + .add("startTime", startTime) + .add("tags", tags) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponse.java index 2c475dbaa..1e2684bea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRunResponse.CreateRunResponseSerializer.class) +@JsonDeserialize(using = CreateRunResponse.CreateRunResponseDeserializer.class) public class CreateRunResponse { /** The newly created run. 
*/ - @JsonProperty("run") private Run run; public CreateRunResponse setRun(Run run) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(CreateRunResponse.class).add("run", run).toString(); } + + CreateRunResponsePb toPb() { + CreateRunResponsePb pb = new CreateRunResponsePb(); + pb.setRun(run); + + return pb; + } + + static CreateRunResponse fromPb(CreateRunResponsePb pb) { + CreateRunResponse model = new CreateRunResponse(); + model.setRun(pb.getRun()); + + return model; + } + + public static class CreateRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRunResponseDeserializer extends JsonDeserializer { + @Override + public CreateRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRunResponsePb pb = mapper.readValue(p, CreateRunResponsePb.class); + return CreateRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponsePb.java new file mode 100755 index 000000000..74ae41096 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateRunResponsePb { + @JsonProperty("run") + private Run run; + + public CreateRunResponsePb setRun(Run run) { + this.run = run; + return this; + } + + public Run getRun() { + return run; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRunResponsePb that = (CreateRunResponsePb) o; + return Objects.equals(run, that.run); + } + + @Override + public int hashCode() { + return Objects.hash(run); + } + + @Override + public String toString() { + return new ToStringer(CreateRunResponsePb.class).add("run", run).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java index 77f767114..716e81dd4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateTransitionRequest.CreateTransitionRequestSerializer.class) +@JsonDeserialize(using = CreateTransitionRequest.CreateTransitionRequestDeserializer.class) public class CreateTransitionRequest { /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Name of the model. */ - @JsonProperty("name") private String name; /** @@ -28,11 +37,9 @@ public class CreateTransitionRequest { * *

* `Archived`: Archived stage. */ - @JsonProperty("stage") private Stage stage; /** Version of the model. */ - @JsonProperty("version") private String version; public CreateTransitionRequest setComment(String comment) { @@ -96,4 +103,47 @@ public String toString() { .add("version", version) .toString(); } + + CreateTransitionRequestPb toPb() { + CreateTransitionRequestPb pb = new CreateTransitionRequestPb(); + pb.setComment(comment); + pb.setName(name); + pb.setStage(stage); + pb.setVersion(version); + + return pb; + } + + static CreateTransitionRequest fromPb(CreateTransitionRequestPb pb) { + CreateTransitionRequest model = new CreateTransitionRequest(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setStage(pb.getStage()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class CreateTransitionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateTransitionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateTransitionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateTransitionRequestDeserializer + extends JsonDeserializer { + @Override + public CreateTransitionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateTransitionRequestPb pb = mapper.readValue(p, CreateTransitionRequestPb.class); + return CreateTransitionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestPb.java new file mode 100755 index 000000000..af9cfb48c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateTransitionRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("stage") + private Stage stage; + + @JsonProperty("version") + private String version; + + public CreateTransitionRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateTransitionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateTransitionRequestPb setStage(Stage stage) { + this.stage = stage; + return this; + } + + public Stage getStage() { + return stage; + } + + public CreateTransitionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTransitionRequestPb that = (CreateTransitionRequestPb) o; + return 
Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(stage, that.stage) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, stage, version); + } + + @Override + public String toString() { + return new ToStringer(CreateTransitionRequestPb.class) + .add("comment", comment) + .add("name", name) + .add("stage", stage) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java index 78d011768..c03848263 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateTransitionRequestResponse.CreateTransitionRequestResponseSerializer.class) +@JsonDeserialize( + using = CreateTransitionRequestResponse.CreateTransitionRequestResponseDeserializer.class) public class CreateTransitionRequestResponse { /** Transition request details. 
*/ - @JsonProperty("request") private TransitionRequest request; public CreateTransitionRequestResponse setRequest(TransitionRequest request) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(CreateTransitionRequestResponse.class).add("request", request).toString(); } + + CreateTransitionRequestResponsePb toPb() { + CreateTransitionRequestResponsePb pb = new CreateTransitionRequestResponsePb(); + pb.setRequest(request); + + return pb; + } + + static CreateTransitionRequestResponse fromPb(CreateTransitionRequestResponsePb pb) { + CreateTransitionRequestResponse model = new CreateTransitionRequestResponse(); + model.setRequest(pb.getRequest()); + + return model; + } + + public static class CreateTransitionRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateTransitionRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateTransitionRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateTransitionRequestResponseDeserializer + extends JsonDeserializer { + @Override + public CreateTransitionRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateTransitionRequestResponsePb pb = + mapper.readValue(p, CreateTransitionRequestResponsePb.class); + return CreateTransitionRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponsePb.java new file mode 100755 index 000000000..b9a480e76 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateTransitionRequestResponsePb { + @JsonProperty("request") + private TransitionRequest request; + + public CreateTransitionRequestResponsePb setRequest(TransitionRequest request) { + this.request = request; + return this; + } + + public TransitionRequest getRequest() { + return request; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTransitionRequestResponsePb that = (CreateTransitionRequestResponsePb) o; + return Objects.equals(request, that.request); + } + + @Override + public int hashCode() { + return Objects.hash(request); + } + + @Override + public String toString() { + return new ToStringer(CreateTransitionRequestResponsePb.class) + .add("request", request) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponse.java index dcadde5d3..2feb7b10b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateWebhookResponse.CreateWebhookResponseSerializer.class) +@JsonDeserialize(using = CreateWebhookResponse.CreateWebhookResponseDeserializer.class) public class CreateWebhookResponse { /** */ - @JsonProperty("webhook") private RegistryWebhook webhook; public CreateWebhookResponse setWebhook(RegistryWebhook webhook) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateWebhookResponse.class).add("webhook", webhook).toString(); } + + CreateWebhookResponsePb toPb() { + CreateWebhookResponsePb pb = new CreateWebhookResponsePb(); + pb.setWebhook(webhook); + + return pb; + } + + static CreateWebhookResponse fromPb(CreateWebhookResponsePb pb) { + CreateWebhookResponse model = new CreateWebhookResponse(); + model.setWebhook(pb.getWebhook()); + + return model; + } + + public static class CreateWebhookResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateWebhookResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
CreateWebhookResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateWebhookResponseDeserializer + extends JsonDeserializer { + @Override + public CreateWebhookResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateWebhookResponsePb pb = mapper.readValue(p, CreateWebhookResponsePb.class); + return CreateWebhookResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponsePb.java new file mode 100755 index 000000000..acd685575 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateWebhookResponsePb { + @JsonProperty("webhook") + private RegistryWebhook webhook; + + public CreateWebhookResponsePb setWebhook(RegistryWebhook webhook) { + this.webhook = webhook; + return this; + } + + public RegistryWebhook getWebhook() { + return webhook; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWebhookResponsePb that = (CreateWebhookResponsePb) o; + return Objects.equals(webhook, that.webhook); + } + + @Override + public int hashCode() { + return Objects.hash(webhook); + } + + @Override + public String toString() { + return new ToStringer(CreateWebhookResponsePb.class).add("webhook", webhook).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java index 325ee15eb..4323afa5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,41 +21,37 @@ * model development process. */ @Generated +@JsonSerialize(using = Dataset.DatasetSerializer.class) +@JsonDeserialize(using = Dataset.DatasetDeserializer.class) public class Dataset { /** * Dataset digest, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of * the same name. */ - @JsonProperty("digest") private String digest; /** The name of the dataset. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3” */ - @JsonProperty("name") private String name; /** * The profile of the dataset. Summary statistics for the dataset, such as the number of rows in a * table, the mean / std / mode of each column in a table, or the number of elements in an array. */ - @JsonProperty("profile") private String profile; /** * The schema of the dataset. E.g., MLflow ColSpec JSON for a dataframe, MLflow TensorSpec JSON * for an ndarray, or another schema format. */ - @JsonProperty("schema") private String schema; /** * Source information for the dataset. Note that the source may not exactly reproduce the dataset * if it was transformed / modified before use with MLflow. */ - @JsonProperty("source") private String source; /** The type of the dataset source, e.g. ‘databricks-uc-table’, ‘DBFS’, ‘S3’, ... 
*/ - @JsonProperty("source_type") private String sourceType; public Dataset setDigest(String digest) { @@ -132,4 +137,47 @@ public String toString() { .add("sourceType", sourceType) .toString(); } + + DatasetPb toPb() { + DatasetPb pb = new DatasetPb(); + pb.setDigest(digest); + pb.setName(name); + pb.setProfile(profile); + pb.setSchema(schema); + pb.setSource(source); + pb.setSourceType(sourceType); + + return pb; + } + + static Dataset fromPb(DatasetPb pb) { + Dataset model = new Dataset(); + model.setDigest(pb.getDigest()); + model.setName(pb.getName()); + model.setProfile(pb.getProfile()); + model.setSchema(pb.getSchema()); + model.setSource(pb.getSource()); + model.setSourceType(pb.getSourceType()); + + return model; + } + + public static class DatasetSerializer extends JsonSerializer { + @Override + public void serialize(Dataset value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatasetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatasetDeserializer extends JsonDeserializer { + @Override + public Dataset deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatasetPb pb = mapper.readValue(p, DatasetPb.class); + return Dataset.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java index dfcb8963c..93804b2b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** DatasetInput. Represents a dataset and input tags. */ @Generated +@JsonSerialize(using = DatasetInput.DatasetInputSerializer.class) +@JsonDeserialize(using = DatasetInput.DatasetInputDeserializer.class) public class DatasetInput { /** The dataset being used as a Run input. */ - @JsonProperty("dataset") private Dataset dataset; /** A list of tags for the dataset input, e.g. 
a “context” tag with value “training” */ - @JsonProperty("tags") private Collection tags; public DatasetInput setDataset(Dataset dataset) { @@ -54,4 +63,39 @@ public int hashCode() { public String toString() { return new ToStringer(DatasetInput.class).add("dataset", dataset).add("tags", tags).toString(); } + + DatasetInputPb toPb() { + DatasetInputPb pb = new DatasetInputPb(); + pb.setDataset(dataset); + pb.setTags(tags); + + return pb; + } + + static DatasetInput fromPb(DatasetInputPb pb) { + DatasetInput model = new DatasetInput(); + model.setDataset(pb.getDataset()); + model.setTags(pb.getTags()); + + return model; + } + + public static class DatasetInputSerializer extends JsonSerializer { + @Override + public void serialize(DatasetInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatasetInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatasetInputDeserializer extends JsonDeserializer { + @Override + public DatasetInput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatasetInputPb pb = mapper.readValue(p, DatasetInputPb.class); + return DatasetInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInputPb.java new file mode 100755 index 000000000..4a2bf1157 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInputPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** DatasetInput. Represents a dataset and input tags. */ +@Generated +class DatasetInputPb { + @JsonProperty("dataset") + private Dataset dataset; + + @JsonProperty("tags") + private Collection tags; + + public DatasetInputPb setDataset(Dataset dataset) { + this.dataset = dataset; + return this; + } + + public Dataset getDataset() { + return dataset; + } + + public DatasetInputPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatasetInputPb that = (DatasetInputPb) o; + return Objects.equals(dataset, that.dataset) && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(dataset, tags); + } + + @Override + public String toString() { + return new ToStringer(DatasetInputPb.class) + .add("dataset", dataset) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetPb.java new file mode 100755 index 000000000..b95da9c9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetPb.java @@ -0,0 +1,117 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Dataset. 
Represents a reference to data used for training, testing, or evaluation during the + * model development process. + */ +@Generated +class DatasetPb { + @JsonProperty("digest") + private String digest; + + @JsonProperty("name") + private String name; + + @JsonProperty("profile") + private String profile; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("source") + private String source; + + @JsonProperty("source_type") + private String sourceType; + + public DatasetPb setDigest(String digest) { + this.digest = digest; + return this; + } + + public String getDigest() { + return digest; + } + + public DatasetPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatasetPb setProfile(String profile) { + this.profile = profile; + return this; + } + + public String getProfile() { + return profile; + } + + public DatasetPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public DatasetPb setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public DatasetPb setSourceType(String sourceType) { + this.sourceType = sourceType; + return this; + } + + public String getSourceType() { + return sourceType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatasetPb that = (DatasetPb) o; + return Objects.equals(digest, that.digest) + && Objects.equals(name, that.name) + && Objects.equals(profile, that.profile) + && Objects.equals(schema, that.schema) + && Objects.equals(source, that.source) + && Objects.equals(sourceType, that.sourceType); + } + + @Override + public int hashCode() { + return Objects.hash(digest, name, profile, schema, source, sourceType); + } + + @Override + public String toString() { + return new ToStringer(DatasetPb.class) + 
.add("digest", digest) + .add("name", name) + .add("profile", profile) + .add("schema", schema) + .add("source", source) + .add("sourceType", sourceType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java index 0b4986340..1d100763d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a comment */ @Generated +@JsonSerialize(using = DeleteCommentRequest.DeleteCommentRequestSerializer.class) +@JsonDeserialize(using = DeleteCommentRequest.DeleteCommentRequestDeserializer.class) public class DeleteCommentRequest { - /** */ - @JsonIgnore - @QueryParam("id") + /** Unique identifier of an activity */ private String id; public DeleteCommentRequest setId(String id) { @@ -42,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCommentRequest.class).add("id", id).toString(); } + + DeleteCommentRequestPb toPb() { + 
DeleteCommentRequestPb pb = new DeleteCommentRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteCommentRequest fromPb(DeleteCommentRequestPb pb) { + DeleteCommentRequest model = new DeleteCommentRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteCommentRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteCommentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCommentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCommentRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCommentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCommentRequestPb pb = mapper.readValue(p, DeleteCommentRequestPb.class); + return DeleteCommentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequestPb.java new file mode 100755 index 000000000..584d770d1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a comment */ +@Generated +class DeleteCommentRequestPb { + @JsonIgnore + @QueryParam("id") + private String id; + + public DeleteCommentRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCommentRequestPb that = (DeleteCommentRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteCommentRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java index eae7ebb02..633fd8e0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteCommentResponse.DeleteCommentResponseSerializer.class) +@JsonDeserialize(using = DeleteCommentResponse.DeleteCommentResponseDeserializer.class) public class DeleteCommentResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCommentResponse.class).toString(); } + + DeleteCommentResponsePb toPb() { + DeleteCommentResponsePb pb = new DeleteCommentResponsePb(); + + return pb; + } + + static DeleteCommentResponse fromPb(DeleteCommentResponsePb pb) { + DeleteCommentResponse model = new DeleteCommentResponse(); + + return model; + } + + public static class DeleteCommentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCommentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCommentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCommentResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteCommentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCommentResponsePb pb = mapper.readValue(p, DeleteCommentResponsePb.class); + return DeleteCommentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponsePb.java new file mode 100755 index 000000000..a6231f0de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteCommentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCommentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java index 7338f2f76..4a8ef70f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteExperiment.DeleteExperimentSerializer.class) +@JsonDeserialize(using = DeleteExperiment.DeleteExperimentDeserializer.class) public class DeleteExperiment { /** ID of the associated experiment. 
*/ - @JsonProperty("experiment_id") private String experimentId; public DeleteExperiment setExperimentId(String experimentId) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExperiment.class).add("experimentId", experimentId).toString(); } + + DeleteExperimentPb toPb() { + DeleteExperimentPb pb = new DeleteExperimentPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static DeleteExperiment fromPb(DeleteExperimentPb pb) { + DeleteExperiment model = new DeleteExperiment(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class DeleteExperimentSerializer extends JsonSerializer { + @Override + public void serialize(DeleteExperiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExperimentDeserializer extends JsonDeserializer { + @Override + public DeleteExperiment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExperimentPb pb = mapper.readValue(p, DeleteExperimentPb.class); + return DeleteExperiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentPb.java new file mode 100755 index 000000000..b9e363276 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteExperimentPb { + @JsonProperty("experiment_id") + private String experimentId; + + public DeleteExperimentPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExperimentPb that = (DeleteExperimentPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(DeleteExperimentPb.class).add("experimentId", experimentId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java index 2ef42a8bd..62628d2ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteExperimentResponse.DeleteExperimentResponseSerializer.class) +@JsonDeserialize(using = DeleteExperimentResponse.DeleteExperimentResponseDeserializer.class) public class DeleteExperimentResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteExperimentResponse.class).toString(); } + + DeleteExperimentResponsePb toPb() { + DeleteExperimentResponsePb pb = new DeleteExperimentResponsePb(); + + return pb; + } + + static DeleteExperimentResponse fromPb(DeleteExperimentResponsePb pb) { + DeleteExperimentResponse model = new DeleteExperimentResponse(); + + return model; + } + + public static class DeleteExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteExperimentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteExperimentResponsePb pb = mapper.readValue(p, DeleteExperimentResponsePb.class); + return DeleteExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponsePb.java new file mode 100755 index 000000000..c56bdbca3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteExperimentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteExperimentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java index 011c95025..8978e1475 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a logged model */ @Generated +@JsonSerialize(using = DeleteLoggedModelRequest.DeleteLoggedModelRequestSerializer.class) +@JsonDeserialize(using = DeleteLoggedModelRequest.DeleteLoggedModelRequestDeserializer.class) public class DeleteLoggedModelRequest { /** The ID of the logged model to delete. */ - @JsonIgnore private String modelId; + private String modelId; public DeleteLoggedModelRequest setModelId(String modelId) { this.modelId = modelId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteLoggedModelRequest.class).add("modelId", modelId).toString(); } + + DeleteLoggedModelRequestPb toPb() { + DeleteLoggedModelRequestPb pb = new DeleteLoggedModelRequestPb(); + pb.setModelId(modelId); + + return pb; + } + + static DeleteLoggedModelRequest fromPb(DeleteLoggedModelRequestPb pb) { + DeleteLoggedModelRequest model = new DeleteLoggedModelRequest(); + model.setModelId(pb.getModelId()); + + return model; + } + + public static class DeleteLoggedModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLoggedModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteLoggedModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteLoggedModelRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteLoggedModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLoggedModelRequestPb pb = mapper.readValue(p, DeleteLoggedModelRequestPb.class); + return DeleteLoggedModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequestPb.java new file mode 100755 index 000000000..31861359b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a logged model */ +@Generated +class DeleteLoggedModelRequestPb { + @JsonIgnore private String modelId; + + public DeleteLoggedModelRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLoggedModelRequestPb that = (DeleteLoggedModelRequestPb) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelRequestPb.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java index 4032513b7..301ec6d1b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteLoggedModelResponse.DeleteLoggedModelResponseSerializer.class) +@JsonDeserialize(using = DeleteLoggedModelResponse.DeleteLoggedModelResponseDeserializer.class) public class DeleteLoggedModelResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteLoggedModelResponse.class).toString(); } + + DeleteLoggedModelResponsePb toPb() { + DeleteLoggedModelResponsePb pb = new DeleteLoggedModelResponsePb(); + + return pb; + } + + static DeleteLoggedModelResponse fromPb(DeleteLoggedModelResponsePb pb) { + DeleteLoggedModelResponse model = new DeleteLoggedModelResponse(); + + return model; + } + + public static class DeleteLoggedModelResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLoggedModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteLoggedModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteLoggedModelResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteLoggedModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLoggedModelResponsePb pb = mapper.readValue(p, DeleteLoggedModelResponsePb.class); + return DeleteLoggedModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponsePb.java new file mode 100755 index 000000000..e1ce85db8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteLoggedModelResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java index e5c0983e6..5789fa907 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a tag on a logged model */ @Generated +@JsonSerialize(using = DeleteLoggedModelTagRequest.DeleteLoggedModelTagRequestSerializer.class) +@JsonDeserialize(using = DeleteLoggedModelTagRequest.DeleteLoggedModelTagRequestDeserializer.class) public class DeleteLoggedModelTagRequest { /** The ID of the logged model to delete the tag from. */ - @JsonIgnore private String modelId; + private String modelId; /** The tag key. */ - @JsonIgnore private String tagKey; + private String tagKey; public DeleteLoggedModelTagRequest setModelId(String modelId) { this.modelId = modelId; @@ -54,4 +65,43 @@ public String toString() { .add("tagKey", tagKey) .toString(); } + + DeleteLoggedModelTagRequestPb toPb() { + DeleteLoggedModelTagRequestPb pb = new DeleteLoggedModelTagRequestPb(); + pb.setModelId(modelId); + pb.setTagKey(tagKey); + + return pb; + } + + static DeleteLoggedModelTagRequest fromPb(DeleteLoggedModelTagRequestPb pb) { + DeleteLoggedModelTagRequest model = new DeleteLoggedModelTagRequest(); + model.setModelId(pb.getModelId()); + model.setTagKey(pb.getTagKey()); + + return model; + } + + public static class DeleteLoggedModelTagRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLoggedModelTagRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteLoggedModelTagRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
DeleteLoggedModelTagRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteLoggedModelTagRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLoggedModelTagRequestPb pb = mapper.readValue(p, DeleteLoggedModelTagRequestPb.class); + return DeleteLoggedModelTagRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequestPb.java new file mode 100755 index 000000000..cfc44ef49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a tag on a logged model */ +@Generated +class DeleteLoggedModelTagRequestPb { + @JsonIgnore private String modelId; + + @JsonIgnore private String tagKey; + + public DeleteLoggedModelTagRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public DeleteLoggedModelTagRequestPb setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLoggedModelTagRequestPb that = (DeleteLoggedModelTagRequestPb) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(tagKey, that.tagKey); + } + + @Override 
+ public int hashCode() { + return Objects.hash(modelId, tagKey); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelTagRequestPb.class) + .add("modelId", modelId) + .add("tagKey", tagKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java index 5e9f53856..23521d2a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java @@ -4,9 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteLoggedModelTagResponse.DeleteLoggedModelTagResponseSerializer.class) +@JsonDeserialize( + using = DeleteLoggedModelTagResponse.DeleteLoggedModelTagResponseDeserializer.class) public class DeleteLoggedModelTagResponse { @Override @@ -25,4 +38,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteLoggedModelTagResponse.class).toString(); } + + DeleteLoggedModelTagResponsePb toPb() { + DeleteLoggedModelTagResponsePb pb = new DeleteLoggedModelTagResponsePb(); + + return pb; + } + + static DeleteLoggedModelTagResponse 
fromPb(DeleteLoggedModelTagResponsePb pb) { + DeleteLoggedModelTagResponse model = new DeleteLoggedModelTagResponse(); + + return model; + } + + public static class DeleteLoggedModelTagResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLoggedModelTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteLoggedModelTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteLoggedModelTagResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteLoggedModelTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLoggedModelTagResponsePb pb = mapper.readValue(p, DeleteLoggedModelTagResponsePb.class); + return DeleteLoggedModelTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponsePb.java new file mode 100755 index 000000000..714e02582 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteLoggedModelTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java index 58d5548c8..3ffcecbce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a model */ @Generated +@JsonSerialize(using = DeleteModelRequest.DeleteModelRequestSerializer.class) +@JsonDeserialize(using = DeleteModelRequest.DeleteModelRequestDeserializer.class) 
public class DeleteModelRequest { /** Registered model unique name identifier. */ - @JsonIgnore - @QueryParam("name") private String name; public DeleteModelRequest setName(String name) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelRequest.class).add("name", name).toString(); } + + DeleteModelRequestPb toPb() { + DeleteModelRequestPb pb = new DeleteModelRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteModelRequest fromPb(DeleteModelRequestPb pb) { + DeleteModelRequest model = new DeleteModelRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteModelRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelRequestDeserializer extends JsonDeserializer { + @Override + public DeleteModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelRequestPb pb = mapper.readValue(p, DeleteModelRequestPb.class); + return DeleteModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequestPb.java new file mode 100755 index 000000000..a48c65b87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a model */ +@Generated +class DeleteModelRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + public DeleteModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteModelRequestPb that = (DeleteModelRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java index 1b053c73f..6ea0c39e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteModelResponse.DeleteModelResponseSerializer.class) +@JsonDeserialize(using = DeleteModelResponse.DeleteModelResponseDeserializer.class) public class DeleteModelResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelResponse.class).toString(); } + + DeleteModelResponsePb toPb() { + DeleteModelResponsePb pb = new DeleteModelResponsePb(); + + return pb; + } + + static DeleteModelResponse fromPb(DeleteModelResponsePb pb) { + DeleteModelResponse model = new DeleteModelResponse(); + + return model; + } + + public static class DeleteModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelResponsePb pb = mapper.readValue(p, DeleteModelResponsePb.class); + return DeleteModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponsePb.java new file mode 100755 index 000000000..155d182fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteModelResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java index b46f87663..6ba9471b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a model tag */ @Generated +@JsonSerialize(using = DeleteModelTagRequest.DeleteModelTagRequestSerializer.class) +@JsonDeserialize(using = 
DeleteModelTagRequest.DeleteModelTagRequestDeserializer.class) public class DeleteModelTagRequest { /** * Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum * size is 250 bytes. */ - @JsonIgnore - @QueryParam("key") private String key; /** Name of the registered model that the tag was logged under. */ - @JsonIgnore - @QueryParam("name") private String name; public DeleteModelTagRequest setKey(String key) { @@ -59,4 +65,43 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelTagRequest.class).add("key", key).add("name", name).toString(); } + + DeleteModelTagRequestPb toPb() { + DeleteModelTagRequestPb pb = new DeleteModelTagRequestPb(); + pb.setKey(key); + pb.setName(name); + + return pb; + } + + static DeleteModelTagRequest fromPb(DeleteModelTagRequestPb pb) { + DeleteModelTagRequest model = new DeleteModelTagRequest(); + model.setKey(pb.getKey()); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteModelTagRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelTagRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelTagRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelTagRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteModelTagRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelTagRequestPb pb = mapper.readValue(p, DeleteModelTagRequestPb.class); + return DeleteModelTagRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequestPb.java new file mode 100755 index 000000000..72bf90e41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a model tag */ +@Generated +class DeleteModelTagRequestPb { + @JsonIgnore + @QueryParam("key") + private String key; + + @JsonIgnore + @QueryParam("name") + private String name; + + public DeleteModelTagRequestPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public DeleteModelTagRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteModelTagRequestPb that = (DeleteModelTagRequestPb) o; + return Objects.equals(key, that.key) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(key, name); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelTagRequestPb.class) + .add("key", key) + .add("name", name) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java index 983354a74..0ab927e93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteModelTagResponse.DeleteModelTagResponseSerializer.class) +@JsonDeserialize(using = DeleteModelTagResponse.DeleteModelTagResponseDeserializer.class) public class DeleteModelTagResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelTagResponse.class).toString(); } + + DeleteModelTagResponsePb toPb() { + DeleteModelTagResponsePb pb = new DeleteModelTagResponsePb(); + + return pb; + } + + static DeleteModelTagResponse fromPb(DeleteModelTagResponsePb pb) { + DeleteModelTagResponse model = new DeleteModelTagResponse(); + + return model; + } + + public static class DeleteModelTagResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelTagResponsePb 
pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelTagResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteModelTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelTagResponsePb pb = mapper.readValue(p, DeleteModelTagResponsePb.class); + return DeleteModelTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponsePb.java new file mode 100755 index 000000000..e54c78ec0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteModelTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java index 30b7a35c2..ddc401866 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a model version. 
*/ @Generated +@JsonSerialize(using = DeleteModelVersionRequest.DeleteModelVersionRequestSerializer.class) +@JsonDeserialize(using = DeleteModelVersionRequest.DeleteModelVersionRequestDeserializer.class) public class DeleteModelVersionRequest { /** Name of the registered model */ - @JsonIgnore - @QueryParam("name") private String name; /** Model version number */ - @JsonIgnore - @QueryParam("version") private String version; public DeleteModelVersionRequest setName(String name) { @@ -59,4 +65,43 @@ public String toString() { .add("version", version) .toString(); } + + DeleteModelVersionRequestPb toPb() { + DeleteModelVersionRequestPb pb = new DeleteModelVersionRequestPb(); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static DeleteModelVersionRequest fromPb(DeleteModelVersionRequestPb pb) { + DeleteModelVersionRequest model = new DeleteModelVersionRequest(); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class DeleteModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelVersionRequestPb pb = mapper.readValue(p, DeleteModelVersionRequestPb.class); + return DeleteModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequestPb.java new file mode 100755 index 000000000..c37d9a1ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a model version. */ +@Generated +class DeleteModelVersionRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("version") + private String version; + + public DeleteModelVersionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeleteModelVersionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteModelVersionRequestPb that = (DeleteModelVersionRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelVersionRequestPb.class) + .add("name", name) + .add("version", version) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java index 1988edda3..40a83dfab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteModelVersionResponse.DeleteModelVersionResponseSerializer.class) +@JsonDeserialize(using = DeleteModelVersionResponse.DeleteModelVersionResponseDeserializer.class) public class DeleteModelVersionResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelVersionResponse.class).toString(); } + + DeleteModelVersionResponsePb toPb() { + DeleteModelVersionResponsePb pb = new DeleteModelVersionResponsePb(); + + return pb; + } + + static DeleteModelVersionResponse fromPb(DeleteModelVersionResponsePb pb) { + DeleteModelVersionResponse model = new DeleteModelVersionResponse(); + + return model; + } + + public static class DeleteModelVersionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + 
DeleteModelVersionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelVersionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelVersionResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteModelVersionResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelVersionResponsePb pb = mapper.readValue(p, DeleteModelVersionResponsePb.class); + return DeleteModelVersionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponsePb.java new file mode 100755 index 000000000..ff3d4b104 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteModelVersionResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelVersionResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java index e05b0249a..067e8b564 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java @@ -3,30 +3,35 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a model version tag */ @Generated +@JsonSerialize(using = DeleteModelVersionTagRequest.DeleteModelVersionTagRequestSerializer.class) 
+@JsonDeserialize( + using = DeleteModelVersionTagRequest.DeleteModelVersionTagRequestDeserializer.class) public class DeleteModelVersionTagRequest { /** * Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum * size is 250 bytes. */ - @JsonIgnore - @QueryParam("key") private String key; /** Name of the registered model that the tag was logged under. */ - @JsonIgnore - @QueryParam("name") private String name; /** Model version number that the tag was logged under. */ - @JsonIgnore - @QueryParam("version") private String version; public DeleteModelVersionTagRequest setKey(String key) { @@ -79,4 +84,45 @@ public String toString() { .add("version", version) .toString(); } + + DeleteModelVersionTagRequestPb toPb() { + DeleteModelVersionTagRequestPb pb = new DeleteModelVersionTagRequestPb(); + pb.setKey(key); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static DeleteModelVersionTagRequest fromPb(DeleteModelVersionTagRequestPb pb) { + DeleteModelVersionTagRequest model = new DeleteModelVersionTagRequest(); + model.setKey(pb.getKey()); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class DeleteModelVersionTagRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelVersionTagRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelVersionTagRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelVersionTagRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteModelVersionTagRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelVersionTagRequestPb pb = mapper.readValue(p, DeleteModelVersionTagRequestPb.class); + return DeleteModelVersionTagRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequestPb.java new file mode 100755 index 000000000..d2ac02330 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a model version tag */ +@Generated +class DeleteModelVersionTagRequestPb { + @JsonIgnore + @QueryParam("key") + private String key; + + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("version") + private String version; + + public DeleteModelVersionTagRequestPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public DeleteModelVersionTagRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeleteModelVersionTagRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteModelVersionTagRequestPb that = (DeleteModelVersionTagRequestPb) o; + return Objects.equals(key, that.key) + && Objects.equals(name, that.name) + && 
Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(key, name, version); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelVersionTagRequestPb.class) + .add("key", key) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java index d359f15af..1b2ea49c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java @@ -4,9 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteModelVersionTagResponse.DeleteModelVersionTagResponseSerializer.class) +@JsonDeserialize( + using = DeleteModelVersionTagResponse.DeleteModelVersionTagResponseDeserializer.class) public class DeleteModelVersionTagResponse { @Override @@ -25,4 +38,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteModelVersionTagResponse.class).toString(); } + + DeleteModelVersionTagResponsePb toPb() { + DeleteModelVersionTagResponsePb pb = new 
DeleteModelVersionTagResponsePb(); + + return pb; + } + + static DeleteModelVersionTagResponse fromPb(DeleteModelVersionTagResponsePb pb) { + DeleteModelVersionTagResponse model = new DeleteModelVersionTagResponse(); + + return model; + } + + public static class DeleteModelVersionTagResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteModelVersionTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteModelVersionTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteModelVersionTagResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteModelVersionTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteModelVersionTagResponsePb pb = + mapper.readValue(p, DeleteModelVersionTagResponsePb.class); + return DeleteModelVersionTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponsePb.java new file mode 100755 index 000000000..cdf24b9de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteModelVersionTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteModelVersionTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java index 3e811641b..4f3f0cc8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRun.DeleteRunSerializer.class) +@JsonDeserialize(using = DeleteRun.DeleteRunDeserializer.class) public class DeleteRun { /** ID of the run to delete. 
*/ - @JsonProperty("run_id") private String runId; public DeleteRun setRunId(String runId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRun.class).add("runId", runId).toString(); } + + DeleteRunPb toPb() { + DeleteRunPb pb = new DeleteRunPb(); + pb.setRunId(runId); + + return pb; + } + + static DeleteRun fromPb(DeleteRunPb pb) { + DeleteRun model = new DeleteRun(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class DeleteRunSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunDeserializer extends JsonDeserializer { + @Override + public DeleteRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunPb pb = mapper.readValue(p, DeleteRunPb.class); + return DeleteRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunPb.java new file mode 100755 index 000000000..df142236a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteRunPb { + @JsonProperty("run_id") + private String runId; + + public DeleteRunPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRunPb that = (DeleteRunPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java index 8dfe844e7..954929877 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
DeleteRunResponse.DeleteRunResponseSerializer.class) +@JsonDeserialize(using = DeleteRunResponse.DeleteRunResponseDeserializer.class) public class DeleteRunResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRunResponse.class).toString(); } + + DeleteRunResponsePb toPb() { + DeleteRunResponsePb pb = new DeleteRunResponsePb(); + + return pb; + } + + static DeleteRunResponse fromPb(DeleteRunResponsePb pb) { + DeleteRunResponse model = new DeleteRunResponse(); + + return model; + } + + public static class DeleteRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunResponseDeserializer extends JsonDeserializer { + @Override + public DeleteRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunResponsePb pb = mapper.readValue(p, DeleteRunResponsePb.class); + return DeleteRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponsePb.java new file mode 100755 index 000000000..1624dc63b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteRunResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java index 4cbd65156..814356e17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java @@ -4,27 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRuns.DeleteRunsSerializer.class) +@JsonDeserialize(using = DeleteRuns.DeleteRunsDeserializer.class) public class DeleteRuns { /** The ID of the experiment containing the runs to delete. 
*/ - @JsonProperty("experiment_id") private String experimentId; /** * An optional positive integer indicating the maximum number of runs to delete. The maximum * allowed value for max_runs is 10000. */ - @JsonProperty("max_runs") private Long maxRuns; /** * The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only * runs created prior to or at this timestamp are deleted. */ - @JsonProperty("max_timestamp_millis") private Long maxTimestampMillis; public DeleteRuns setExperimentId(String experimentId) { @@ -77,4 +85,41 @@ public String toString() { .add("maxTimestampMillis", maxTimestampMillis) .toString(); } + + DeleteRunsPb toPb() { + DeleteRunsPb pb = new DeleteRunsPb(); + pb.setExperimentId(experimentId); + pb.setMaxRuns(maxRuns); + pb.setMaxTimestampMillis(maxTimestampMillis); + + return pb; + } + + static DeleteRuns fromPb(DeleteRunsPb pb) { + DeleteRuns model = new DeleteRuns(); + model.setExperimentId(pb.getExperimentId()); + model.setMaxRuns(pb.getMaxRuns()); + model.setMaxTimestampMillis(pb.getMaxTimestampMillis()); + + return model; + } + + public static class DeleteRunsSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRuns value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunsDeserializer extends JsonDeserializer { + @Override + public DeleteRuns deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunsPb pb = mapper.readValue(p, DeleteRunsPb.class); + return DeleteRuns.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsPb.java new file mode 100755 index 000000000..3ee475647 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteRunsPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("max_runs") + private Long maxRuns; + + @JsonProperty("max_timestamp_millis") + private Long maxTimestampMillis; + + public DeleteRunsPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public DeleteRunsPb setMaxRuns(Long maxRuns) { + this.maxRuns = maxRuns; + return this; + } + + public Long getMaxRuns() { + return maxRuns; + } + + public DeleteRunsPb setMaxTimestampMillis(Long maxTimestampMillis) { + this.maxTimestampMillis = maxTimestampMillis; + return this; + } + + public Long getMaxTimestampMillis() { + return maxTimestampMillis; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRunsPb that = (DeleteRunsPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(maxRuns, that.maxRuns) + && Objects.equals(maxTimestampMillis, that.maxTimestampMillis); + } + + @Override + public int hashCode() { + return 
Objects.hash(experimentId, maxRuns, maxTimestampMillis); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunsPb.class) + .add("experimentId", experimentId) + .add("maxRuns", maxRuns) + .add("maxTimestampMillis", maxTimestampMillis) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java index 92aa1f824..3ca217446 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRunsResponse.DeleteRunsResponseSerializer.class) +@JsonDeserialize(using = DeleteRunsResponse.DeleteRunsResponseDeserializer.class) public class DeleteRunsResponse { /** The number of runs deleted. 
*/ - @JsonProperty("runs_deleted") private Long runsDeleted; public DeleteRunsResponse setRunsDeleted(Long runsDeleted) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRunsResponse.class).add("runsDeleted", runsDeleted).toString(); } + + DeleteRunsResponsePb toPb() { + DeleteRunsResponsePb pb = new DeleteRunsResponsePb(); + pb.setRunsDeleted(runsDeleted); + + return pb; + } + + static DeleteRunsResponse fromPb(DeleteRunsResponsePb pb) { + DeleteRunsResponse model = new DeleteRunsResponse(); + model.setRunsDeleted(pb.getRunsDeleted()); + + return model; + } + + public static class DeleteRunsResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRunsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRunsResponseDeserializer extends JsonDeserializer { + @Override + public DeleteRunsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRunsResponsePb pb = mapper.readValue(p, DeleteRunsResponsePb.class); + return DeleteRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponsePb.java new file mode 100755 index 000000000..52e98648c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteRunsResponsePb { + @JsonProperty("runs_deleted") + private Long runsDeleted; + + public DeleteRunsResponsePb setRunsDeleted(Long runsDeleted) { + this.runsDeleted = runsDeleted; + return this; + } + + public Long getRunsDeleted() { + return runsDeleted; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRunsResponsePb that = (DeleteRunsResponsePb) o; + return Objects.equals(runsDeleted, that.runsDeleted); + } + + @Override + public int hashCode() { + return Objects.hash(runsDeleted); + } + + @Override + public String toString() { + return new ToStringer(DeleteRunsResponsePb.class).add("runsDeleted", runsDeleted).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java index 5b2fe7413..5198b5b56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteTag.DeleteTagSerializer.class) +@JsonDeserialize(using = DeleteTag.DeleteTagDeserializer.class) public class DeleteTag { /** Name of the tag. Maximum size is 255 bytes. Must be provided. */ - @JsonProperty("key") private String key; /** ID of the run that the tag was logged under. Must be provided. */ - @JsonProperty("run_id") private String runId; public DeleteTag setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteTag.class).add("key", key).add("runId", runId).toString(); } + + DeleteTagPb toPb() { + DeleteTagPb pb = new DeleteTagPb(); + pb.setKey(key); + pb.setRunId(runId); + + return pb; + } + + static DeleteTag fromPb(DeleteTagPb pb) { + DeleteTag model = new DeleteTag(); + model.setKey(pb.getKey()); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class DeleteTagSerializer extends JsonSerializer { + @Override + public void serialize(DeleteTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTagDeserializer extends JsonDeserializer { + @Override + public DeleteTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTagPb pb = mapper.readValue(p, DeleteTagPb.class); + return DeleteTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagPb.java new file mode 100755 index 000000000..22085a22b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("run_id") + private String runId; + + public DeleteTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public DeleteTagPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTagPb that = (DeleteTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(key, runId); + } + + @Override + public String toString() { + return new ToStringer(DeleteTagPb.class).add("key", key).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java index c40c832de..847e02ac3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteTagResponse.DeleteTagResponseSerializer.class) +@JsonDeserialize(using = DeleteTagResponse.DeleteTagResponseDeserializer.class) public class DeleteTagResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteTagResponse.class).toString(); } + + DeleteTagResponsePb toPb() { + DeleteTagResponsePb pb = new DeleteTagResponsePb(); + + return pb; + } + + static DeleteTagResponse fromPb(DeleteTagResponsePb pb) { + DeleteTagResponse model = new DeleteTagResponse(); + + return model; + } + + public static class DeleteTagResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTagResponseDeserializer extends JsonDeserializer { + @Override + public DeleteTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTagResponsePb pb = mapper.readValue(p, DeleteTagResponsePb.class); + return DeleteTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponsePb.java new file mode 100755 index 000000000..52dfac480 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java index a326d9928..1df4f7fe5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java @@ -3,30 +3,36 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a transition request */ @Generated +@JsonSerialize( + using = DeleteTransitionRequestRequest.DeleteTransitionRequestRequestSerializer.class) +@JsonDeserialize( + using = DeleteTransitionRequestRequest.DeleteTransitionRequestRequestDeserializer.class) public class DeleteTransitionRequestRequest { /** User-provided comment on the action. */ - @JsonIgnore - @QueryParam("comment") private String comment; /** * Username of the user who created this request. Of the transition requests matching the * specified details, only the one transition created by this user will be deleted. */ - @JsonIgnore - @QueryParam("creator") private String creator; /** Name of the model. */ - @JsonIgnore - @QueryParam("name") private String name; /** @@ -40,13 +46,9 @@ public class DeleteTransitionRequestRequest { * *

* `Archived`: Archived stage. */ - @JsonIgnore - @QueryParam("stage") private DeleteTransitionRequestStage stage; /** Version of the model. */ - @JsonIgnore - @QueryParam("version") private String version; public DeleteTransitionRequestRequest setComment(String comment) { @@ -121,4 +123,50 @@ public String toString() { .add("version", version) .toString(); } + + DeleteTransitionRequestRequestPb toPb() { + DeleteTransitionRequestRequestPb pb = new DeleteTransitionRequestRequestPb(); + pb.setComment(comment); + pb.setCreator(creator); + pb.setName(name); + pb.setStage(stage); + pb.setVersion(version); + + return pb; + } + + static DeleteTransitionRequestRequest fromPb(DeleteTransitionRequestRequestPb pb) { + DeleteTransitionRequestRequest model = new DeleteTransitionRequestRequest(); + model.setComment(pb.getComment()); + model.setCreator(pb.getCreator()); + model.setName(pb.getName()); + model.setStage(pb.getStage()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class DeleteTransitionRequestRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteTransitionRequestRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTransitionRequestRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTransitionRequestRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteTransitionRequestRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTransitionRequestRequestPb pb = + mapper.readValue(p, DeleteTransitionRequestRequestPb.class); + return DeleteTransitionRequestRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequestPb.java new file mode 100755 index 000000000..79df9b9b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a transition request */ +@Generated +class DeleteTransitionRequestRequestPb { + @JsonIgnore + @QueryParam("comment") + private String comment; + + @JsonIgnore + @QueryParam("creator") + private String creator; + + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("stage") + private DeleteTransitionRequestStage stage; + + @JsonIgnore + @QueryParam("version") + private String version; + + public DeleteTransitionRequestRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public DeleteTransitionRequestRequestPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public DeleteTransitionRequestRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeleteTransitionRequestRequestPb setStage(DeleteTransitionRequestStage stage) { + 
this.stage = stage; + return this; + } + + public DeleteTransitionRequestStage getStage() { + return stage; + } + + public DeleteTransitionRequestRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTransitionRequestRequestPb that = (DeleteTransitionRequestRequestPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(creator, that.creator) + && Objects.equals(name, that.name) + && Objects.equals(stage, that.stage) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(comment, creator, name, stage, version); + } + + @Override + public String toString() { + return new ToStringer(DeleteTransitionRequestRequestPb.class) + .add("comment", comment) + .add("creator", creator) + .add("name", name) + .add("stage", stage) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java index 78bab3b6c..fe1570cbf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DeleteTransitionRequestResponse.DeleteTransitionRequestResponseSerializer.class) +@JsonDeserialize( + using = DeleteTransitionRequestResponse.DeleteTransitionRequestResponseDeserializer.class) public class DeleteTransitionRequestResponse { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteTransitionRequestResponse.class).toString(); } + + DeleteTransitionRequestResponsePb toPb() { + DeleteTransitionRequestResponsePb pb = new DeleteTransitionRequestResponsePb(); + + return pb; + } + + static DeleteTransitionRequestResponse fromPb(DeleteTransitionRequestResponsePb pb) { + DeleteTransitionRequestResponse model = new DeleteTransitionRequestResponse(); + + return model; + } + + public static class DeleteTransitionRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteTransitionRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTransitionRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTransitionRequestResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteTransitionRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTransitionRequestResponsePb pb = + mapper.readValue(p, DeleteTransitionRequestResponsePb.class); + return DeleteTransitionRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponsePb.java new file mode 100755 index 000000000..a2b70d206 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteTransitionRequestResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteTransitionRequestResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java index 48ed6f8f5..6dd0499a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a webhook */ @Generated +@JsonSerialize(using = DeleteWebhookRequest.DeleteWebhookRequestSerializer.class) +@JsonDeserialize(using = DeleteWebhookRequest.DeleteWebhookRequestDeserializer.class) public class DeleteWebhookRequest { /** Webhook ID required to delete a registry webhook. */ - @JsonIgnore - @QueryParam("id") private String id; public DeleteWebhookRequest setId(String id) { @@ -42,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWebhookRequest.class).add("id", id).toString(); } + + DeleteWebhookRequestPb toPb() { + DeleteWebhookRequestPb pb = new DeleteWebhookRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteWebhookRequest fromPb(DeleteWebhookRequestPb pb) { + DeleteWebhookRequest model = new DeleteWebhookRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteWebhookRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteWebhookRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWebhookRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWebhookRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteWebhookRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWebhookRequestPb pb = mapper.readValue(p, DeleteWebhookRequestPb.class); + return DeleteWebhookRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequestPb.java new file mode 100755 index 000000000..fd5bf2859 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a webhook */ +@Generated +class DeleteWebhookRequestPb { + @JsonIgnore + @QueryParam("id") + private String id; + + public DeleteWebhookRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWebhookRequestPb that = (DeleteWebhookRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteWebhookRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java index 04056259e..ea0433a97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteWebhookResponse.DeleteWebhookResponseSerializer.class) +@JsonDeserialize(using = DeleteWebhookResponse.DeleteWebhookResponseDeserializer.class) public class DeleteWebhookResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWebhookResponse.class).toString(); } + + DeleteWebhookResponsePb toPb() { + DeleteWebhookResponsePb pb = new DeleteWebhookResponsePb(); + + return pb; + } + + static DeleteWebhookResponse fromPb(DeleteWebhookResponsePb pb) { + DeleteWebhookResponse model = new DeleteWebhookResponse(); + + return model; + } + + public static class DeleteWebhookResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWebhookResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWebhookResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWebhookResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteWebhookResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWebhookResponsePb pb = mapper.readValue(p, DeleteWebhookResponsePb.class); + return DeleteWebhookResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponsePb.java new file mode 100755 index 000000000..bd87ca28b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteWebhookResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteWebhookResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java index b23e8994f..317eb03bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java @@ -4,42 +4,46 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** An experiment and its metadata. */ @Generated +@JsonSerialize(using = Experiment.ExperimentSerializer.class) +@JsonDeserialize(using = Experiment.ExperimentDeserializer.class) public class Experiment { /** Location where artifacts for the experiment are stored. */ - @JsonProperty("artifact_location") private String artifactLocation; /** Creation time */ - @JsonProperty("creation_time") private Long creationTime; /** Unique identifier for the experiment. */ - @JsonProperty("experiment_id") private String experimentId; /** Last update time */ - @JsonProperty("last_update_time") private Long lastUpdateTime; /** * Current life cycle stage of the experiment: "active" or "deleted". Deleted experiments are not * returned by APIs. */ - @JsonProperty("lifecycle_stage") private String lifecycleStage; /** Human readable name that identifies the experiment. */ - @JsonProperty("name") private String name; /** Tags: Additional metadata key-value pairs. 
*/ - @JsonProperty("tags") private Collection tags; public Experiment setArtifactLocation(String artifactLocation) { @@ -137,4 +141,49 @@ public String toString() { .add("tags", tags) .toString(); } + + ExperimentPb toPb() { + ExperimentPb pb = new ExperimentPb(); + pb.setArtifactLocation(artifactLocation); + pb.setCreationTime(creationTime); + pb.setExperimentId(experimentId); + pb.setLastUpdateTime(lastUpdateTime); + pb.setLifecycleStage(lifecycleStage); + pb.setName(name); + pb.setTags(tags); + + return pb; + } + + static Experiment fromPb(ExperimentPb pb) { + Experiment model = new Experiment(); + model.setArtifactLocation(pb.getArtifactLocation()); + model.setCreationTime(pb.getCreationTime()); + model.setExperimentId(pb.getExperimentId()); + model.setLastUpdateTime(pb.getLastUpdateTime()); + model.setLifecycleStage(pb.getLifecycleStage()); + model.setName(pb.getName()); + model.setTags(pb.getTags()); + + return model; + } + + public static class ExperimentSerializer extends JsonSerializer { + @Override + public void serialize(Experiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentDeserializer extends JsonDeserializer { + @Override + public Experiment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentPb pb = mapper.readValue(p, ExperimentPb.class); + return Experiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java index 250fa534d..9b87faa80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ExperimentAccessControlRequest.ExperimentAccessControlRequestSerializer.class) +@JsonDeserialize( + using = ExperimentAccessControlRequest.ExperimentAccessControlRequestDeserializer.class) public class ExperimentAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") 
private String userName; public ExperimentAccessControlRequest setGroupName(String groupName) { @@ -87,4 +96,48 @@ public String toString() { .add("userName", userName) .toString(); } + + ExperimentAccessControlRequestPb toPb() { + ExperimentAccessControlRequestPb pb = new ExperimentAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ExperimentAccessControlRequest fromPb(ExperimentAccessControlRequestPb pb) { + ExperimentAccessControlRequest model = new ExperimentAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ExperimentAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ExperimentAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public ExperimentAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentAccessControlRequestPb pb = + mapper.readValue(p, ExperimentAccessControlRequestPb.class); + return ExperimentAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequestPb.java new file mode 100755 index 000000000..b6756217b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExperimentAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private ExperimentPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ExperimentAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ExperimentAccessControlRequestPb setPermissionLevel( + ExperimentPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ExperimentPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ExperimentAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public 
ExperimentAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentAccessControlRequestPb that = (ExperimentAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ExperimentAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java index c786e7534..679918e16 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java @@ -4,30 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ExperimentAccessControlResponse.ExperimentAccessControlResponseSerializer.class) +@JsonDeserialize( + using = ExperimentAccessControlResponse.ExperimentAccessControlResponseDeserializer.class) public class ExperimentAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ExperimentAccessControlResponse setAllPermissions( @@ -103,4 +111,50 @@ public String toString() { .add("userName", userName) .toString(); } + + ExperimentAccessControlResponsePb toPb() { + ExperimentAccessControlResponsePb pb = new ExperimentAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ExperimentAccessControlResponse fromPb(ExperimentAccessControlResponsePb pb) { + ExperimentAccessControlResponse model = new ExperimentAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class 
ExperimentAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ExperimentAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public ExperimentAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentAccessControlResponsePb pb = + mapper.readValue(p, ExperimentAccessControlResponsePb.class); + return ExperimentAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponsePb.java new file mode 100755 index 000000000..f75edc4f8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExperimentAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ExperimentAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public ExperimentAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ExperimentAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ExperimentAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ExperimentAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentAccessControlResponsePb that = (ExperimentAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && 
Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ExperimentAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPb.java new file mode 100755 index 000000000..4db36e577 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPb.java @@ -0,0 +1,130 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** An experiment and its metadata. 
*/ +@Generated +class ExperimentPb { + @JsonProperty("artifact_location") + private String artifactLocation; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("last_update_time") + private Long lastUpdateTime; + + @JsonProperty("lifecycle_stage") + private String lifecycleStage; + + @JsonProperty("name") + private String name; + + @JsonProperty("tags") + private Collection tags; + + public ExperimentPb setArtifactLocation(String artifactLocation) { + this.artifactLocation = artifactLocation; + return this; + } + + public String getArtifactLocation() { + return artifactLocation; + } + + public ExperimentPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public ExperimentPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public ExperimentPb setLastUpdateTime(Long lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + return this; + } + + public Long getLastUpdateTime() { + return lastUpdateTime; + } + + public ExperimentPb setLifecycleStage(String lifecycleStage) { + this.lifecycleStage = lifecycleStage; + return this; + } + + public String getLifecycleStage() { + return lifecycleStage; + } + + public ExperimentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExperimentPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentPb that = (ExperimentPb) o; + return Objects.equals(artifactLocation, that.artifactLocation) + && Objects.equals(creationTime, 
that.creationTime) + && Objects.equals(experimentId, that.experimentId) + && Objects.equals(lastUpdateTime, that.lastUpdateTime) + && Objects.equals(lifecycleStage, that.lifecycleStage) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + artifactLocation, creationTime, experimentId, lastUpdateTime, lifecycleStage, name, tags); + } + + @Override + public String toString() { + return new ToStringer(ExperimentPb.class) + .add("artifactLocation", artifactLocation) + .add("creationTime", creationTime) + .add("experimentId", experimentId) + .add("lastUpdateTime", lastUpdateTime) + .add("lifecycleStage", lifecycleStage) + .add("name", name) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java index 8988beb05..76bd00e26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ExperimentPermission.ExperimentPermissionSerializer.class) +@JsonDeserialize(using = ExperimentPermission.ExperimentPermissionDeserializer.class) public class ExperimentPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; public ExperimentPermission setInherited(Boolean inherited) { @@ -72,4 +80,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ExperimentPermissionPb toPb() { + ExperimentPermissionPb pb = new ExperimentPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ExperimentPermission fromPb(ExperimentPermissionPb pb) { + ExperimentPermission model = new ExperimentPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ExperimentPermissionSerializer extends JsonSerializer { + @Override + public void serialize( + ExperimentPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentPermissionDeserializer + extends JsonDeserializer { + @Override + public ExperimentPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentPermissionPb pb = mapper.readValue(p, ExperimentPermissionPb.class); + return ExperimentPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionPb.java new file mode 100755 index 000000000..fa96ac441 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExperimentPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private ExperimentPermissionLevel permissionLevel; + + public ExperimentPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public ExperimentPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public ExperimentPermissionPb setPermissionLevel(ExperimentPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ExperimentPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ExperimentPermissionPb that = (ExperimentPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ExperimentPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissions.java index e0257cf3e..a248d8375 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ExperimentPermissions.ExperimentPermissionsSerializer.class) +@JsonDeserialize(using = ExperimentPermissions.ExperimentPermissionsDeserializer.class) public class 
ExperimentPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public ExperimentPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + ExperimentPermissionsPb toPb() { + ExperimentPermissionsPb pb = new ExperimentPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static ExperimentPermissions fromPb(ExperimentPermissionsPb pb) { + ExperimentPermissions model = new ExperimentPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class ExperimentPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + ExperimentPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentPermissionsDeserializer + extends JsonDeserializer { + @Override + public ExperimentPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentPermissionsPb pb = mapper.readValue(p, ExperimentPermissionsPb.class); + return ExperimentPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java index 0bb142d35..f5e02f595 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ExperimentPermissionsDescription.ExperimentPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = ExperimentPermissionsDescription.ExperimentPermissionsDescriptionDeserializer.class) public class ExperimentPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; public ExperimentPermissionsDescription setDescription(String description) { @@ -57,4 +68,44 @@ public String toString() { .add("permissionLevel", 
permissionLevel) .toString(); } + + ExperimentPermissionsDescriptionPb toPb() { + ExperimentPermissionsDescriptionPb pb = new ExperimentPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ExperimentPermissionsDescription fromPb(ExperimentPermissionsDescriptionPb pb) { + ExperimentPermissionsDescription model = new ExperimentPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ExperimentPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + ExperimentPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public ExperimentPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentPermissionsDescriptionPb pb = + mapper.readValue(p, ExperimentPermissionsDescriptionPb.class); + return ExperimentPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescriptionPb.java new file mode 100755 index 000000000..25c6cd141 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExperimentPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private ExperimentPermissionLevel permissionLevel; + + public ExperimentPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ExperimentPermissionsDescriptionPb setPermissionLevel( + ExperimentPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ExperimentPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentPermissionsDescriptionPb that = (ExperimentPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ExperimentPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsPb.java new file mode 100755 index 000000000..23d67ff8a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExperimentPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public ExperimentPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ExperimentPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ExperimentPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentPermissionsPb that = (ExperimentPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(ExperimentPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequest.java index aa58e2809..65bbc3685 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequest.java @@ -4,19 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ExperimentPermissionsRequest.ExperimentPermissionsRequestSerializer.class) +@JsonDeserialize( + using = ExperimentPermissionsRequest.ExperimentPermissionsRequestDeserializer.class) public class ExperimentPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The experiment for which to get or manage permissions. 
*/ - @JsonIgnore private String experimentId; + private String experimentId; public ExperimentPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +68,43 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + ExperimentPermissionsRequestPb toPb() { + ExperimentPermissionsRequestPb pb = new ExperimentPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setExperimentId(experimentId); + + return pb; + } + + static ExperimentPermissionsRequest fromPb(ExperimentPermissionsRequestPb pb) { + ExperimentPermissionsRequest model = new ExperimentPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class ExperimentPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ExperimentPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public ExperimentPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentPermissionsRequestPb pb = mapper.readValue(p, ExperimentPermissionsRequestPb.class); + return ExperimentPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequestPb.java new file mode 100755 index 000000000..2d276a16b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExperimentPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String experimentId; + + public ExperimentPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ExperimentPermissionsRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentPermissionsRequestPb that = (ExperimentPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + 
return Objects.hash(accessControlList, experimentId); + } + + @Override + public String toString() { + return new ToStringer(ExperimentPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java index 125870ff3..a6f40443f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A tag for an experiment. */ @Generated +@JsonSerialize(using = ExperimentTag.ExperimentTagSerializer.class) +@JsonDeserialize(using = ExperimentTag.ExperimentTagDeserializer.class) public class ExperimentTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. 
*/ - @JsonProperty("value") private String value; public ExperimentTag setKey(String key) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(ExperimentTag.class).add("key", key).add("value", value).toString(); } + + ExperimentTagPb toPb() { + ExperimentTagPb pb = new ExperimentTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static ExperimentTag fromPb(ExperimentTagPb pb) { + ExperimentTag model = new ExperimentTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ExperimentTagSerializer extends JsonSerializer { + @Override + public void serialize(ExperimentTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExperimentTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExperimentTagDeserializer extends JsonDeserializer { + @Override + public ExperimentTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExperimentTagPb pb = mapper.readValue(p, ExperimentTagPb.class); + return ExperimentTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTagPb.java new file mode 100755 index 000000000..d02311476 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTagPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A tag for an experiment. 
*/ +@Generated +class ExperimentTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public ExperimentTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ExperimentTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExperimentTagPb that = (ExperimentTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(ExperimentTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index bcc631e68..33040a824 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -173,30 +173,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - String requestId) { - return getCredentialsForTraceDataDownload( - new GetCredentialsForTraceDataDownloadRequest().setRequestId(requestId)); - } - - /** Get credentials to download trace data. 
*/ - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - return impl.getCredentialsForTraceDataDownload(request); - } - - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - String requestId) { - return getCredentialsForTraceDataUpload( - new GetCredentialsForTraceDataUploadRequest().setRequestId(requestId)); - } - - /** Get credentials to upload trace data. */ - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - return impl.getCredentialsForTraceDataUpload(request); - } - public GetExperimentResponse getExperiment(String experimentId) { return getExperiment(new GetExperimentRequest().setExperimentId(experimentId)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index c228b7e72..f8aa9f5f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -21,7 +21,7 @@ public CreateExperimentResponse createExperiment(CreateExperiment request) { String path = "/api/2.0/mlflow/experiments/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateExperimentResponse.class); @@ -35,7 +35,7 @@ public CreateLoggedModelResponse createLoggedModel(CreateLoggedModelRequest requ String path = "/api/2.0/mlflow/logged-models"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateLoggedModelResponse.class); @@ -49,7 +49,7 @@ public CreateRunResponse createRun(CreateRun request) { String path = "/api/2.0/mlflow/runs/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateRunResponse.class); @@ -63,7 +63,7 @@ public void deleteExperiment(DeleteExperiment request) { String path = "/api/2.0/mlflow/experiments/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteExperimentResponse.class); @@ -77,7 +77,7 @@ public void deleteLoggedModel(DeleteLoggedModelRequest request) { String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteLoggedModelResponse.class); } catch (IOException e) { @@ -92,7 +92,7 @@ public void deleteLoggedModelTag(DeleteLoggedModelTagRequest request) { "/api/2.0/mlflow/logged-models/%s/tags/%s", request.getModelId(), request.getTagKey()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteLoggedModelTagResponse.class); } catch (IOException e) { @@ -105,7 +105,7 @@ public void deleteRun(DeleteRun request) { String path = 
"/api/2.0/mlflow/runs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteRunResponse.class); @@ -119,7 +119,7 @@ public DeleteRunsResponse deleteRuns(DeleteRuns request) { String path = "/api/2.0/mlflow/databricks/runs/delete-runs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DeleteRunsResponse.class); @@ -133,7 +133,7 @@ public void deleteTag(DeleteTag request) { String path = "/api/2.0/mlflow/runs/delete-tag"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteTagResponse.class); @@ -147,7 +147,7 @@ public FinalizeLoggedModelResponse finalizeLoggedModel(FinalizeLoggedModelReques String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FinalizeLoggedModelResponse.class); @@ -161,7 +161,7 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { String path = "/api/2.0/mlflow/experiments/get-by-name"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetExperimentByNameResponse.class); } catch (IOException e) { @@ -169,44 +169,12 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { } } - @Override - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetExperimentResponse.class); } catch (IOException e) { @@ -219,7 +187,7 @@ public GetMetricHistoryResponse getHistory(GetHistoryRequest request) { String path = "/api/2.0/mlflow/metrics/get-history"; try { Request req = new Request("GET", 
path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetMetricHistoryResponse.class); } catch (IOException e) { @@ -232,7 +200,7 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) { String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetLoggedModelResponse.class); } catch (IOException e) { @@ -248,7 +216,7 @@ public GetExperimentPermissionLevelsResponse getPermissionLevels( "/api/2.0/permissions/experiments/%s/permissionLevels", request.getExperimentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetExperimentPermissionLevelsResponse.class); } catch (IOException e) { @@ -261,7 +229,7 @@ public ExperimentPermissions getPermissions(GetExperimentPermissionsRequest requ String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ExperimentPermissions.class); } catch (IOException e) { @@ -274,7 +242,7 @@ public GetRunResponse getRun(GetRunRequest request) { String path = "/api/2.0/mlflow/runs/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetRunResponse.class); } catch (IOException e) { @@ -287,7 +255,7 @@ public ListArtifactsResponse 
listArtifacts(ListArtifactsRequest request) { String path = "/api/2.0/mlflow/artifacts/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListArtifactsResponse.class); } catch (IOException e) { @@ -300,7 +268,7 @@ public ListExperimentsResponse listExperiments(ListExperimentsRequest request) { String path = "/api/2.0/mlflow/experiments/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListExperimentsResponse.class); } catch (IOException e) { @@ -316,7 +284,7 @@ public ListLoggedModelArtifactsResponse listLoggedModelArtifacts( "/api/2.0/mlflow/logged-models/%s/artifacts/directories", request.getModelId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListLoggedModelArtifactsResponse.class); } catch (IOException e) { @@ -329,7 +297,7 @@ public void logBatch(LogBatch request) { String path = "/api/2.0/mlflow/runs/log-batch"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogBatchResponse.class); @@ -343,7 +311,7 @@ public void logInputs(LogInputs request) { String path = "/api/2.0/mlflow/runs/log-inputs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, 
LogInputsResponse.class); @@ -357,7 +325,7 @@ public void logLoggedModelParams(LogLoggedModelParamsRequest request) { String path = String.format("/api/2.0/mlflow/logged-models/%s/params", request.getModelId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogLoggedModelParamsRequestResponse.class); @@ -371,7 +339,7 @@ public void logMetric(LogMetric request) { String path = "/api/2.0/mlflow/runs/log-metric"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogMetricResponse.class); @@ -385,7 +353,7 @@ public void logModel(LogModel request) { String path = "/api/2.0/mlflow/runs/log-model"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogModelResponse.class); @@ -399,7 +367,7 @@ public void logOutputs(LogOutputsRequest request) { String path = "/api/2.0/mlflow/runs/outputs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogOutputsResponse.class); @@ -413,7 +381,7 @@ public void logParam(LogParam request) { String path = "/api/2.0/mlflow/runs/log-parameter"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, 
request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, LogParamResponse.class); @@ -427,7 +395,7 @@ public void restoreExperiment(RestoreExperiment request) { String path = "/api/2.0/mlflow/experiments/restore"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, RestoreExperimentResponse.class); @@ -441,7 +409,7 @@ public void restoreRun(RestoreRun request) { String path = "/api/2.0/mlflow/runs/restore"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, RestoreRunResponse.class); @@ -455,7 +423,7 @@ public RestoreRunsResponse restoreRuns(RestoreRuns request) { String path = "/api/2.0/mlflow/databricks/runs/restore-runs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RestoreRunsResponse.class); @@ -469,7 +437,7 @@ public SearchExperimentsResponse searchExperiments(SearchExperiments request) { String path = "/api/2.0/mlflow/experiments/search"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SearchExperimentsResponse.class); @@ -483,7 +451,7 
@@ public SearchLoggedModelsResponse searchLoggedModels(SearchLoggedModelsRequest r String path = "/api/2.0/mlflow/logged-models/search"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SearchLoggedModelsResponse.class); @@ -497,7 +465,7 @@ public SearchRunsResponse searchRuns(SearchRuns request) { String path = "/api/2.0/mlflow/runs/search"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SearchRunsResponse.class); @@ -511,7 +479,7 @@ public void setExperimentTag(SetExperimentTag request) { String path = "/api/2.0/mlflow/experiments/set-experiment-tag"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetExperimentTagResponse.class); @@ -525,7 +493,7 @@ public void setLoggedModelTags(SetLoggedModelTagsRequest request) { String path = String.format("/api/2.0/mlflow/logged-models/%s/tags", request.getModelId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetLoggedModelTagsResponse.class); @@ -539,7 +507,7 @@ public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request String path = 
String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ExperimentPermissions.class); @@ -553,7 +521,7 @@ public void setTag(SetTag request) { String path = "/api/2.0/mlflow/runs/set-tag"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetTagResponse.class); @@ -567,7 +535,7 @@ public void updateExperiment(UpdateExperiment request) { String path = "/api/2.0/mlflow/experiments/update"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateExperimentResponse.class); @@ -581,7 +549,7 @@ public ExperimentPermissions updatePermissions(ExperimentPermissionsRequest requ String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ExperimentPermissions.class); @@ -595,7 +563,7 @@ public UpdateRunResponse updateRun(UpdateRun request) { String path = "/api/2.0/mlflow/runs/update"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateRunResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index abafed87e..06aae23be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -96,14 +96,6 @@ FinalizeLoggedModelResponse finalizeLoggedModel( */ GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); - /** Get credentials to download trace data. */ - GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest); - - /** Get credentials to upload trace data. */ - GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest); - /** * Get an experiment. 
* diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java index 1b6cef6c6..7953a91a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Metadata of a single artifact file or directory. */ @Generated +@JsonSerialize(using = FileInfo.FileInfoSerializer.class) +@JsonDeserialize(using = FileInfo.FileInfoDeserializer.class) public class FileInfo { /** The size in bytes of the file. Unset for directories. */ - @JsonProperty("file_size") private Long fileSize; /** Whether the path is a directory. */ - @JsonProperty("is_dir") private Boolean isDir; /** The path relative to the root artifact directory run. 
*/ - @JsonProperty("path") private String path; public FileInfo setFileSize(Long fileSize) { @@ -72,4 +80,41 @@ public String toString() { .add("path", path) .toString(); } + + FileInfoPb toPb() { + FileInfoPb pb = new FileInfoPb(); + pb.setFileSize(fileSize); + pb.setIsDir(isDir); + pb.setPath(path); + + return pb; + } + + static FileInfo fromPb(FileInfoPb pb) { + FileInfo model = new FileInfo(); + model.setFileSize(pb.getFileSize()); + model.setIsDir(pb.getIsDir()); + model.setPath(pb.getPath()); + + return model; + } + + public static class FileInfoSerializer extends JsonSerializer { + @Override + public void serialize(FileInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileInfoDeserializer extends JsonDeserializer { + @Override + public FileInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileInfoPb pb = mapper.readValue(p, FileInfoPb.class); + return FileInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfoPb.java new file mode 100755 index 000000000..6755bbd3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfoPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metadata of a single artifact file or directory. 
*/ +@Generated +class FileInfoPb { + @JsonProperty("file_size") + private Long fileSize; + + @JsonProperty("is_dir") + private Boolean isDir; + + @JsonProperty("path") + private String path; + + public FileInfoPb setFileSize(Long fileSize) { + this.fileSize = fileSize; + return this; + } + + public Long getFileSize() { + return fileSize; + } + + public FileInfoPb setIsDir(Boolean isDir) { + this.isDir = isDir; + return this; + } + + public Boolean getIsDir() { + return isDir; + } + + public FileInfoPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileInfoPb that = (FileInfoPb) o; + return Objects.equals(fileSize, that.fileSize) + && Objects.equals(isDir, that.isDir) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(fileSize, isDir, path); + } + + @Override + public String toString() { + return new ToStringer(FileInfoPb.class) + .add("fileSize", fileSize) + .add("isDir", isDir) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java index e57f1bbae..20d801642 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FinalizeLoggedModelRequest.FinalizeLoggedModelRequestSerializer.class) +@JsonDeserialize(using = FinalizeLoggedModelRequest.FinalizeLoggedModelRequestDeserializer.class) public class FinalizeLoggedModelRequest { /** The ID of the logged model to finalize. */ - @JsonIgnore private String modelId; + private String modelId; /** * Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that - * something went wrong when logging the model weights / agent code). + * something went wrong when logging the model weights / agent code. 
*/ - @JsonProperty("status") private LoggedModelStatus status; public FinalizeLoggedModelRequest setModelId(String modelId) { @@ -58,4 +67,43 @@ public String toString() { .add("status", status) .toString(); } + + FinalizeLoggedModelRequestPb toPb() { + FinalizeLoggedModelRequestPb pb = new FinalizeLoggedModelRequestPb(); + pb.setModelId(modelId); + pb.setStatus(status); + + return pb; + } + + static FinalizeLoggedModelRequest fromPb(FinalizeLoggedModelRequestPb pb) { + FinalizeLoggedModelRequest model = new FinalizeLoggedModelRequest(); + model.setModelId(pb.getModelId()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class FinalizeLoggedModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + FinalizeLoggedModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FinalizeLoggedModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FinalizeLoggedModelRequestDeserializer + extends JsonDeserializer { + @Override + public FinalizeLoggedModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FinalizeLoggedModelRequestPb pb = mapper.readValue(p, FinalizeLoggedModelRequestPb.class); + return FinalizeLoggedModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequestPb.java new file mode 100755 index 000000000..315335722 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FinalizeLoggedModelRequestPb { + @JsonIgnore private String modelId; + + @JsonProperty("status") + private LoggedModelStatus status; + + public FinalizeLoggedModelRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public FinalizeLoggedModelRequestPb setStatus(LoggedModelStatus status) { + this.status = status; + return this; + } + + public LoggedModelStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FinalizeLoggedModelRequestPb that = (FinalizeLoggedModelRequestPb) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, status); + } + + @Override + public String toString() { + return new ToStringer(FinalizeLoggedModelRequestPb.class) + .add("modelId", modelId) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java index 295478ce5..7efd8dc32 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FinalizeLoggedModelResponse.FinalizeLoggedModelResponseSerializer.class) +@JsonDeserialize(using = FinalizeLoggedModelResponse.FinalizeLoggedModelResponseDeserializer.class) public class FinalizeLoggedModelResponse { /** The updated logged model. */ - @JsonProperty("model") private LoggedModel model; public FinalizeLoggedModelResponse setModel(LoggedModel model) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(FinalizeLoggedModelResponse.class).add("model", model).toString(); } + + FinalizeLoggedModelResponsePb toPb() { + FinalizeLoggedModelResponsePb pb = new FinalizeLoggedModelResponsePb(); + pb.setModel(model); + + return pb; + } + + static FinalizeLoggedModelResponse fromPb(FinalizeLoggedModelResponsePb pb) { + FinalizeLoggedModelResponse model = new FinalizeLoggedModelResponse(); + model.setModel(pb.getModel()); + + return model; + } + + public static class FinalizeLoggedModelResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + FinalizeLoggedModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FinalizeLoggedModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FinalizeLoggedModelResponseDeserializer + extends JsonDeserializer { + @Override + public FinalizeLoggedModelResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FinalizeLoggedModelResponsePb pb = mapper.readValue(p, FinalizeLoggedModelResponsePb.class); + return FinalizeLoggedModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponsePb.java new file mode 100755 index 000000000..739b2c2c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FinalizeLoggedModelResponsePb { + @JsonProperty("model") + private LoggedModel model; + + public FinalizeLoggedModelResponsePb setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FinalizeLoggedModelResponsePb that = (FinalizeLoggedModelResponsePb) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(FinalizeLoggedModelResponsePb.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java index 
02d5fd61b..a4cf83c4e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Represents a forecasting experiment with its unique identifier, URL, and state. */ @Generated +@JsonSerialize(using = ForecastingExperiment.ForecastingExperimentSerializer.class) +@JsonDeserialize(using = ForecastingExperiment.ForecastingExperimentDeserializer.class) public class ForecastingExperiment { /** The unique ID for the forecasting experiment. */ - @JsonProperty("experiment_id") private String experimentId; /** The URL to the forecasting experiment page. */ - @JsonProperty("experiment_page_url") private String experimentPageUrl; /** The current state of the forecasting experiment. 
*/ - @JsonProperty("state") private ForecastingExperimentState state; public ForecastingExperiment setExperimentId(String experimentId) { @@ -72,4 +80,45 @@ public String toString() { .add("state", state) .toString(); } + + ForecastingExperimentPb toPb() { + ForecastingExperimentPb pb = new ForecastingExperimentPb(); + pb.setExperimentId(experimentId); + pb.setExperimentPageUrl(experimentPageUrl); + pb.setState(state); + + return pb; + } + + static ForecastingExperiment fromPb(ForecastingExperimentPb pb) { + ForecastingExperiment model = new ForecastingExperiment(); + model.setExperimentId(pb.getExperimentId()); + model.setExperimentPageUrl(pb.getExperimentPageUrl()); + model.setState(pb.getState()); + + return model; + } + + public static class ForecastingExperimentSerializer + extends JsonSerializer { + @Override + public void serialize( + ForecastingExperiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ForecastingExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ForecastingExperimentDeserializer + extends JsonDeserializer { + @Override + public ForecastingExperiment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ForecastingExperimentPb pb = mapper.readValue(p, ForecastingExperimentPb.class); + return ForecastingExperiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentPb.java new file mode 100755 index 000000000..d3546bb33 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a forecasting experiment with its unique identifier, URL, and state. */ +@Generated +class ForecastingExperimentPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("experiment_page_url") + private String experimentPageUrl; + + @JsonProperty("state") + private ForecastingExperimentState state; + + public ForecastingExperimentPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public ForecastingExperimentPb setExperimentPageUrl(String experimentPageUrl) { + this.experimentPageUrl = experimentPageUrl; + return this; + } + + public String getExperimentPageUrl() { + return experimentPageUrl; + } + + public ForecastingExperimentPb setState(ForecastingExperimentState state) { + this.state = state; + return this; + } + + public ForecastingExperimentState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForecastingExperimentPb that = (ForecastingExperimentPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(experimentPageUrl, that.experimentPageUrl) + && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, experimentPageUrl, state); + } + + @Override + public String toString() { + return new ToStringer(ForecastingExperimentPb.class) + .add("experimentId", experimentId) + .add("experimentPageUrl", experimentPageUrl) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java index 686aad0f4..f662836e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java @@ -22,7 +22,7 @@ public CreateForecastingExperimentResponse createExperiment( String path = "/api/2.0/automl/create-forecasting-experiment"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateForecastingExperimentResponse.class); @@ -37,7 +37,7 @@ public ForecastingExperiment getExperiment(GetForecastingExperimentRequest reque String.format("/api/2.0/automl/get-forecasting-experiment/%s", request.getExperimentId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ForecastingExperiment.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java index b3e7b7da2..bda334028 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an experiment by name */ @Generated +@JsonSerialize(using = GetByNameRequest.GetByNameRequestSerializer.class) +@JsonDeserialize(using = GetByNameRequest.GetByNameRequestDeserializer.class) public class GetByNameRequest { /** Name of the associated experiment. */ - @JsonIgnore - @QueryParam("experiment_name") private String experimentName; public GetByNameRequest setExperimentName(String experimentName) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetByNameRequest.class).add("experimentName", experimentName).toString(); } + + GetByNameRequestPb toPb() { + GetByNameRequestPb pb = new GetByNameRequestPb(); + pb.setExperimentName(experimentName); + + return pb; + } + + static GetByNameRequest fromPb(GetByNameRequestPb pb) { + GetByNameRequest model = new GetByNameRequest(); + model.setExperimentName(pb.getExperimentName()); + + return model; + } + + public static class GetByNameRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetByNameRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetByNameRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetByNameRequestDeserializer extends JsonDeserializer { + @Override + public GetByNameRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetByNameRequestPb pb = mapper.readValue(p, GetByNameRequestPb.class); + return GetByNameRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequestPb.java new file mode 100755 index 000000000..d6bbb4075 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an experiment by name */ +@Generated +class GetByNameRequestPb { + @JsonIgnore + @QueryParam("experiment_name") + private String experimentName; + + public GetByNameRequestPb setExperimentName(String experimentName) { + this.experimentName = experimentName; + return this; + } + + public String getExperimentName() { + return experimentName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetByNameRequestPb that = (GetByNameRequestPb) o; + return Objects.equals(experimentName, that.experimentName); + } + + @Override + public int hashCode() { + return Objects.hash(experimentName); + } + + @Override + public String toString() { + return new ToStringer(GetByNameRequestPb.class) + .add("experimentName", experimentName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java deleted file mode 100755 
index 42aac217e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to download trace data */ -@Generated -public class GetCredentialsForTraceDataDownloadRequest { - /** The ID of the trace to fetch artifact download credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadRequest that = (GetCredentialsForTraceDataDownloadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java deleted file mode 100755 index 839e04921..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class GetCredentialsForTraceDataDownloadResponse { - /** The artifact download credentials for the specified trace data. */ - @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; - - public GetCredentialsForTraceDataDownloadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { - this.credentialInfo = credentialInfo; - return this; - } - - public ArtifactCredentialInfo getCredentialInfo() { - return credentialInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadResponse that = - (GetCredentialsForTraceDataDownloadResponse) o; - return Objects.equals(credentialInfo, that.credentialInfo); - } - - @Override - public int hashCode() { - return Objects.hash(credentialInfo); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class) - .add("credentialInfo", credentialInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java deleted file mode 100755 index e7c6d452c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to upload trace data */ -@Generated -public class GetCredentialsForTraceDataUploadRequest { - /** The ID of the trace to fetch artifact upload credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadRequest that = (GetCredentialsForTraceDataUploadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java index c8c778488..32ffed69e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetExperimentByNameResponse.GetExperimentByNameResponseSerializer.class) +@JsonDeserialize(using = GetExperimentByNameResponse.GetExperimentByNameResponseDeserializer.class) public class GetExperimentByNameResponse { /** Experiment details. */ - @JsonProperty("experiment") private Experiment experiment; public GetExperimentByNameResponse setExperiment(Experiment experiment) { @@ -41,4 +51,41 @@ public String toString() { .add("experiment", experiment) .toString(); } + + GetExperimentByNameResponsePb toPb() { + GetExperimentByNameResponsePb pb = new GetExperimentByNameResponsePb(); + pb.setExperiment(experiment); + + return pb; + } + + static GetExperimentByNameResponse fromPb(GetExperimentByNameResponsePb pb) { + GetExperimentByNameResponse model = new GetExperimentByNameResponse(); + model.setExperiment(pb.getExperiment()); + + return model; + } + + public static class GetExperimentByNameResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExperimentByNameResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentByNameResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentByNameResponseDeserializer + extends JsonDeserializer { + @Override + public GetExperimentByNameResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentByNameResponsePb pb = mapper.readValue(p, GetExperimentByNameResponsePb.class); + return GetExperimentByNameResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponsePb.java new file mode 100755 index 000000000..359ab3f4b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetExperimentByNameResponsePb { + @JsonProperty("experiment") + private Experiment experiment; + + public GetExperimentByNameResponsePb setExperiment(Experiment experiment) { + this.experiment = experiment; + return this; + } + + public Experiment getExperiment() { + return experiment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentByNameResponsePb that = (GetExperimentByNameResponsePb) o; + return Objects.equals(experiment, that.experiment); + } + + @Override + public int hashCode() { + return Objects.hash(experiment); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentByNameResponsePb.class) + .add("experiment", experiment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java index 5f1692921..e34566cc1 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get experiment permission levels */ @Generated +@JsonSerialize( + using = + GetExperimentPermissionLevelsRequest.GetExperimentPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = + GetExperimentPermissionLevelsRequest.GetExperimentPermissionLevelsRequestDeserializer.class) public class GetExperimentPermissionLevelsRequest { /** The experiment for which to get or manage permissions. 
*/ - @JsonIgnore private String experimentId; + private String experimentId; public GetExperimentPermissionLevelsRequest setExperimentId(String experimentId) { this.experimentId = experimentId; @@ -41,4 +56,42 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + GetExperimentPermissionLevelsRequestPb toPb() { + GetExperimentPermissionLevelsRequestPb pb = new GetExperimentPermissionLevelsRequestPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static GetExperimentPermissionLevelsRequest fromPb(GetExperimentPermissionLevelsRequestPb pb) { + GetExperimentPermissionLevelsRequest model = new GetExperimentPermissionLevelsRequest(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class GetExperimentPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExperimentPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetExperimentPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentPermissionLevelsRequestPb pb = + mapper.readValue(p, GetExperimentPermissionLevelsRequestPb.class); + return GetExperimentPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequestPb.java new file mode 100755 index 000000000..9a9c19983 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get experiment permission levels */ +@Generated +class GetExperimentPermissionLevelsRequestPb { + @JsonIgnore private String experimentId; + + public GetExperimentPermissionLevelsRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentPermissionLevelsRequestPb that = (GetExperimentPermissionLevelsRequestPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentPermissionLevelsRequestPb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java index 350aaedfb..79da0f805 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetExperimentPermissionLevelsResponse.GetExperimentPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetExperimentPermissionLevelsResponse.GetExperimentPermissionLevelsResponseDeserializer + .class) public class GetExperimentPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetExperimentPermissionLevelsResponse setPermissionLevels( @@ -43,4 +58,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetExperimentPermissionLevelsResponsePb toPb() { + GetExperimentPermissionLevelsResponsePb pb = new GetExperimentPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetExperimentPermissionLevelsResponse 
fromPb(GetExperimentPermissionLevelsResponsePb pb) { + GetExperimentPermissionLevelsResponse model = new GetExperimentPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetExperimentPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExperimentPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetExperimentPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentPermissionLevelsResponsePb pb = + mapper.readValue(p, GetExperimentPermissionLevelsResponsePb.class); + return GetExperimentPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponsePb.java new file mode 100755 index 000000000..a39f3f901 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetExperimentPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetExperimentPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentPermissionLevelsResponsePb that = (GetExperimentPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java index 39f0fcaba..4b4c26b48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get experiment permissions */ @Generated +@JsonSerialize( + using = GetExperimentPermissionsRequest.GetExperimentPermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetExperimentPermissionsRequest.GetExperimentPermissionsRequestDeserializer.class) public class GetExperimentPermissionsRequest { /** The experiment for which to get or manage permissions. */ - @JsonIgnore private String experimentId; + private String experimentId; public GetExperimentPermissionsRequest setExperimentId(String experimentId) { this.experimentId = experimentId; @@ -41,4 +54,42 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + GetExperimentPermissionsRequestPb toPb() { + GetExperimentPermissionsRequestPb pb = new GetExperimentPermissionsRequestPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static GetExperimentPermissionsRequest fromPb(GetExperimentPermissionsRequestPb pb) { + GetExperimentPermissionsRequest model = new GetExperimentPermissionsRequest(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class GetExperimentPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExperimentPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetExperimentPermissionsRequest 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentPermissionsRequestPb pb = + mapper.readValue(p, GetExperimentPermissionsRequestPb.class); + return GetExperimentPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequestPb.java new file mode 100755 index 000000000..742d2d84a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get experiment permissions */ +@Generated +class GetExperimentPermissionsRequestPb { + @JsonIgnore private String experimentId; + + public GetExperimentPermissionsRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentPermissionsRequestPb that = (GetExperimentPermissionsRequestPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentPermissionsRequestPb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java index 459b4a3ff..a6c7197e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an experiment */ @Generated +@JsonSerialize(using = GetExperimentRequest.GetExperimentRequestSerializer.class) +@JsonDeserialize(using = GetExperimentRequest.GetExperimentRequestDeserializer.class) public class GetExperimentRequest { /** ID of the associated experiment. 
*/ - @JsonIgnore - @QueryParam("experiment_id") private String experimentId; public GetExperimentRequest setExperimentId(String experimentId) { @@ -42,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetExperimentRequest.class).add("experimentId", experimentId).toString(); } + + GetExperimentRequestPb toPb() { + GetExperimentRequestPb pb = new GetExperimentRequestPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static GetExperimentRequest fromPb(GetExperimentRequestPb pb) { + GetExperimentRequest model = new GetExperimentRequest(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class GetExperimentRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GetExperimentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentRequestDeserializer + extends JsonDeserializer { + @Override + public GetExperimentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentRequestPb pb = mapper.readValue(p, GetExperimentRequestPb.class); + return GetExperimentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequestPb.java new file mode 100755 index 000000000..8db9b36c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an experiment */ +@Generated +class GetExperimentRequestPb { + @JsonIgnore + @QueryParam("experiment_id") + private String experimentId; + + public GetExperimentRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentRequestPb that = (GetExperimentRequestPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentRequestPb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java index 08cb4ad51..3bed73ccc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetExperimentResponse.GetExperimentResponseSerializer.class) +@JsonDeserialize(using = GetExperimentResponse.GetExperimentResponseDeserializer.class) public class GetExperimentResponse { /** Experiment details. */ - @JsonProperty("experiment") private Experiment experiment; public GetExperimentResponse setExperiment(Experiment experiment) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetExperimentResponse.class).add("experiment", experiment).toString(); } + + GetExperimentResponsePb toPb() { + GetExperimentResponsePb pb = new GetExperimentResponsePb(); + pb.setExperiment(experiment); + + return pb; + } + + static GetExperimentResponse fromPb(GetExperimentResponsePb pb) { + GetExperimentResponse model = new GetExperimentResponse(); + model.setExperiment(pb.getExperiment()); + + return model; + } + + public static class GetExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public GetExperimentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetExperimentResponsePb pb = mapper.readValue(p, GetExperimentResponsePb.class); + return GetExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponsePb.java new file mode 100755 index 000000000..f64364d93 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetExperimentResponsePb { + @JsonProperty("experiment") + private Experiment experiment; + + public GetExperimentResponsePb setExperiment(Experiment experiment) { + this.experiment = experiment; + return this; + } + + public Experiment getExperiment() { + return experiment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentResponsePb that = (GetExperimentResponsePb) o; + return Objects.equals(experiment, that.experiment); + } + + @Override + public int hashCode() { + return Objects.hash(experiment); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentResponsePb.class).add("experiment", experiment).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java index bf12ad25a..e67a72251 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a forecasting experiment */ @Generated +@JsonSerialize( + using = GetForecastingExperimentRequest.GetForecastingExperimentRequestSerializer.class) +@JsonDeserialize( + using = GetForecastingExperimentRequest.GetForecastingExperimentRequestDeserializer.class) public class GetForecastingExperimentRequest { /** The unique ID of a forecasting experiment */ - @JsonIgnore private String experimentId; + private String experimentId; public GetForecastingExperimentRequest setExperimentId(String experimentId) { this.experimentId = experimentId; @@ -41,4 +54,42 @@ public String toString() { .add("experimentId", experimentId) .toString(); } + + GetForecastingExperimentRequestPb toPb() { + GetForecastingExperimentRequestPb pb = new GetForecastingExperimentRequestPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static GetForecastingExperimentRequest fromPb(GetForecastingExperimentRequestPb pb) { + GetForecastingExperimentRequest model = new GetForecastingExperimentRequest(); + model.setExperimentId(pb.getExperimentId()); + + 
return model; + } + + public static class GetForecastingExperimentRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetForecastingExperimentRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetForecastingExperimentRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetForecastingExperimentRequestDeserializer + extends JsonDeserializer { + @Override + public GetForecastingExperimentRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetForecastingExperimentRequestPb pb = + mapper.readValue(p, GetForecastingExperimentRequestPb.class); + return GetForecastingExperimentRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequestPb.java new file mode 100755 index 000000000..e8b6c25aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a forecasting experiment */ +@Generated +class GetForecastingExperimentRequestPb { + @JsonIgnore private String experimentId; + + public GetForecastingExperimentRequestPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetForecastingExperimentRequestPb that = (GetForecastingExperimentRequestPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(GetForecastingExperimentRequestPb.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java index 60f597c20..616758dbb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java @@ -3,43 +3,43 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get metric history for a run */ @Generated +@JsonSerialize(using = GetHistoryRequest.GetHistoryRequestSerializer.class) +@JsonDeserialize(using = GetHistoryRequest.GetHistoryRequestDeserializer.class) public class GetHistoryRequest { /** * Maximum number of Metric records to return per paginated request. Default is set to 25,000. If * set higher than 25,000, a request Exception will be raised. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Name of the metric. */ - @JsonIgnore - @QueryParam("metric_key") private String metricKey; /** Token indicating the page of metric histories to fetch. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** ID of the run from which to fetch metric values. Must be provided. */ - @JsonIgnore - @QueryParam("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field * will be removed in a future MLflow version. 
*/ - @JsonIgnore - @QueryParam("run_uuid") private String runUuid; public GetHistoryRequest setMaxResults(Long maxResults) { @@ -114,4 +114,46 @@ public String toString() { .add("runUuid", runUuid) .toString(); } + + GetHistoryRequestPb toPb() { + GetHistoryRequestPb pb = new GetHistoryRequestPb(); + pb.setMaxResults(maxResults); + pb.setMetricKey(metricKey); + pb.setPageToken(pageToken); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + + return pb; + } + + static GetHistoryRequest fromPb(GetHistoryRequestPb pb) { + GetHistoryRequest model = new GetHistoryRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setMetricKey(pb.getMetricKey()); + model.setPageToken(pb.getPageToken()); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + + return model; + } + + public static class GetHistoryRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetHistoryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetHistoryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetHistoryRequestDeserializer extends JsonDeserializer { + @Override + public GetHistoryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetHistoryRequestPb pb = mapper.readValue(p, GetHistoryRequestPb.class); + return GetHistoryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequestPb.java new file mode 100755 index 000000000..7e46015a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequestPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get metric history for a run */ +@Generated +class GetHistoryRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("metric_key") + private String metricKey; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("run_id") + private String runId; + + @JsonIgnore + @QueryParam("run_uuid") + private String runUuid; + + public GetHistoryRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public GetHistoryRequestPb setMetricKey(String metricKey) { + this.metricKey = metricKey; + return this; + } + + public String getMetricKey() { + return metricKey; + } + + public GetHistoryRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public GetHistoryRequestPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public GetHistoryRequestPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetHistoryRequestPb that = (GetHistoryRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(metricKey, that.metricKey) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(runId, that.runId) + && Objects.equals(runUuid, that.runUuid); + } + + @Override + public int hashCode() { + return 
Objects.hash(maxResults, metricKey, pageToken, runId, runUuid); + } + + @Override + public String toString() { + return new ToStringer(GetHistoryRequestPb.class) + .add("maxResults", maxResults) + .add("metricKey", metricKey) + .add("pageToken", pageToken) + .add("runId", runId) + .add("runUuid", runUuid) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java index c861a3c78..14ae1e91c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetLatestVersionsRequest.GetLatestVersionsRequestSerializer.class) +@JsonDeserialize(using = GetLatestVersionsRequest.GetLatestVersionsRequestDeserializer.class) public class GetLatestVersionsRequest { /** Registered model unique name identifier. */ - @JsonProperty("name") private String name; /** List of stages. 
*/ - @JsonProperty("stages") private Collection stages; public GetLatestVersionsRequest setName(String name) { @@ -56,4 +65,43 @@ public String toString() { .add("stages", stages) .toString(); } + + GetLatestVersionsRequestPb toPb() { + GetLatestVersionsRequestPb pb = new GetLatestVersionsRequestPb(); + pb.setName(name); + pb.setStages(stages); + + return pb; + } + + static GetLatestVersionsRequest fromPb(GetLatestVersionsRequestPb pb) { + GetLatestVersionsRequest model = new GetLatestVersionsRequest(); + model.setName(pb.getName()); + model.setStages(pb.getStages()); + + return model; + } + + public static class GetLatestVersionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLatestVersionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetLatestVersionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLatestVersionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetLatestVersionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLatestVersionsRequestPb pb = mapper.readValue(p, GetLatestVersionsRequestPb.class); + return GetLatestVersionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequestPb.java new file mode 100755 index 000000000..f1297eb75 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetLatestVersionsRequestPb { + @JsonProperty("name") + private String name; + + @JsonProperty("stages") + private Collection stages; + + public GetLatestVersionsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetLatestVersionsRequestPb setStages(Collection stages) { + this.stages = stages; + return this; + } + + public Collection getStages() { + return stages; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLatestVersionsRequestPb that = (GetLatestVersionsRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(stages, that.stages); + } + + @Override + public int hashCode() { + return Objects.hash(name, stages); + } + + @Override + public String toString() { + return new ToStringer(GetLatestVersionsRequestPb.class) + .add("name", name) + .add("stages", stages) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java index 6ab2f29c0..55fc3c734 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetLatestVersionsResponse.GetLatestVersionsResponseSerializer.class) +@JsonDeserialize(using = GetLatestVersionsResponse.GetLatestVersionsResponseDeserializer.class) public class GetLatestVersionsResponse { /** * Latest version models for each requests stage. Only return models with current `READY` status. * If no `stages` provided, returns the latest version for each stage, including `"None"`. */ - @JsonProperty("model_versions") private Collection modelVersions; public GetLatestVersionsResponse setModelVersions(Collection modelVersions) { @@ -45,4 +55,41 @@ public String toString() { .add("modelVersions", modelVersions) .toString(); } + + GetLatestVersionsResponsePb toPb() { + GetLatestVersionsResponsePb pb = new GetLatestVersionsResponsePb(); + pb.setModelVersions(modelVersions); + + return pb; + } + + static GetLatestVersionsResponse fromPb(GetLatestVersionsResponsePb pb) { + GetLatestVersionsResponse model = new GetLatestVersionsResponse(); + model.setModelVersions(pb.getModelVersions()); + + return model; + } + + public static class GetLatestVersionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLatestVersionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetLatestVersionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLatestVersionsResponseDeserializer + extends JsonDeserializer { + @Override + 
public GetLatestVersionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLatestVersionsResponsePb pb = mapper.readValue(p, GetLatestVersionsResponsePb.class); + return GetLatestVersionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponsePb.java new file mode 100755 index 000000000..d063ef064 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetLatestVersionsResponsePb { + @JsonProperty("model_versions") + private Collection modelVersions; + + public GetLatestVersionsResponsePb setModelVersions(Collection modelVersions) { + this.modelVersions = modelVersions; + return this; + } + + public Collection getModelVersions() { + return modelVersions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLatestVersionsResponsePb that = (GetLatestVersionsResponsePb) o; + return Objects.equals(modelVersions, that.modelVersions); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersions); + } + + @Override + public String toString() { + return new ToStringer(GetLatestVersionsResponsePb.class) + .add("modelVersions", modelVersions) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java index e2f070898..77be54911 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a logged model */ @Generated +@JsonSerialize(using = GetLoggedModelRequest.GetLoggedModelRequestSerializer.class) +@JsonDeserialize(using = GetLoggedModelRequest.GetLoggedModelRequestDeserializer.class) public class GetLoggedModelRequest { /** The ID of the logged model to retrieve. 
*/ - @JsonIgnore private String modelId; + private String modelId; public GetLoggedModelRequest setModelId(String modelId) { this.modelId = modelId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetLoggedModelRequest.class).add("modelId", modelId).toString(); } + + GetLoggedModelRequestPb toPb() { + GetLoggedModelRequestPb pb = new GetLoggedModelRequestPb(); + pb.setModelId(modelId); + + return pb; + } + + static GetLoggedModelRequest fromPb(GetLoggedModelRequestPb pb) { + GetLoggedModelRequest model = new GetLoggedModelRequest(); + model.setModelId(pb.getModelId()); + + return model; + } + + public static class GetLoggedModelRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLoggedModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetLoggedModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLoggedModelRequestDeserializer + extends JsonDeserializer { + @Override + public GetLoggedModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLoggedModelRequestPb pb = mapper.readValue(p, GetLoggedModelRequestPb.class); + return GetLoggedModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequestPb.java new file mode 100755 index 000000000..77ace2b32 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a logged model */ +@Generated +class GetLoggedModelRequestPb { + @JsonIgnore private String modelId; + + public GetLoggedModelRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelRequestPb that = (GetLoggedModelRequestPb) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelRequestPb.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java index afb45504b..ab6732573 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetLoggedModelResponse.GetLoggedModelResponseSerializer.class) +@JsonDeserialize(using = GetLoggedModelResponse.GetLoggedModelResponseDeserializer.class) public class GetLoggedModelResponse { /** The retrieved logged model. */ - @JsonProperty("model") private LoggedModel model; public GetLoggedModelResponse setModel(LoggedModel model) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetLoggedModelResponse.class).add("model", model).toString(); } + + GetLoggedModelResponsePb toPb() { + GetLoggedModelResponsePb pb = new GetLoggedModelResponsePb(); + pb.setModel(model); + + return pb; + } + + static GetLoggedModelResponse fromPb(GetLoggedModelResponsePb pb) { + GetLoggedModelResponse model = new GetLoggedModelResponse(); + model.setModel(pb.getModel()); + + return model; + } + + public static class GetLoggedModelResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLoggedModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetLoggedModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLoggedModelResponseDeserializer + extends JsonDeserializer { + @Override + public GetLoggedModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLoggedModelResponsePb pb = mapper.readValue(p, GetLoggedModelResponsePb.class); + return GetLoggedModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponsePb.java new file mode 100755 index 000000000..ce6508972 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetLoggedModelResponsePb { + @JsonProperty("model") + private LoggedModel model; + + public GetLoggedModelResponsePb setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelResponsePb that = (GetLoggedModelResponsePb) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelResponsePb.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java index 22c1f2388..2bfd4d9c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetMetricHistoryResponse.GetMetricHistoryResponseSerializer.class) +@JsonDeserialize(using = GetMetricHistoryResponse.GetMetricHistoryResponseDeserializer.class) public class GetMetricHistoryResponse { /** * All logged values for this metric if `max_results` is not specified in the request or if the * total count of metrics returned is less than the service level pagination threshold. Otherwise, * this is one page of results. */ - @JsonProperty("metrics") private Collection metrics; /** * A token that can be used to issue a query for the next page of metric history values. A missing * token indicates that no additional metrics are available to fetch. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public GetMetricHistoryResponse setMetrics(Collection metrics) { @@ -64,4 +73,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetMetricHistoryResponsePb toPb() { + GetMetricHistoryResponsePb pb = new GetMetricHistoryResponsePb(); + pb.setMetrics(metrics); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetMetricHistoryResponse fromPb(GetMetricHistoryResponsePb pb) { + GetMetricHistoryResponse model = new GetMetricHistoryResponse(); + model.setMetrics(pb.getMetrics()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetMetricHistoryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetMetricHistoryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetMetricHistoryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetMetricHistoryResponseDeserializer + extends JsonDeserializer { + @Override + public GetMetricHistoryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetMetricHistoryResponsePb pb = mapper.readValue(p, GetMetricHistoryResponsePb.class); + return GetMetricHistoryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponsePb.java new file mode 100755 index 000000000..f93e96699 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetMetricHistoryResponsePb { + @JsonProperty("metrics") + private Collection metrics; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetMetricHistoryResponsePb setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public GetMetricHistoryResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMetricHistoryResponsePb that = (GetMetricHistoryResponsePb) o; + return Objects.equals(metrics, that.metrics) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(metrics, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GetMetricHistoryResponsePb.class) + .add("metrics", metrics) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java index a4535382a..ab25685b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get model */ @Generated +@JsonSerialize(using = GetModelRequest.GetModelRequestSerializer.class) +@JsonDeserialize(using = GetModelRequest.GetModelRequestDeserializer.class) public class GetModelRequest { /** Registered model unique name identifier. */ - @JsonIgnore - @QueryParam("name") private String name; public GetModelRequest setName(String name) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetModelRequest.class).add("name", name).toString(); } + + GetModelRequestPb toPb() { + GetModelRequestPb pb = new GetModelRequestPb(); + pb.setName(name); + + return pb; + } + + static GetModelRequest fromPb(GetModelRequestPb pb) { + GetModelRequest model = new GetModelRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetModelRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelRequestDeserializer extends JsonDeserializer { + @Override + public GetModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelRequestPb pb = mapper.readValue(p, GetModelRequestPb.class); + return GetModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequestPb.java new file mode 100755 index 000000000..1846e7ced --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get model */ +@Generated +class GetModelRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + public GetModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelRequestPb that = (GetModelRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetModelRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java index d80f74636..fbb33dc45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java @@ -4,13 +4,23 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetModelResponse.GetModelResponseSerializer.class) +@JsonDeserialize(using = GetModelResponse.GetModelResponseDeserializer.class) public class GetModelResponse { /** */ - @JsonProperty("registered_model_databricks") private ModelDatabricks registeredModelDatabricks; public GetModelResponse setRegisteredModelDatabricks(ModelDatabricks registeredModelDatabricks) { @@ -41,4 +51,38 @@ public String toString() { .add("registeredModelDatabricks", registeredModelDatabricks) .toString(); } + + GetModelResponsePb toPb() { + GetModelResponsePb pb = new GetModelResponsePb(); + pb.setRegisteredModelDatabricks(registeredModelDatabricks); + + return pb; + } + + static GetModelResponse fromPb(GetModelResponsePb pb) { + GetModelResponse model = new GetModelResponse(); + model.setRegisteredModelDatabricks(pb.getRegisteredModelDatabricks()); + + return model; + } + + public static class GetModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelResponseDeserializer extends JsonDeserializer { + @Override 
+ public GetModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelResponsePb pb = mapper.readValue(p, GetModelResponsePb.class); + return GetModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponsePb.java new file mode 100755 index 000000000..98c15017b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetModelResponsePb { + @JsonProperty("registered_model_databricks") + private ModelDatabricks registeredModelDatabricks; + + public GetModelResponsePb setRegisteredModelDatabricks( + ModelDatabricks registeredModelDatabricks) { + this.registeredModelDatabricks = registeredModelDatabricks; + return this; + } + + public ModelDatabricks getRegisteredModelDatabricks() { + return registeredModelDatabricks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelResponsePb that = (GetModelResponsePb) o; + return Objects.equals(registeredModelDatabricks, that.registeredModelDatabricks); + } + + @Override + public int hashCode() { + return Objects.hash(registeredModelDatabricks); + } + + @Override + public String toString() { + return new ToStringer(GetModelResponsePb.class) + .add("registeredModelDatabricks", registeredModelDatabricks) + .toString(); + } 
+} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequest.java index 1d2142e2f..b9a735afc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequest.java @@ -3,22 +3,30 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a model version URI */ @Generated +@JsonSerialize( + using = GetModelVersionDownloadUriRequest.GetModelVersionDownloadUriRequestSerializer.class) +@JsonDeserialize( + using = GetModelVersionDownloadUriRequest.GetModelVersionDownloadUriRequestDeserializer.class) public class GetModelVersionDownloadUriRequest { /** Name of the registered model */ - @JsonIgnore - @QueryParam("name") private String name; /** Model version number */ - @JsonIgnore - @QueryParam("version") private String version; public GetModelVersionDownloadUriRequest setName(String name) { @@ -59,4 +67,44 @@ public String toString() { .add("version", version) .toString(); } + + GetModelVersionDownloadUriRequestPb toPb() { + 
GetModelVersionDownloadUriRequestPb pb = new GetModelVersionDownloadUriRequestPb(); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static GetModelVersionDownloadUriRequest fromPb(GetModelVersionDownloadUriRequestPb pb) { + GetModelVersionDownloadUriRequest model = new GetModelVersionDownloadUriRequest(); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class GetModelVersionDownloadUriRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetModelVersionDownloadUriRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelVersionDownloadUriRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelVersionDownloadUriRequestDeserializer + extends JsonDeserializer { + @Override + public GetModelVersionDownloadUriRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelVersionDownloadUriRequestPb pb = + mapper.readValue(p, GetModelVersionDownloadUriRequestPb.class); + return GetModelVersionDownloadUriRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequestPb.java new file mode 100755 index 000000000..4473aa1ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a model version URI */ +@Generated +class GetModelVersionDownloadUriRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("version") + private String version; + + public GetModelVersionDownloadUriRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetModelVersionDownloadUriRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelVersionDownloadUriRequestPb that = (GetModelVersionDownloadUriRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + @Override + public String toString() { + return new ToStringer(GetModelVersionDownloadUriRequestPb.class) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponse.java index 6a3ba2c3a..42814c936 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GetModelVersionDownloadUriResponse.GetModelVersionDownloadUriResponseSerializer.class) +@JsonDeserialize( + using = GetModelVersionDownloadUriResponse.GetModelVersionDownloadUriResponseDeserializer.class) public class GetModelVersionDownloadUriResponse { /** URI corresponding to where artifacts for this model version are stored. */ - @JsonProperty("artifact_uri") private String artifactUri; public GetModelVersionDownloadUriResponse setArtifactUri(String artifactUri) { @@ -41,4 +53,42 @@ public String toString() { .add("artifactUri", artifactUri) .toString(); } + + GetModelVersionDownloadUriResponsePb toPb() { + GetModelVersionDownloadUriResponsePb pb = new GetModelVersionDownloadUriResponsePb(); + pb.setArtifactUri(artifactUri); + + return pb; + } + + static GetModelVersionDownloadUriResponse fromPb(GetModelVersionDownloadUriResponsePb pb) { + GetModelVersionDownloadUriResponse model = new GetModelVersionDownloadUriResponse(); + model.setArtifactUri(pb.getArtifactUri()); + + return model; + } + + public static class GetModelVersionDownloadUriResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetModelVersionDownloadUriResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelVersionDownloadUriResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelVersionDownloadUriResponseDeserializer + extends JsonDeserializer { + @Override + public GetModelVersionDownloadUriResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelVersionDownloadUriResponsePb pb = + mapper.readValue(p, GetModelVersionDownloadUriResponsePb.class); + return GetModelVersionDownloadUriResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponsePb.java new file mode 100755 index 000000000..455c5db19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetModelVersionDownloadUriResponsePb { + @JsonProperty("artifact_uri") + private String artifactUri; + + public GetModelVersionDownloadUriResponsePb setArtifactUri(String artifactUri) { + this.artifactUri = artifactUri; + return this; + } + + public String getArtifactUri() { + return artifactUri; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelVersionDownloadUriResponsePb that = (GetModelVersionDownloadUriResponsePb) o; + return Objects.equals(artifactUri, that.artifactUri); + } + + @Override + public int hashCode() { + return Objects.hash(artifactUri); + } + + @Override + public String toString() { + return new ToStringer(GetModelVersionDownloadUriResponsePb.class) + .add("artifactUri", artifactUri) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequest.java index da9b7df1d..82e6a4354 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a model version */ @Generated +@JsonSerialize(using = GetModelVersionRequest.GetModelVersionRequestSerializer.class) +@JsonDeserialize(using = GetModelVersionRequest.GetModelVersionRequestDeserializer.class) public class GetModelVersionRequest { /** Name of the registered model */ - @JsonIgnore - @QueryParam("name") private String name; /** Model version number */ - @JsonIgnore - @QueryParam("version") private String version; public GetModelVersionRequest setName(String name) { @@ -59,4 +65,43 @@ public String toString() { .add("version", version) .toString(); } + + GetModelVersionRequestPb toPb() { + GetModelVersionRequestPb pb = new GetModelVersionRequestPb(); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static GetModelVersionRequest fromPb(GetModelVersionRequestPb pb) { + GetModelVersionRequest model = new GetModelVersionRequest(); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class GetModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public GetModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelVersionRequestPb pb = mapper.readValue(p, GetModelVersionRequestPb.class); + return GetModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequestPb.java new file mode 100755 index 000000000..85e903b5f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a model version */ +@Generated +class GetModelVersionRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("version") + private String version; + + public GetModelVersionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetModelVersionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelVersionRequestPb that = (GetModelVersionRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + @Override + public String toString() { + return new ToStringer(GetModelVersionRequestPb.class) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponse.java index c764ad14b..2fe50ee44 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetModelVersionResponse.GetModelVersionResponseSerializer.class) +@JsonDeserialize(using = GetModelVersionResponse.GetModelVersionResponseDeserializer.class) public class GetModelVersionResponse { /** */ - @JsonProperty("model_version") private ModelVersion modelVersion; public GetModelVersionResponse setModelVersion(ModelVersion modelVersion) { @@ -41,4 +51,41 @@ public String toString() { .add("modelVersion", modelVersion) .toString(); } + + GetModelVersionResponsePb toPb() { + GetModelVersionResponsePb pb = new GetModelVersionResponsePb(); + pb.setModelVersion(modelVersion); + + return pb; + } + + static GetModelVersionResponse fromPb(GetModelVersionResponsePb pb) { + GetModelVersionResponse model = new GetModelVersionResponse(); + model.setModelVersion(pb.getModelVersion()); + + return model; 
+ } + + public static class GetModelVersionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetModelVersionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetModelVersionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetModelVersionResponseDeserializer + extends JsonDeserializer { + @Override + public GetModelVersionResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetModelVersionResponsePb pb = mapper.readValue(p, GetModelVersionResponsePb.class); + return GetModelVersionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponsePb.java new file mode 100755 index 000000000..c086062ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetModelVersionResponsePb { + @JsonProperty("model_version") + private ModelVersion modelVersion; + + public GetModelVersionResponsePb setModelVersion(ModelVersion modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public ModelVersion getModelVersion() { + return modelVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetModelVersionResponsePb that = (GetModelVersionResponsePb) o; + return Objects.equals(modelVersion, that.modelVersion); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersion); + } + + @Override + public String toString() { + return new ToStringer(GetModelVersionResponsePb.class) + .add("modelVersion", modelVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequest.java index 77ff13346..bcf2feb50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get registered model permission levels */ @Generated +@JsonSerialize( + using = + GetRegisteredModelPermissionLevelsRequest + .GetRegisteredModelPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = + GetRegisteredModelPermissionLevelsRequest + .GetRegisteredModelPermissionLevelsRequestDeserializer.class) public class GetRegisteredModelPermissionLevelsRequest { /** The registered model for which to get or manage permissions. */ - @JsonIgnore private String registeredModelId; + private String registeredModelId; public GetRegisteredModelPermissionLevelsRequest setRegisteredModelId(String registeredModelId) { this.registeredModelId = registeredModelId; @@ -41,4 +58,47 @@ public String toString() { .add("registeredModelId", registeredModelId) .toString(); } + + GetRegisteredModelPermissionLevelsRequestPb toPb() { + GetRegisteredModelPermissionLevelsRequestPb pb = + new GetRegisteredModelPermissionLevelsRequestPb(); + pb.setRegisteredModelId(registeredModelId); + + return pb; + } + + static GetRegisteredModelPermissionLevelsRequest fromPb( + GetRegisteredModelPermissionLevelsRequestPb pb) { + GetRegisteredModelPermissionLevelsRequest model = + new GetRegisteredModelPermissionLevelsRequest(); + model.setRegisteredModelId(pb.getRegisteredModelId()); + + return model; + } + + public static class GetRegisteredModelPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRegisteredModelPermissionLevelsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetRegisteredModelPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public 
static class GetRegisteredModelPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetRegisteredModelPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRegisteredModelPermissionLevelsRequestPb pb = + mapper.readValue(p, GetRegisteredModelPermissionLevelsRequestPb.class); + return GetRegisteredModelPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequestPb.java new file mode 100755 index 000000000..7cccbde4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get registered model permission levels */ +@Generated +class GetRegisteredModelPermissionLevelsRequestPb { + @JsonIgnore private String registeredModelId; + + public GetRegisteredModelPermissionLevelsRequestPb setRegisteredModelId( + String registeredModelId) { + this.registeredModelId = registeredModelId; + return this; + } + + public String getRegisteredModelId() { + return registeredModelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRegisteredModelPermissionLevelsRequestPb that = + (GetRegisteredModelPermissionLevelsRequestPb) o; + return Objects.equals(registeredModelId, that.registeredModelId); + } + + @Override + public int hashCode() { + return Objects.hash(registeredModelId); + } + + @Override + public String toString() { + return new ToStringer(GetRegisteredModelPermissionLevelsRequestPb.class) + .add("registeredModelId", registeredModelId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponse.java index 98a9198bc..3b7d431dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetRegisteredModelPermissionLevelsResponse + .GetRegisteredModelPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetRegisteredModelPermissionLevelsResponse + .GetRegisteredModelPermissionLevelsResponseDeserializer.class) public class GetRegisteredModelPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetRegisteredModelPermissionLevelsResponse setPermissionLevels( @@ -44,4 +60,47 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetRegisteredModelPermissionLevelsResponsePb toPb() { + GetRegisteredModelPermissionLevelsResponsePb pb = + new GetRegisteredModelPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetRegisteredModelPermissionLevelsResponse fromPb( + GetRegisteredModelPermissionLevelsResponsePb pb) { + GetRegisteredModelPermissionLevelsResponse model = + new GetRegisteredModelPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetRegisteredModelPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRegisteredModelPermissionLevelsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetRegisteredModelPermissionLevelsResponsePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRegisteredModelPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetRegisteredModelPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRegisteredModelPermissionLevelsResponsePb pb = + mapper.readValue(p, GetRegisteredModelPermissionLevelsResponsePb.class); + return GetRegisteredModelPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponsePb.java new file mode 100755 index 000000000..a9c9e7294 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponsePb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetRegisteredModelPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetRegisteredModelPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRegisteredModelPermissionLevelsResponsePb that = + (GetRegisteredModelPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetRegisteredModelPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequest.java index 54b5ac09e..da1d01b1b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get registered model permissions */ @Generated +@JsonSerialize( + using = + GetRegisteredModelPermissionsRequest.GetRegisteredModelPermissionsRequestSerializer.class) +@JsonDeserialize( + using = + GetRegisteredModelPermissionsRequest.GetRegisteredModelPermissionsRequestDeserializer.class) public class GetRegisteredModelPermissionsRequest { /** The registered model for which to get or manage permissions. */ - @JsonIgnore private String registeredModelId; + private String registeredModelId; public GetRegisteredModelPermissionsRequest setRegisteredModelId(String registeredModelId) { this.registeredModelId = registeredModelId; @@ -41,4 +56,42 @@ public String toString() { .add("registeredModelId", registeredModelId) .toString(); } + + GetRegisteredModelPermissionsRequestPb toPb() { + GetRegisteredModelPermissionsRequestPb pb = new GetRegisteredModelPermissionsRequestPb(); + pb.setRegisteredModelId(registeredModelId); + + return pb; + } + + static GetRegisteredModelPermissionsRequest fromPb(GetRegisteredModelPermissionsRequestPb pb) { + GetRegisteredModelPermissionsRequest model = new GetRegisteredModelPermissionsRequest(); + model.setRegisteredModelId(pb.getRegisteredModelId()); + + return model; + } + + public static class GetRegisteredModelPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRegisteredModelPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRegisteredModelPermissionsRequestPb pb = value.toPb(); 
+ provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRegisteredModelPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetRegisteredModelPermissionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRegisteredModelPermissionsRequestPb pb = + mapper.readValue(p, GetRegisteredModelPermissionsRequestPb.class); + return GetRegisteredModelPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequestPb.java new file mode 100755 index 000000000..602e072ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get registered model permissions */ +@Generated +class GetRegisteredModelPermissionsRequestPb { + @JsonIgnore private String registeredModelId; + + public GetRegisteredModelPermissionsRequestPb setRegisteredModelId(String registeredModelId) { + this.registeredModelId = registeredModelId; + return this; + } + + public String getRegisteredModelId() { + return registeredModelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRegisteredModelPermissionsRequestPb that = (GetRegisteredModelPermissionsRequestPb) o; + return Objects.equals(registeredModelId, that.registeredModelId); + } + + @Override + public int hashCode() { + return Objects.hash(registeredModelId); + } + + @Override + public String toString() { + return new ToStringer(GetRegisteredModelPermissionsRequestPb.class) + .add("registeredModelId", registeredModelId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java index 3206a8d1a..494b47b35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java @@ -3,25 +3,31 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a run */ @Generated +@JsonSerialize(using = GetRunRequest.GetRunRequestSerializer.class) +@JsonDeserialize(using = GetRunRequest.GetRunRequestDeserializer.class) public class GetRunRequest { /** ID of the run to fetch. Must be provided. */ - @JsonIgnore - @QueryParam("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run to fetch. This field will be removed in a * future MLflow version. */ - @JsonIgnore - @QueryParam("run_uuid") private String runUuid; public GetRunRequest setRunId(String runId) { @@ -62,4 +68,39 @@ public String toString() { .add("runUuid", runUuid) .toString(); } + + GetRunRequestPb toPb() { + GetRunRequestPb pb = new GetRunRequestPb(); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + + return pb; + } + + static GetRunRequest fromPb(GetRunRequestPb pb) { + GetRunRequest model = new GetRunRequest(); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + + return model; + } + + public static class GetRunRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetRunRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRunRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRunRequestDeserializer extends JsonDeserializer { + @Override + public GetRunRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRunRequestPb pb = mapper.readValue(p, GetRunRequestPb.class); + return GetRunRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequestPb.java new file mode 100755 index 000000000..2d31f5cc4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a run */ +@Generated +class GetRunRequestPb { + @JsonIgnore + @QueryParam("run_id") + private String runId; + + @JsonIgnore + @QueryParam("run_uuid") + private String runUuid; + + public GetRunRequestPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public GetRunRequestPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRunRequestPb that = (GetRunRequestPb) o; + return Objects.equals(runId, that.runId) && Objects.equals(runUuid, that.runUuid); + } + + @Override + public int hashCode() { + return Objects.hash(runId, runUuid); + } + + @Override + public String toString() { + return new ToStringer(GetRunRequestPb.class) + .add("runId", runId) + .add("runUuid", runUuid) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponse.java index 6942ed1ea..7494bf99b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetRunResponse.GetRunResponseSerializer.class) +@JsonDeserialize(using = GetRunResponse.GetRunResponseDeserializer.class) public class GetRunResponse { /** Run metadata (name, start time, etc) and data (metrics, params, and tags). 
*/ - @JsonProperty("run") private Run run; public GetRunResponse setRun(Run run) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetRunResponse.class).add("run", run).toString(); } + + GetRunResponsePb toPb() { + GetRunResponsePb pb = new GetRunResponsePb(); + pb.setRun(run); + + return pb; + } + + static GetRunResponse fromPb(GetRunResponsePb pb) { + GetRunResponse model = new GetRunResponse(); + model.setRun(pb.getRun()); + + return model; + } + + public static class GetRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRunResponseDeserializer extends JsonDeserializer { + @Override + public GetRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRunResponsePb pb = mapper.readValue(p, GetRunResponsePb.class); + return GetRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponsePb.java new file mode 100755 index 000000000..baa639bb5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetRunResponsePb { + @JsonProperty("run") + private Run run; + + public GetRunResponsePb setRun(Run run) { + this.run = run; + return this; + } + + public Run getRun() { + return run; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRunResponsePb that = (GetRunResponsePb) o; + return Objects.equals(run, that.run); + } + + @Override + public int hashCode() { + return Objects.hash(run); + } + + @Override + public String toString() { + return new ToStringer(GetRunResponsePb.class).add("run", run).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpec.java index a7de79ab0..0af7235d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpec.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated 
+@JsonSerialize(using = HttpUrlSpec.HttpUrlSpecSerializer.class) +@JsonDeserialize(using = HttpUrlSpec.HttpUrlSpecDeserializer.class) public class HttpUrlSpec { /** * Value of the authorization header that should be sent in the request sent by the webhook. It * should be of the form `" "`. If set to an empty string, no * authorization header will be included in the request. */ - @JsonProperty("authorization") private String authorization; /** @@ -24,18 +34,15 @@ public class HttpUrlSpec { * of the payload and acknowledge the risk associated with disabling hostname validation whereby * it becomes more likely that requests can be maliciously routed to an unintended host. */ - @JsonProperty("enable_ssl_verification") private Boolean enableSslVerification; /** * Shared secret required for HMAC encoding payload. The HMAC-encoded payload will be sent in the * header as: { "X-Databricks-Signature": $encoded_payload }. */ - @JsonProperty("secret") private String secret; /** External HTTPS URL called on event trigger (by using a POST request). 
*/ - @JsonProperty("url") private String url; public HttpUrlSpec setAuthorization(String authorization) { @@ -99,4 +106,43 @@ public String toString() { .add("url", url) .toString(); } + + HttpUrlSpecPb toPb() { + HttpUrlSpecPb pb = new HttpUrlSpecPb(); + pb.setAuthorization(authorization); + pb.setEnableSslVerification(enableSslVerification); + pb.setSecret(secret); + pb.setUrl(url); + + return pb; + } + + static HttpUrlSpec fromPb(HttpUrlSpecPb pb) { + HttpUrlSpec model = new HttpUrlSpec(); + model.setAuthorization(pb.getAuthorization()); + model.setEnableSslVerification(pb.getEnableSslVerification()); + model.setSecret(pb.getSecret()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class HttpUrlSpecSerializer extends JsonSerializer { + @Override + public void serialize(HttpUrlSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + HttpUrlSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class HttpUrlSpecDeserializer extends JsonDeserializer { + @Override + public HttpUrlSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + HttpUrlSpecPb pb = mapper.readValue(p, HttpUrlSpecPb.class); + return HttpUrlSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecPb.java new file mode 100755 index 000000000..c1c806270 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class HttpUrlSpecPb { + @JsonProperty("authorization") + private String authorization; + + @JsonProperty("enable_ssl_verification") + private Boolean enableSslVerification; + + @JsonProperty("secret") + private String secret; + + @JsonProperty("url") + private String url; + + public HttpUrlSpecPb setAuthorization(String authorization) { + this.authorization = authorization; + return this; + } + + public String getAuthorization() { + return authorization; + } + + public HttpUrlSpecPb setEnableSslVerification(Boolean enableSslVerification) { + this.enableSslVerification = enableSslVerification; + return this; + } + + public Boolean getEnableSslVerification() { + return enableSslVerification; + } + + public HttpUrlSpecPb setSecret(String secret) { + this.secret = secret; + return this; + } + + public String getSecret() { + return secret; + } + + public HttpUrlSpecPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HttpUrlSpecPb that = (HttpUrlSpecPb) o; + return Objects.equals(authorization, that.authorization) + && Objects.equals(enableSslVerification, that.enableSslVerification) + && Objects.equals(secret, that.secret) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(authorization, enableSslVerification, secret, url); + } + + @Override + public String toString() { + return new ToStringer(HttpUrlSpecPb.class) + .add("authorization", authorization) + .add("enableSslVerification", enableSslVerification) + .add("secret", secret) + .add("url", url) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecret.java index cafde26f8..7b62058b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecret.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = HttpUrlSpecWithoutSecret.HttpUrlSpecWithoutSecretSerializer.class) +@JsonDeserialize(using = HttpUrlSpecWithoutSecret.HttpUrlSpecWithoutSecretDeserializer.class) public class HttpUrlSpecWithoutSecret { /** * Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this @@ -16,11 +27,9 @@ public class HttpUrlSpecWithoutSecret { * of the payload and acknowledge the risk associated with disabling hostname validation whereby * it becomes more likely that requests can be maliciously routed to an unintended host. */ - @JsonProperty("enable_ssl_verification") private Boolean enableSslVerification; /** External HTTPS URL called on event trigger (by using a POST request). 
*/ - @JsonProperty("url") private String url; public HttpUrlSpecWithoutSecret setEnableSslVerification(Boolean enableSslVerification) { @@ -62,4 +71,43 @@ public String toString() { .add("url", url) .toString(); } + + HttpUrlSpecWithoutSecretPb toPb() { + HttpUrlSpecWithoutSecretPb pb = new HttpUrlSpecWithoutSecretPb(); + pb.setEnableSslVerification(enableSslVerification); + pb.setUrl(url); + + return pb; + } + + static HttpUrlSpecWithoutSecret fromPb(HttpUrlSpecWithoutSecretPb pb) { + HttpUrlSpecWithoutSecret model = new HttpUrlSpecWithoutSecret(); + model.setEnableSslVerification(pb.getEnableSslVerification()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class HttpUrlSpecWithoutSecretSerializer + extends JsonSerializer { + @Override + public void serialize( + HttpUrlSpecWithoutSecret value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + HttpUrlSpecWithoutSecretPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class HttpUrlSpecWithoutSecretDeserializer + extends JsonDeserializer { + @Override + public HttpUrlSpecWithoutSecret deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + HttpUrlSpecWithoutSecretPb pb = mapper.readValue(p, HttpUrlSpecWithoutSecretPb.class); + return HttpUrlSpecWithoutSecret.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecretPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecretPb.java new file mode 100755 index 000000000..2f9b2050c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecretPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class HttpUrlSpecWithoutSecretPb { + @JsonProperty("enable_ssl_verification") + private Boolean enableSslVerification; + + @JsonProperty("url") + private String url; + + public HttpUrlSpecWithoutSecretPb setEnableSslVerification(Boolean enableSslVerification) { + this.enableSslVerification = enableSslVerification; + return this; + } + + public Boolean getEnableSslVerification() { + return enableSslVerification; + } + + public HttpUrlSpecWithoutSecretPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HttpUrlSpecWithoutSecretPb that = (HttpUrlSpecWithoutSecretPb) o; + return Objects.equals(enableSslVerification, that.enableSslVerification) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(enableSslVerification, url); + } + + @Override + public String toString() { + return new ToStringer(HttpUrlSpecWithoutSecretPb.class) + .add("enableSslVerification", enableSslVerification) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java index 1ccaceeae..758d5ff14 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Tag for a dataset input. */ @Generated +@JsonSerialize(using = InputTag.InputTagSerializer.class) +@JsonDeserialize(using = InputTag.InputTagDeserializer.class) public class InputTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. */ - @JsonProperty("value") private String value; public InputTag setKey(String key) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(InputTag.class).add("key", key).add("value", value).toString(); } + + InputTagPb toPb() { + InputTagPb pb = new InputTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static InputTag fromPb(InputTagPb pb) { + InputTag model = new InputTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class InputTagSerializer extends JsonSerializer { + @Override + public void serialize(InputTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + InputTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class InputTagDeserializer extends JsonDeserializer { + @Override + public InputTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + InputTagPb pb = mapper.readValue(p, InputTagPb.class); + return InputTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTagPb.java new file mode 100755 index 000000000..aafca8a22 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTagPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Tag for a dataset input. */ +@Generated +class InputTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public InputTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public InputTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InputTagPb that = (InputTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(InputTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpec.java index 32614641e..cbc795e68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpec.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpec.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobSpec.JobSpecSerializer.class) +@JsonDeserialize(using = JobSpec.JobSpecDeserializer.class) public class JobSpec { /** The personal access token used to authorize webhook's job runs. */ - @JsonProperty("access_token") private String accessToken; /** ID of the job that the webhook runs. */ - @JsonProperty("job_id") private String jobId; /** * URL of the workspace containing the job that this webhook runs. If not specified, the job’s * workspace URL is assumed to be the same as the workspace where the webhook is created. 
*/ - @JsonProperty("workspace_url") private String workspaceUrl; public JobSpec setAccessToken(String accessToken) { @@ -74,4 +82,41 @@ public String toString() { .add("workspaceUrl", workspaceUrl) .toString(); } + + JobSpecPb toPb() { + JobSpecPb pb = new JobSpecPb(); + pb.setAccessToken(accessToken); + pb.setJobId(jobId); + pb.setWorkspaceUrl(workspaceUrl); + + return pb; + } + + static JobSpec fromPb(JobSpecPb pb) { + JobSpec model = new JobSpec(); + model.setAccessToken(pb.getAccessToken()); + model.setJobId(pb.getJobId()); + model.setWorkspaceUrl(pb.getWorkspaceUrl()); + + return model; + } + + public static class JobSpecSerializer extends JsonSerializer { + @Override + public void serialize(JobSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobSpecDeserializer extends JsonDeserializer { + @Override + public JobSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobSpecPb pb = mapper.readValue(p, JobSpecPb.class); + return JobSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecPb.java new file mode 100755 index 000000000..b6fb7a6b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobSpecPb { + @JsonProperty("access_token") + private String accessToken; + + @JsonProperty("job_id") + private String jobId; + + @JsonProperty("workspace_url") + private String workspaceUrl; + + public JobSpecPb setAccessToken(String accessToken) { + this.accessToken = accessToken; + return this; + } + + public String getAccessToken() { + return accessToken; + } + + public JobSpecPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + public JobSpecPb setWorkspaceUrl(String workspaceUrl) { + this.workspaceUrl = workspaceUrl; + return this; + } + + public String getWorkspaceUrl() { + return workspaceUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobSpecPb that = (JobSpecPb) o; + return Objects.equals(accessToken, that.accessToken) + && Objects.equals(jobId, that.jobId) + && Objects.equals(workspaceUrl, that.workspaceUrl); + } + + @Override + public int hashCode() { + return Objects.hash(accessToken, jobId, workspaceUrl); + } + + @Override + public String toString() { + return new ToStringer(JobSpecPb.class) + .add("accessToken", accessToken) + .add("jobId", jobId) + .add("workspaceUrl", workspaceUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java index 281d2982a..1df6140b7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java @@ -4,13 +4,23 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = JobSpecWithoutSecret.JobSpecWithoutSecretSerializer.class) +@JsonDeserialize(using = JobSpecWithoutSecret.JobSpecWithoutSecretDeserializer.class) public class JobSpecWithoutSecret { /** ID of the job that the webhook runs. */ - @JsonProperty("job_id") private String jobId; /** @@ -18,7 +28,6 @@ public class JobSpecWithoutSecret { * in which the webhook is created. If not specified, the job’s workspace is assumed to be the * same as the webhook’s. 
*/ - @JsonProperty("workspace_url") private String workspaceUrl; public JobSpecWithoutSecret setJobId(String jobId) { @@ -59,4 +68,42 @@ public String toString() { .add("workspaceUrl", workspaceUrl) .toString(); } + + JobSpecWithoutSecretPb toPb() { + JobSpecWithoutSecretPb pb = new JobSpecWithoutSecretPb(); + pb.setJobId(jobId); + pb.setWorkspaceUrl(workspaceUrl); + + return pb; + } + + static JobSpecWithoutSecret fromPb(JobSpecWithoutSecretPb pb) { + JobSpecWithoutSecret model = new JobSpecWithoutSecret(); + model.setJobId(pb.getJobId()); + model.setWorkspaceUrl(pb.getWorkspaceUrl()); + + return model; + } + + public static class JobSpecWithoutSecretSerializer extends JsonSerializer { + @Override + public void serialize( + JobSpecWithoutSecret value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + JobSpecWithoutSecretPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class JobSpecWithoutSecretDeserializer + extends JsonDeserializer { + @Override + public JobSpecWithoutSecret deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + JobSpecWithoutSecretPb pb = mapper.readValue(p, JobSpecWithoutSecretPb.class); + return JobSpecWithoutSecret.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecretPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecretPb.java new file mode 100755 index 000000000..195c9f666 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecretPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class JobSpecWithoutSecretPb { + @JsonProperty("job_id") + private String jobId; + + @JsonProperty("workspace_url") + private String workspaceUrl; + + public JobSpecWithoutSecretPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + public JobSpecWithoutSecretPb setWorkspaceUrl(String workspaceUrl) { + this.workspaceUrl = workspaceUrl; + return this; + } + + public String getWorkspaceUrl() { + return workspaceUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobSpecWithoutSecretPb that = (JobSpecWithoutSecretPb) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(workspaceUrl, that.workspaceUrl); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, workspaceUrl); + } + + @Override + public String toString() { + return new ToStringer(JobSpecWithoutSecretPb.class) + .add("jobId", jobId) + .add("workspaceUrl", workspaceUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java index d1211a376..e39782758 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List artifacts */ @Generated +@JsonSerialize(using = ListArtifactsRequest.ListArtifactsRequestSerializer.class) +@JsonDeserialize(using = ListArtifactsRequest.ListArtifactsRequestDeserializer.class) public class ListArtifactsRequest { /** * The token indicating the page of artifact results to fetch. `page_token` is not supported when @@ -18,26 +28,18 @@ public class ListArtifactsRequest { * which supports pagination. See [List directory contents | Files * API](/api/workspace/files/listdirectorycontents). */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** Filter artifacts matching this path (a relative path from the root artifact directory). */ - @JsonIgnore - @QueryParam("path") private String path; /** ID of the run whose artifacts to list. Must be provided. */ - @JsonIgnore - @QueryParam("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be * removed in a future MLflow version. 
*/ - @JsonIgnore - @QueryParam("run_uuid") private String runUuid; public ListArtifactsRequest setPageToken(String pageToken) { @@ -101,4 +103,46 @@ public String toString() { .add("runUuid", runUuid) .toString(); } + + ListArtifactsRequestPb toPb() { + ListArtifactsRequestPb pb = new ListArtifactsRequestPb(); + pb.setPageToken(pageToken); + pb.setPath(path); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + + return pb; + } + + static ListArtifactsRequest fromPb(ListArtifactsRequestPb pb) { + ListArtifactsRequest model = new ListArtifactsRequest(); + model.setPageToken(pb.getPageToken()); + model.setPath(pb.getPath()); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + + return model; + } + + public static class ListArtifactsRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListArtifactsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListArtifactsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListArtifactsRequestDeserializer + extends JsonDeserializer { + @Override + public ListArtifactsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListArtifactsRequestPb pb = mapper.readValue(p, ListArtifactsRequestPb.class); + return ListArtifactsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequestPb.java new file mode 100755 index 000000000..c4a56585a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List artifacts */ +@Generated +class ListArtifactsRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("path") + private String path; + + @JsonIgnore + @QueryParam("run_id") + private String runId; + + @JsonIgnore + @QueryParam("run_uuid") + private String runUuid; + + public ListArtifactsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListArtifactsRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public ListArtifactsRequestPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ListArtifactsRequestPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListArtifactsRequestPb that = (ListArtifactsRequestPb) o; + return Objects.equals(pageToken, that.pageToken) + && Objects.equals(path, that.path) + && Objects.equals(runId, that.runId) + && Objects.equals(runUuid, that.runUuid); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken, path, runId, runUuid); + } + + @Override + public String toString() { + return new ToStringer(ListArtifactsRequestPb.class) + .add("pageToken", pageToken) + .add("path", path) + .add("runId", runId) + .add("runUuid", runUuid) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java index dac42941b..5964a7d52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListArtifactsResponse.ListArtifactsResponseSerializer.class) +@JsonDeserialize(using = ListArtifactsResponse.ListArtifactsResponseDeserializer.class) public class ListArtifactsResponse { /** The file location and metadata for artifacts. */ - @JsonProperty("files") private Collection files; /** The token that can be used to retrieve the next page of artifact results. */ - @JsonProperty("next_page_token") private String nextPageToken; /** The root artifact directory for the run. 
*/ - @JsonProperty("root_uri") private String rootUri; public ListArtifactsResponse setFiles(Collection files) { @@ -72,4 +80,45 @@ public String toString() { .add("rootUri", rootUri) .toString(); } + + ListArtifactsResponsePb toPb() { + ListArtifactsResponsePb pb = new ListArtifactsResponsePb(); + pb.setFiles(files); + pb.setNextPageToken(nextPageToken); + pb.setRootUri(rootUri); + + return pb; + } + + static ListArtifactsResponse fromPb(ListArtifactsResponsePb pb) { + ListArtifactsResponse model = new ListArtifactsResponse(); + model.setFiles(pb.getFiles()); + model.setNextPageToken(pb.getNextPageToken()); + model.setRootUri(pb.getRootUri()); + + return model; + } + + public static class ListArtifactsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListArtifactsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListArtifactsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListArtifactsResponseDeserializer + extends JsonDeserializer { + @Override + public ListArtifactsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListArtifactsResponsePb pb = mapper.readValue(p, ListArtifactsResponsePb.class); + return ListArtifactsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponsePb.java new file mode 100755 index 000000000..05b6338d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListArtifactsResponsePb { + @JsonProperty("files") + private Collection files; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("root_uri") + private String rootUri; + + public ListArtifactsResponsePb setFiles(Collection files) { + this.files = files; + return this; + } + + public Collection getFiles() { + return files; + } + + public ListArtifactsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListArtifactsResponsePb setRootUri(String rootUri) { + this.rootUri = rootUri; + return this; + } + + public String getRootUri() { + return rootUri; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListArtifactsResponsePb that = (ListArtifactsResponsePb) o; + return Objects.equals(files, that.files) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(rootUri, that.rootUri); + } + + @Override + public int hashCode() { + return Objects.hash(files, nextPageToken, rootUri); + } + + @Override + public String toString() { + return new ToStringer(ListArtifactsResponsePb.class) + .add("files", files) + .add("nextPageToken", nextPageToken) + .add("rootUri", rootUri) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java index f3b09e157..fe1fdb2cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List experiments */ @Generated +@JsonSerialize(using = ListExperimentsRequest.ListExperimentsRequestSerializer.class) +@JsonDeserialize(using = ListExperimentsRequest.ListExperimentsRequestDeserializer.class) public class ListExperimentsRequest { /** * Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. @@ -17,21 +27,15 @@ public class ListExperimentsRequest { * are encouraged to pass max_results explicitly and leverage page_token to iterate through * experiments. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Token indicating the page of experiments to fetch */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * Qualifier for type of experiments to be returned. If unspecified, return only active * experiments. 
*/ - @JsonIgnore - @QueryParam("view_type") private ViewType viewType; public ListExperimentsRequest setMaxResults(Long maxResults) { @@ -84,4 +88,45 @@ public String toString() { .add("viewType", viewType) .toString(); } + + ListExperimentsRequestPb toPb() { + ListExperimentsRequestPb pb = new ListExperimentsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setViewType(viewType); + + return pb; + } + + static ListExperimentsRequest fromPb(ListExperimentsRequestPb pb) { + ListExperimentsRequest model = new ListExperimentsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setViewType(pb.getViewType()); + + return model; + } + + public static class ListExperimentsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExperimentsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExperimentsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExperimentsRequestDeserializer + extends JsonDeserializer { + @Override + public ListExperimentsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExperimentsRequestPb pb = mapper.readValue(p, ListExperimentsRequestPb.class); + return ListExperimentsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequestPb.java new file mode 100755 index 000000000..c9afccaf4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List experiments */ +@Generated +class ListExperimentsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore + @QueryParam("view_type") + private ViewType viewType; + + public ListExperimentsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListExperimentsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListExperimentsRequestPb setViewType(ViewType viewType) { + this.viewType = viewType; + return this; + } + + public ViewType getViewType() { + return viewType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExperimentsRequestPb that = (ListExperimentsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(viewType, that.viewType); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, viewType); + } + + @Override + public String toString() { + return new ToStringer(ListExperimentsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("viewType", viewType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java index 133d5bccf..092b9e1d1 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListExperimentsResponse.ListExperimentsResponseSerializer.class) +@JsonDeserialize(using = ListExperimentsResponse.ListExperimentsResponseDeserializer.class) public class ListExperimentsResponse { /** Paginated Experiments beginning with the first item on the requested page. */ - @JsonProperty("experiments") private Collection experiments; /** * Token that can be used to retrieve the next page of experiments. Empty token means no more * experiment is available for retrieval. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListExperimentsResponse setExperiments(Collection experiments) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListExperimentsResponsePb toPb() { + ListExperimentsResponsePb pb = new ListExperimentsResponsePb(); + pb.setExperiments(experiments); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListExperimentsResponse fromPb(ListExperimentsResponsePb pb) { + ListExperimentsResponse model = new ListExperimentsResponse(); + model.setExperiments(pb.getExperiments()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListExperimentsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListExperimentsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListExperimentsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListExperimentsResponseDeserializer + extends JsonDeserializer { + @Override + public ListExperimentsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListExperimentsResponsePb pb = mapper.readValue(p, ListExperimentsResponsePb.class); + return ListExperimentsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponsePb.java new file mode 100755 index 000000000..2f96eb073 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListExperimentsResponsePb { + @JsonProperty("experiments") + private Collection experiments; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExperimentsResponsePb setExperiments(Collection experiments) { + this.experiments = experiments; + return this; + } + + public Collection getExperiments() { + return experiments; + } + + public ListExperimentsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExperimentsResponsePb that = (ListExperimentsResponsePb) o; + return Objects.equals(experiments, that.experiments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(experiments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExperimentsResponsePb.class) + .add("experiments", experiments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java index e94842c95..7839e2590 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java @@ -3,21 +3,31 @@ package com.databricks.sdk.service.ml; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List artifacts for a logged model */ @Generated +@JsonSerialize( + using = ListLoggedModelArtifactsRequest.ListLoggedModelArtifactsRequestSerializer.class) +@JsonDeserialize( + using = ListLoggedModelArtifactsRequest.ListLoggedModelArtifactsRequestDeserializer.class) public class ListLoggedModelArtifactsRequest { /** Filter artifacts matching this path (a relative path from the root artifact directory). */ - @JsonIgnore - @QueryParam("artifact_directory_path") private String artifactDirectoryPath; /** The ID of the logged model for which to list the artifacts. */ - @JsonIgnore private String modelId; + private String modelId; /** * Token indicating the page of artifact results to fetch. `page_token` is not supported when @@ -26,8 +36,6 @@ public class ListLoggedModelArtifactsRequest { * which supports pagination. See [List directory contents | Files * API](/api/workspace/files/listdirectorycontents). 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListLoggedModelArtifactsRequest setArtifactDirectoryPath(String artifactDirectoryPath) { @@ -80,4 +88,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListLoggedModelArtifactsRequestPb toPb() { + ListLoggedModelArtifactsRequestPb pb = new ListLoggedModelArtifactsRequestPb(); + pb.setArtifactDirectoryPath(artifactDirectoryPath); + pb.setModelId(modelId); + pb.setPageToken(pageToken); + + return pb; + } + + static ListLoggedModelArtifactsRequest fromPb(ListLoggedModelArtifactsRequestPb pb) { + ListLoggedModelArtifactsRequest model = new ListLoggedModelArtifactsRequest(); + model.setArtifactDirectoryPath(pb.getArtifactDirectoryPath()); + model.setModelId(pb.getModelId()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListLoggedModelArtifactsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListLoggedModelArtifactsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListLoggedModelArtifactsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListLoggedModelArtifactsRequestDeserializer + extends JsonDeserializer { + @Override + public ListLoggedModelArtifactsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListLoggedModelArtifactsRequestPb pb = + mapper.readValue(p, ListLoggedModelArtifactsRequestPb.class); + return ListLoggedModelArtifactsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequestPb.java new file mode 100755 index 000000000..708def4a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List artifacts for a logged model */ +@Generated +class ListLoggedModelArtifactsRequestPb { + @JsonIgnore + @QueryParam("artifact_directory_path") + private String artifactDirectoryPath; + + @JsonIgnore private String modelId; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListLoggedModelArtifactsRequestPb setArtifactDirectoryPath(String artifactDirectoryPath) { + this.artifactDirectoryPath = artifactDirectoryPath; + return this; + } + + public String getArtifactDirectoryPath() { + return artifactDirectoryPath; + } + + public ListLoggedModelArtifactsRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public ListLoggedModelArtifactsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + ListLoggedModelArtifactsRequestPb that = (ListLoggedModelArtifactsRequestPb) o; + return Objects.equals(artifactDirectoryPath, that.artifactDirectoryPath) + && Objects.equals(modelId, that.modelId) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(artifactDirectoryPath, modelId, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListLoggedModelArtifactsRequestPb.class) + .add("artifactDirectoryPath", artifactDirectoryPath) + .add("modelId", modelId) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java index 35e2dbe82..ae8334086 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java @@ -4,22 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = 
ListLoggedModelArtifactsResponse.ListLoggedModelArtifactsResponseSerializer.class) +@JsonDeserialize( + using = ListLoggedModelArtifactsResponse.ListLoggedModelArtifactsResponseDeserializer.class) public class ListLoggedModelArtifactsResponse { /** File location and metadata for artifacts. */ - @JsonProperty("files") private Collection files; /** Token that can be used to retrieve the next page of artifact results */ - @JsonProperty("next_page_token") private String nextPageToken; /** Root artifact directory for the logged model. */ - @JsonProperty("root_uri") private String rootUri; public ListLoggedModelArtifactsResponse setFiles(Collection files) { @@ -72,4 +82,46 @@ public String toString() { .add("rootUri", rootUri) .toString(); } + + ListLoggedModelArtifactsResponsePb toPb() { + ListLoggedModelArtifactsResponsePb pb = new ListLoggedModelArtifactsResponsePb(); + pb.setFiles(files); + pb.setNextPageToken(nextPageToken); + pb.setRootUri(rootUri); + + return pb; + } + + static ListLoggedModelArtifactsResponse fromPb(ListLoggedModelArtifactsResponsePb pb) { + ListLoggedModelArtifactsResponse model = new ListLoggedModelArtifactsResponse(); + model.setFiles(pb.getFiles()); + model.setNextPageToken(pb.getNextPageToken()); + model.setRootUri(pb.getRootUri()); + + return model; + } + + public static class ListLoggedModelArtifactsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListLoggedModelArtifactsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListLoggedModelArtifactsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListLoggedModelArtifactsResponseDeserializer + extends JsonDeserializer { + @Override + public ListLoggedModelArtifactsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListLoggedModelArtifactsResponsePb pb = + mapper.readValue(p, ListLoggedModelArtifactsResponsePb.class); + return ListLoggedModelArtifactsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponsePb.java new file mode 100755 index 000000000..5d55fc6d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListLoggedModelArtifactsResponsePb { + @JsonProperty("files") + private Collection files; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("root_uri") + private String rootUri; + + public ListLoggedModelArtifactsResponsePb setFiles(Collection files) { + this.files = files; + return this; + } + + public Collection getFiles() { + return files; + } + + public ListLoggedModelArtifactsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListLoggedModelArtifactsResponsePb setRootUri(String rootUri) { + this.rootUri = rootUri; + return this; + } + + public String getRootUri() { + return rootUri; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListLoggedModelArtifactsResponsePb that = (ListLoggedModelArtifactsResponsePb) o; + return 
Objects.equals(files, that.files) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(rootUri, that.rootUri); + } + + @Override + public int hashCode() { + return Objects.hash(files, nextPageToken, rootUri); + } + + @Override + public String toString() { + return new ToStringer(ListLoggedModelArtifactsResponsePb.class) + .add("files", files) + .add("nextPageToken", nextPageToken) + .add("rootUri", rootUri) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java index c39f0b536..afc23d6f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List models */ @Generated +@JsonSerialize(using = ListModelsRequest.ListModelsRequestSerializer.class) +@JsonDeserialize(using = ListModelsRequest.ListModelsRequestDeserializer.class) public class ListModelsRequest { /** Maximum number of registered models desired. Max threshold is 1000. 
*/ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Pagination token to go to the next page based on a previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListModelsRequest setMaxResults(Long maxResults) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListModelsRequestPb toPb() { + ListModelsRequestPb pb = new ListModelsRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListModelsRequest fromPb(ListModelsRequestPb pb) { + ListModelsRequest model = new ListModelsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListModelsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListModelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListModelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListModelsRequestDeserializer extends JsonDeserializer { + @Override + public ListModelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListModelsRequestPb pb = mapper.readValue(p, ListModelsRequestPb.class); + return ListModelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequestPb.java new file mode 100755 index 000000000..bd6b2b87d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List models */ +@Generated +class ListModelsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListModelsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListModelsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListModelsRequestPb that = (ListModelsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListModelsRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java index f13a7f823..0386c6cad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListModelsResponse.ListModelsResponseSerializer.class) +@JsonDeserialize(using = ListModelsResponse.ListModelsResponseDeserializer.class) public class ListModelsResponse { /** Pagination token to request next page of models for the same query. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("registered_models") private Collection registeredModels; public ListModelsResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,40 @@ public String toString() { .add("registeredModels", registeredModels) .toString(); } + + ListModelsResponsePb toPb() { + ListModelsResponsePb pb = new ListModelsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRegisteredModels(registeredModels); + + return pb; + } + + static ListModelsResponse fromPb(ListModelsResponsePb pb) { + ListModelsResponse model = new ListModelsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRegisteredModels(pb.getRegisteredModels()); + + return model; + } + + public static class ListModelsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListModelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListModelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListModelsResponseDeserializer extends 
JsonDeserializer { + @Override + public ListModelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListModelsResponsePb pb = mapper.readValue(p, ListModelsResponsePb.class); + return ListModelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponsePb.java new file mode 100755 index 000000000..7b1b95555 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListModelsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("registered_models") + private Collection registeredModels; + + public ListModelsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListModelsResponsePb setRegisteredModels(Collection registeredModels) { + this.registeredModels = registeredModels; + return this; + } + + public Collection getRegisteredModels() { + return registeredModels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListModelsResponsePb that = (ListModelsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(registeredModels, 
that.registeredModels); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, registeredModels); + } + + @Override + public String toString() { + return new ToStringer(ListModelsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("registeredModels", registeredModels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java index 4a7e0182c..1be39b43d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListRegistryWebhooks.ListRegistryWebhooksSerializer.class) +@JsonDeserialize(using = ListRegistryWebhooks.ListRegistryWebhooksDeserializer.class) public class ListRegistryWebhooks { /** Token that can be used to retrieve the next page of artifact results */ - @JsonProperty("next_page_token") private String nextPageToken; /** Array of registry webhooks. 
*/ - @JsonProperty("webhooks") private Collection webhooks; public ListRegistryWebhooks setNextPageToken(String nextPageToken) { @@ -57,4 +66,42 @@ public String toString() { .add("webhooks", webhooks) .toString(); } + + ListRegistryWebhooksPb toPb() { + ListRegistryWebhooksPb pb = new ListRegistryWebhooksPb(); + pb.setNextPageToken(nextPageToken); + pb.setWebhooks(webhooks); + + return pb; + } + + static ListRegistryWebhooks fromPb(ListRegistryWebhooksPb pb) { + ListRegistryWebhooks model = new ListRegistryWebhooks(); + model.setNextPageToken(pb.getNextPageToken()); + model.setWebhooks(pb.getWebhooks()); + + return model; + } + + public static class ListRegistryWebhooksSerializer extends JsonSerializer { + @Override + public void serialize( + ListRegistryWebhooks value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRegistryWebhooksPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRegistryWebhooksDeserializer + extends JsonDeserializer { + @Override + public ListRegistryWebhooks deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRegistryWebhooksPb pb = mapper.readValue(p, ListRegistryWebhooksPb.class); + return ListRegistryWebhooks.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooksPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooksPb.java new file mode 100755 index 000000000..6e3d43c0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooksPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListRegistryWebhooksPb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("webhooks") + private Collection webhooks; + + public ListRegistryWebhooksPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListRegistryWebhooksPb setWebhooks(Collection webhooks) { + this.webhooks = webhooks; + return this; + } + + public Collection getWebhooks() { + return webhooks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRegistryWebhooksPb that = (ListRegistryWebhooksPb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(webhooks, that.webhooks); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, webhooks); + } + + @Override + public String toString() { + return new ToStringer(ListRegistryWebhooksPb.class) + .add("nextPageToken", nextPageToken) + .add("webhooks", webhooks) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java index 5f6196c45..c27ea0f09 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java @@ -3,22 +3,29 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; 
import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List transition requests */ @Generated +@JsonSerialize(using = ListTransitionRequestsRequest.ListTransitionRequestsRequestSerializer.class) +@JsonDeserialize( + using = ListTransitionRequestsRequest.ListTransitionRequestsRequestDeserializer.class) public class ListTransitionRequestsRequest { /** Name of the model. */ - @JsonIgnore - @QueryParam("name") private String name; /** Version of the model. 
*/ - @JsonIgnore - @QueryParam("version") private String version; public ListTransitionRequestsRequest setName(String name) { @@ -59,4 +66,44 @@ public String toString() { .add("version", version) .toString(); } + + ListTransitionRequestsRequestPb toPb() { + ListTransitionRequestsRequestPb pb = new ListTransitionRequestsRequestPb(); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static ListTransitionRequestsRequest fromPb(ListTransitionRequestsRequestPb pb) { + ListTransitionRequestsRequest model = new ListTransitionRequestsRequest(); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ListTransitionRequestsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListTransitionRequestsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTransitionRequestsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTransitionRequestsRequestDeserializer + extends JsonDeserializer { + @Override + public ListTransitionRequestsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTransitionRequestsRequestPb pb = + mapper.readValue(p, ListTransitionRequestsRequestPb.class); + return ListTransitionRequestsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequestPb.java new file mode 100755 index 000000000..3bcafb5b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List transition requests */ +@Generated +class ListTransitionRequestsRequestPb { + @JsonIgnore + @QueryParam("name") + private String name; + + @JsonIgnore + @QueryParam("version") + private String version; + + public ListTransitionRequestsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ListTransitionRequestsRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTransitionRequestsRequestPb that = (ListTransitionRequestsRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + @Override + public String toString() { + return new ToStringer(ListTransitionRequestsRequestPb.class) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java index 2441e4220..eef787e04 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListTransitionRequestsResponse.ListTransitionRequestsResponseSerializer.class) +@JsonDeserialize( + using = ListTransitionRequestsResponse.ListTransitionRequestsResponseDeserializer.class) public class ListTransitionRequestsResponse { /** Array of open transition requests. */ - @JsonProperty("requests") private Collection requests; public ListTransitionRequestsResponse setRequests(Collection requests) { @@ -42,4 +54,42 @@ public String toString() { .add("requests", requests) .toString(); } + + ListTransitionRequestsResponsePb toPb() { + ListTransitionRequestsResponsePb pb = new ListTransitionRequestsResponsePb(); + pb.setRequests(requests); + + return pb; + } + + static ListTransitionRequestsResponse fromPb(ListTransitionRequestsResponsePb pb) { + ListTransitionRequestsResponse model = new ListTransitionRequestsResponse(); + model.setRequests(pb.getRequests()); + + return model; + } + + public static class ListTransitionRequestsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListTransitionRequestsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTransitionRequestsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTransitionRequestsResponseDeserializer + extends JsonDeserializer { + @Override + public 
ListTransitionRequestsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTransitionRequestsResponsePb pb = + mapper.readValue(p, ListTransitionRequestsResponsePb.class); + return ListTransitionRequestsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponsePb.java new file mode 100755 index 000000000..3e5a48fda --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListTransitionRequestsResponsePb { + @JsonProperty("requests") + private Collection requests; + + public ListTransitionRequestsResponsePb setRequests(Collection requests) { + this.requests = requests; + return this; + } + + public Collection getRequests() { + return requests; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTransitionRequestsResponsePb that = (ListTransitionRequestsResponsePb) o; + return Objects.equals(requests, that.requests); + } + + @Override + public int hashCode() { + return Objects.hash(requests); + } + + @Override + public String toString() { + return new ToStringer(ListTransitionRequestsResponsePb.class) + .add("requests", requests) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java index 4d387e55d..32f0ccc60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java @@ -3,35 +3,39 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List registry webhooks */ @Generated +@JsonSerialize(using = ListWebhooksRequest.ListWebhooksRequestSerializer.class) +@JsonDeserialize(using = ListWebhooksRequest.ListWebhooksRequestDeserializer.class) public class ListWebhooksRequest { /** * If `events` is specified, any webhook with one or more of the specified trigger events is * included in the output. If `events` is not specified, webhooks of all event types are included * in the output. */ - @JsonIgnore - @QueryParam("events") private Collection events; /** * If not specified, all webhooks associated with the specified events are listed, regardless of * their associated model. 
*/ - @JsonIgnore - @QueryParam("model_name") private String modelName; /** Token indicating the page of artifact results to fetch */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListWebhooksRequest setEvents(Collection events) { @@ -84,4 +88,43 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListWebhooksRequestPb toPb() { + ListWebhooksRequestPb pb = new ListWebhooksRequestPb(); + pb.setEvents(events); + pb.setModelName(modelName); + pb.setPageToken(pageToken); + + return pb; + } + + static ListWebhooksRequest fromPb(ListWebhooksRequestPb pb) { + ListWebhooksRequest model = new ListWebhooksRequest(); + model.setEvents(pb.getEvents()); + model.setModelName(pb.getModelName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListWebhooksRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListWebhooksRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListWebhooksRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListWebhooksRequestDeserializer + extends JsonDeserializer { + @Override + public ListWebhooksRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListWebhooksRequestPb pb = mapper.readValue(p, ListWebhooksRequestPb.class); + return ListWebhooksRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequestPb.java new file mode 100755 index 000000000..74a2bd149 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequestPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** List registry webhooks */ +@Generated +class ListWebhooksRequestPb { + @JsonIgnore + @QueryParam("events") + private Collection events; + + @JsonIgnore + @QueryParam("model_name") + private String modelName; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListWebhooksRequestPb setEvents(Collection events) { + this.events = events; + return this; + } + + public Collection getEvents() { + return events; + } + + public ListWebhooksRequestPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ListWebhooksRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWebhooksRequestPb that = (ListWebhooksRequestPb) o; + return Objects.equals(events, that.events) + && 
Objects.equals(modelName, that.modelName) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(events, modelName, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListWebhooksRequestPb.class) + .add("events", events) + .add("modelName", modelName) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java index 1fc66e037..0c5b4c4cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java @@ -4,35 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = LogBatch.LogBatchSerializer.class) +@JsonDeserialize(using = LogBatch.LogBatchDeserializer.class) public class LogBatch { /** * Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, * params, and tags in total. */ - @JsonProperty("metrics") private Collection metrics; /** * Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, * and tags in total. 
*/ - @JsonProperty("params") private Collection params; /** ID of the run to log under */ - @JsonProperty("run_id") private String runId; /** * Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and * tags in total. */ - @JsonProperty("tags") private Collection tags; public LogBatch setMetrics(Collection metrics) { @@ -96,4 +103,43 @@ public String toString() { .add("tags", tags) .toString(); } + + LogBatchPb toPb() { + LogBatchPb pb = new LogBatchPb(); + pb.setMetrics(metrics); + pb.setParams(params); + pb.setRunId(runId); + pb.setTags(tags); + + return pb; + } + + static LogBatch fromPb(LogBatchPb pb) { + LogBatch model = new LogBatch(); + model.setMetrics(pb.getMetrics()); + model.setParams(pb.getParams()); + model.setRunId(pb.getRunId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class LogBatchSerializer extends JsonSerializer { + @Override + public void serialize(LogBatch value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogBatchPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogBatchDeserializer extends JsonDeserializer { + @Override + public LogBatch deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogBatchPb pb = mapper.readValue(p, LogBatchPb.class); + return LogBatch.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchPb.java new file mode 100755 index 000000000..f4b0539d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LogBatchPb { + @JsonProperty("metrics") + private Collection metrics; + + @JsonProperty("params") + private Collection params; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("tags") + private Collection tags; + + public LogBatchPb setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public LogBatchPb setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + public LogBatchPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public LogBatchPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogBatchPb that = (LogBatchPb) o; + return Objects.equals(metrics, that.metrics) + && Objects.equals(params, that.params) + && Objects.equals(runId, that.runId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(metrics, params, runId, tags); + } + + @Override + public String toString() { + return new ToStringer(LogBatchPb.class) + .add("metrics", metrics) + .add("params", params) + .add("runId", runId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java index 3281aba0d..a1ff23c38 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogBatchResponse.LogBatchResponseSerializer.class) +@JsonDeserialize(using = LogBatchResponse.LogBatchResponseDeserializer.class) public class LogBatchResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogBatchResponse.class).toString(); } + + LogBatchResponsePb toPb() { + LogBatchResponsePb pb = new LogBatchResponsePb(); + + return pb; + } + + static LogBatchResponse fromPb(LogBatchResponsePb pb) { + LogBatchResponse model = new LogBatchResponse(); + + return model; + } + + public static class LogBatchResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogBatchResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogBatchResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogBatchResponseDeserializer extends JsonDeserializer { + @Override + public LogBatchResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogBatchResponsePb pb = mapper.readValue(p, LogBatchResponsePb.class); + return LogBatchResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponsePb.java new file mode 100755 index 000000000..64f06545a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogBatchResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogBatchResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java index 9ae8ed0bd..4c75980e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = LogInputs.LogInputsSerializer.class) +@JsonDeserialize(using = LogInputs.LogInputsDeserializer.class) public class LogInputs { /** Dataset inputs */ - @JsonProperty("datasets") private Collection datasets; /** Model inputs */ - @JsonProperty("models") private Collection models; /** ID of the run to log under */ - @JsonProperty("run_id") private String runId; public LogInputs setDatasets(Collection datasets) { @@ -72,4 +80,41 @@ public String toString() { .add("runId", runId) .toString(); } + + LogInputsPb toPb() { + LogInputsPb pb = new LogInputsPb(); + pb.setDatasets(datasets); + pb.setModels(models); + pb.setRunId(runId); + + return pb; + } + + static LogInputs fromPb(LogInputsPb pb) { + LogInputs model = new LogInputs(); + model.setDatasets(pb.getDatasets()); + model.setModels(pb.getModels()); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class LogInputsSerializer extends JsonSerializer { + @Override + public void serialize(LogInputs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogInputsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogInputsDeserializer extends JsonDeserializer { + @Override + public LogInputs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogInputsPb pb = mapper.readValue(p, LogInputsPb.class); + return LogInputs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsPb.java new file mode 100755 index 000000000..646140427 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LogInputsPb { + @JsonProperty("datasets") + private Collection datasets; + + @JsonProperty("models") + private Collection models; + + @JsonProperty("run_id") + private String runId; + + public LogInputsPb setDatasets(Collection datasets) { + this.datasets = datasets; + return this; + } + + public Collection getDatasets() { + return datasets; + } + + public LogInputsPb setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + public LogInputsPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogInputsPb that = (LogInputsPb) o; + return Objects.equals(datasets, that.datasets) + && Objects.equals(models, that.models) + && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(datasets, models, runId); + } + + @Override + public String toString() { + return new ToStringer(LogInputsPb.class) + .add("datasets", 
datasets) + .add("models", models) + .add("runId", runId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java index b09b9a878..4c2c65fcc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogInputsResponse.LogInputsResponseSerializer.class) +@JsonDeserialize(using = LogInputsResponse.LogInputsResponseDeserializer.class) public class LogInputsResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogInputsResponse.class).toString(); } + + LogInputsResponsePb toPb() { + LogInputsResponsePb pb = new LogInputsResponsePb(); + + return pb; + } + + static LogInputsResponse fromPb(LogInputsResponsePb pb) { + LogInputsResponse model = new LogInputsResponse(); + + return model; + } + + public static class LogInputsResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogInputsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogInputsResponsePb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogInputsResponseDeserializer extends JsonDeserializer { + @Override + public LogInputsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogInputsResponsePb pb = mapper.readValue(p, LogInputsResponsePb.class); + return LogInputsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponsePb.java new file mode 100755 index 000000000..eb820cffa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogInputsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogInputsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java index 39472485d..122904ec2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java @@ -4,18 +4,27 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = LogLoggedModelParamsRequest.LogLoggedModelParamsRequestSerializer.class) +@JsonDeserialize(using = LogLoggedModelParamsRequest.LogLoggedModelParamsRequestDeserializer.class) public class LogLoggedModelParamsRequest { /** The ID of the logged model to log params for. */ - @JsonIgnore private String modelId; + private String modelId; /** Parameters to attach to the model. 
*/ - @JsonProperty("params") private Collection params; public LogLoggedModelParamsRequest setModelId(String modelId) { @@ -56,4 +65,43 @@ public String toString() { .add("params", params) .toString(); } + + LogLoggedModelParamsRequestPb toPb() { + LogLoggedModelParamsRequestPb pb = new LogLoggedModelParamsRequestPb(); + pb.setModelId(modelId); + pb.setParams(params); + + return pb; + } + + static LogLoggedModelParamsRequest fromPb(LogLoggedModelParamsRequestPb pb) { + LogLoggedModelParamsRequest model = new LogLoggedModelParamsRequest(); + model.setModelId(pb.getModelId()); + model.setParams(pb.getParams()); + + return model; + } + + public static class LogLoggedModelParamsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + LogLoggedModelParamsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogLoggedModelParamsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogLoggedModelParamsRequestDeserializer + extends JsonDeserializer { + @Override + public LogLoggedModelParamsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogLoggedModelParamsRequestPb pb = mapper.readValue(p, LogLoggedModelParamsRequestPb.class); + return LogLoggedModelParamsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestPb.java new file mode 100755 index 000000000..85a3c0e86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LogLoggedModelParamsRequestPb { + @JsonIgnore private String modelId; + + @JsonProperty("params") + private Collection params; + + public LogLoggedModelParamsRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public LogLoggedModelParamsRequestPb setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogLoggedModelParamsRequestPb that = (LogLoggedModelParamsRequestPb) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(params, that.params); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, params); + } + + @Override + public String toString() { + return new ToStringer(LogLoggedModelParamsRequestPb.class) + .add("modelId", modelId) + .add("params", params) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java index 770602c54..c3485d14e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java @@ -4,9 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = LogLoggedModelParamsRequestResponse.LogLoggedModelParamsRequestResponseSerializer.class) +@JsonDeserialize( + using = + LogLoggedModelParamsRequestResponse.LogLoggedModelParamsRequestResponseDeserializer.class) public class LogLoggedModelParamsRequestResponse { @Override @@ -25,4 +40,40 @@ public int hashCode() { public String toString() { return new ToStringer(LogLoggedModelParamsRequestResponse.class).toString(); } + + LogLoggedModelParamsRequestResponsePb toPb() { + LogLoggedModelParamsRequestResponsePb pb = new LogLoggedModelParamsRequestResponsePb(); + + return pb; + } + + static LogLoggedModelParamsRequestResponse fromPb(LogLoggedModelParamsRequestResponsePb pb) { + LogLoggedModelParamsRequestResponse model = new LogLoggedModelParamsRequestResponse(); + + return model; + } + + public static class LogLoggedModelParamsRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + LogLoggedModelParamsRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogLoggedModelParamsRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogLoggedModelParamsRequestResponseDeserializer + extends JsonDeserializer { + @Override + public LogLoggedModelParamsRequestResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, 
and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogLoggedModelParamsRequestResponsePb pb = + mapper.readValue(p, LogLoggedModelParamsRequestResponsePb.class); + return LogLoggedModelParamsRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponsePb.java new file mode 100755 index 000000000..19b8311d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogLoggedModelParamsRequestResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogLoggedModelParamsRequestResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java index 7e621f079..501ee5291 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java @@ -4,54 +4,56 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogMetric.LogMetricSerializer.class) +@JsonDeserialize(using = LogMetric.LogMetricDeserializer.class) public class LogMetric { /** * Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that * uniquely identifies it within datasets of the same name. */ - @JsonProperty("dataset_digest") private String datasetDigest; /** * The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, * “fantastic-elk-3” */ - @JsonProperty("dataset_name") private String datasetName; /** Name of the metric. */ - @JsonProperty("key") private String key; /** ID of the logged model associated with the metric, if applicable */ - @JsonProperty("model_id") private String modelId; /** ID of the run under which to log the metric. Must be provided. */ - @JsonProperty("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run under which to log the metric. This field will * be removed in a future MLflow version. */ - @JsonProperty("run_uuid") private String runUuid; /** Step at which to log the metric */ - @JsonProperty("step") private Long step; /** Unix timestamp in milliseconds at the time metric was logged. */ - @JsonProperty("timestamp") private Long timestamp; /** Double value of the metric being logged. 
*/ - @JsonProperty("value") private Double value; public LogMetric setDatasetDigest(String datasetDigest) { @@ -171,4 +173,53 @@ public String toString() { .add("value", value) .toString(); } + + LogMetricPb toPb() { + LogMetricPb pb = new LogMetricPb(); + pb.setDatasetDigest(datasetDigest); + pb.setDatasetName(datasetName); + pb.setKey(key); + pb.setModelId(modelId); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + pb.setStep(step); + pb.setTimestamp(timestamp); + pb.setValue(value); + + return pb; + } + + static LogMetric fromPb(LogMetricPb pb) { + LogMetric model = new LogMetric(); + model.setDatasetDigest(pb.getDatasetDigest()); + model.setDatasetName(pb.getDatasetName()); + model.setKey(pb.getKey()); + model.setModelId(pb.getModelId()); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + model.setStep(pb.getStep()); + model.setTimestamp(pb.getTimestamp()); + model.setValue(pb.getValue()); + + return model; + } + + public static class LogMetricSerializer extends JsonSerializer { + @Override + public void serialize(LogMetric value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogMetricPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogMetricDeserializer extends JsonDeserializer { + @Override + public LogMetric deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogMetricPb pb = mapper.readValue(p, LogMetricPb.class); + return LogMetric.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricPb.java new file mode 100755 index 000000000..97cd65fbb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricPb.java @@ -0,0 +1,156 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LogMetricPb { + @JsonProperty("dataset_digest") + private String datasetDigest; + + @JsonProperty("dataset_name") + private String datasetName; + + @JsonProperty("key") + private String key; + + @JsonProperty("model_id") + private String modelId; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_uuid") + private String runUuid; + + @JsonProperty("step") + private Long step; + + @JsonProperty("timestamp") + private Long timestamp; + + @JsonProperty("value") + private Double value; + + public LogMetricPb setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public LogMetricPb setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + public LogMetricPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LogMetricPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public LogMetricPb 
setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public LogMetricPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + public LogMetricPb setStep(Long step) { + this.step = step; + return this; + } + + public Long getStep() { + return step; + } + + public LogMetricPb setTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + public Long getTimestamp() { + return timestamp; + } + + public LogMetricPb setValue(Double value) { + this.value = value; + return this; + } + + public Double getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogMetricPb that = (LogMetricPb) o; + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(key, that.key) + && Objects.equals(modelId, that.modelId) + && Objects.equals(runId, that.runId) + && Objects.equals(runUuid, that.runUuid) + && Objects.equals(step, that.step) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + datasetDigest, datasetName, key, modelId, runId, runUuid, step, timestamp, value); + } + + @Override + public String toString() { + return new ToStringer(LogMetricPb.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .add("key", key) + .add("modelId", modelId) + .add("runId", runId) + .add("runUuid", runUuid) + .add("step", step) + .add("timestamp", timestamp) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java index 18ac44107..a8fe180a9 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogMetricResponse.LogMetricResponseSerializer.class) +@JsonDeserialize(using = LogMetricResponse.LogMetricResponseDeserializer.class) public class LogMetricResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogMetricResponse.class).toString(); } + + LogMetricResponsePb toPb() { + LogMetricResponsePb pb = new LogMetricResponsePb(); + + return pb; + } + + static LogMetricResponse fromPb(LogMetricResponsePb pb) { + LogMetricResponse model = new LogMetricResponse(); + + return model; + } + + public static class LogMetricResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogMetricResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogMetricResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogMetricResponseDeserializer extends JsonDeserializer { + @Override + public LogMetricResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogMetricResponsePb pb = mapper.readValue(p, LogMetricResponsePb.class); + return LogMetricResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponsePb.java new file mode 100755 index 000000000..966542062 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogMetricResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogMetricResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java index 22a1aac3b..9a0043299 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogModel.LogModelSerializer.class) +@JsonDeserialize(using = LogModel.LogModelDeserializer.class) public class LogModel { /** MLmodel file in json format. */ - @JsonProperty("model_json") private String modelJson; /** ID of the run to log under */ - @JsonProperty("run_id") private String runId; public LogModel setModelJson(String modelJson) { @@ -55,4 +64,39 @@ public String toString() { .add("runId", runId) .toString(); } + + LogModelPb toPb() { + LogModelPb pb = new LogModelPb(); + pb.setModelJson(modelJson); + pb.setRunId(runId); + + return pb; + } + + static LogModel fromPb(LogModelPb pb) { + LogModel model = new LogModel(); + model.setModelJson(pb.getModelJson()); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class LogModelSerializer extends JsonSerializer { + @Override + public void serialize(LogModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogModelDeserializer extends JsonDeserializer { + @Override + public LogModel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogModelPb pb = mapper.readValue(p, LogModelPb.class); + return LogModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelPb.java new file mode 100755 index 000000000..f6834fa8d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LogModelPb { + @JsonProperty("model_json") + private String modelJson; + + @JsonProperty("run_id") + private String runId; + + public LogModelPb setModelJson(String modelJson) { + this.modelJson = modelJson; + return this; + } + + public String getModelJson() { + return modelJson; + } + + public LogModelPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogModelPb that = (LogModelPb) o; + return Objects.equals(modelJson, that.modelJson) && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(modelJson, runId); + } + + @Override + public String toString() { + return new ToStringer(LogModelPb.class) + .add("modelJson", modelJson) + .add("runId", runId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java index 937328f29..84609b3b5 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogModelResponse.LogModelResponseSerializer.class) +@JsonDeserialize(using = LogModelResponse.LogModelResponseDeserializer.class) public class LogModelResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogModelResponse.class).toString(); } + + LogModelResponsePb toPb() { + LogModelResponsePb pb = new LogModelResponsePb(); + + return pb; + } + + static LogModelResponse fromPb(LogModelResponsePb pb) { + LogModelResponse model = new LogModelResponse(); + + return model; + } + + public static class LogModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogModelResponseDeserializer extends JsonDeserializer { + @Override + public LogModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogModelResponsePb pb = mapper.readValue(p, LogModelResponsePb.class); + return LogModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponsePb.java new file mode 100755 index 000000000..a50d57938 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogModelResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogModelResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java index 00fd2638a..10961e7b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = LogOutputsRequest.LogOutputsRequestSerializer.class) +@JsonDeserialize(using = LogOutputsRequest.LogOutputsRequestDeserializer.class) public class LogOutputsRequest { /** The model outputs from the Run. */ - @JsonProperty("models") private Collection models; /** The ID of the Run from which to log outputs. */ - @JsonProperty("run_id") private String runId; public LogOutputsRequest setModels(Collection models) { @@ -56,4 +65,40 @@ public String toString() { .add("runId", runId) .toString(); } + + LogOutputsRequestPb toPb() { + LogOutputsRequestPb pb = new LogOutputsRequestPb(); + pb.setModels(models); + pb.setRunId(runId); + + return pb; + } + + static LogOutputsRequest fromPb(LogOutputsRequestPb pb) { + LogOutputsRequest model = new LogOutputsRequest(); + model.setModels(pb.getModels()); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class LogOutputsRequestSerializer extends JsonSerializer { + @Override + public void serialize(LogOutputsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogOutputsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogOutputsRequestDeserializer extends JsonDeserializer { + @Override + public LogOutputsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogOutputsRequestPb pb = mapper.readValue(p, LogOutputsRequestPb.class); + return LogOutputsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequestPb.java new file mode 100755 index 000000000..07a2617b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LogOutputsRequestPb { + @JsonProperty("models") + private Collection models; + + @JsonProperty("run_id") + private String runId; + + public LogOutputsRequestPb setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + public LogOutputsRequestPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogOutputsRequestPb that = (LogOutputsRequestPb) o; + return Objects.equals(models, that.models) && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(models, runId); + } + + @Override + public String toString() { + return new ToStringer(LogOutputsRequestPb.class) + .add("models", models) + .add("runId", runId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java index 8e3c962b3..af92a6884 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogOutputsResponse.LogOutputsResponseSerializer.class) +@JsonDeserialize(using = LogOutputsResponse.LogOutputsResponseDeserializer.class) public class LogOutputsResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogOutputsResponse.class).toString(); } + + LogOutputsResponsePb toPb() { + LogOutputsResponsePb pb = new LogOutputsResponsePb(); + + return pb; + } + + static LogOutputsResponse fromPb(LogOutputsResponsePb pb) { + LogOutputsResponse model = new LogOutputsResponse(); + + return model; + } + + public static class LogOutputsResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogOutputsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogOutputsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogOutputsResponseDeserializer extends JsonDeserializer { + @Override + public 
LogOutputsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogOutputsResponsePb pb = mapper.readValue(p, LogOutputsResponsePb.class); + return LogOutputsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponsePb.java new file mode 100755 index 000000000..7ea0a4991 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogOutputsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogOutputsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java index 3ea5a20ed..f32051b21 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogParam.LogParamSerializer.class) +@JsonDeserialize(using = LogParam.LogParamDeserializer.class) public class LogParam { /** Name of the param. Maximum size is 255 bytes. */ - @JsonProperty("key") private String key; /** ID of the run under which to log the param. Must be provided. */ - @JsonProperty("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will * be removed in a future MLflow version. */ - @JsonProperty("run_uuid") private String runUuid; /** String value of the param being logged. Maximum size is 500 bytes. 
*/ - @JsonProperty("value") private String value; public LogParam setKey(String key) { @@ -89,4 +96,43 @@ public String toString() { .add("value", value) .toString(); } + + LogParamPb toPb() { + LogParamPb pb = new LogParamPb(); + pb.setKey(key); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + pb.setValue(value); + + return pb; + } + + static LogParam fromPb(LogParamPb pb) { + LogParam model = new LogParam(); + model.setKey(pb.getKey()); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + model.setValue(pb.getValue()); + + return model; + } + + public static class LogParamSerializer extends JsonSerializer { + @Override + public void serialize(LogParam value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogParamPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogParamDeserializer extends JsonDeserializer { + @Override + public LogParam deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogParamPb pb = mapper.readValue(p, LogParamPb.class); + return LogParam.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamPb.java new file mode 100755 index 000000000..43972f9ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LogParamPb { + @JsonProperty("key") + private String key; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_uuid") + private String runUuid; + + @JsonProperty("value") + private String value; + + public LogParamPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LogParamPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public LogParamPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + public LogParamPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogParamPb that = (LogParamPb) o; + return Objects.equals(key, that.key) + && Objects.equals(runId, that.runId) + && Objects.equals(runUuid, that.runUuid) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, runId, runUuid, value); + } + + @Override + public String toString() { + return new ToStringer(LogParamPb.class) + .add("key", key) + .add("runId", runId) + .add("runUuid", runUuid) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java index 5a11a026b..b16d9e7c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LogParamResponse.LogParamResponseSerializer.class) +@JsonDeserialize(using = LogParamResponse.LogParamResponseDeserializer.class) public class LogParamResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(LogParamResponse.class).toString(); } + + LogParamResponsePb toPb() { + LogParamResponsePb pb = new LogParamResponsePb(); + + return pb; + } + + static LogParamResponse fromPb(LogParamResponsePb pb) { + LogParamResponse model = new LogParamResponse(); + + return model; + } + + public static class LogParamResponseSerializer extends JsonSerializer { + @Override + public void serialize(LogParamResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogParamResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogParamResponseDeserializer extends JsonDeserializer { + @Override + public LogParamResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogParamResponsePb pb = mapper.readValue(p, LogParamResponsePb.class); + return LogParamResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponsePb.java new file mode 100755 index 000000000..7a400e272 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class LogParamResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogParamResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java index 5a57b30c2..24a3c9fbb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,13 +21,13 @@ * linked run metrics. */ @Generated +@JsonSerialize(using = LoggedModel.LoggedModelSerializer.class) +@JsonDeserialize(using = LoggedModel.LoggedModelDeserializer.class) public class LoggedModel { /** The params and metrics attached to the logged model. */ - @JsonProperty("data") private LoggedModelData data; /** The logged model attributes such as model ID, status, tags, etc. */ - @JsonProperty("info") private LoggedModelInfo info; public LoggedModel setData(LoggedModelData data) { @@ -56,4 +65,39 @@ public int hashCode() { public String toString() { return new ToStringer(LoggedModel.class).add("data", data).add("info", info).toString(); } + + LoggedModelPb toPb() { + LoggedModelPb pb = new LoggedModelPb(); + pb.setData(data); + pb.setInfo(info); + + return pb; + } + + static LoggedModel fromPb(LoggedModelPb pb) { + LoggedModel model = new LoggedModel(); + model.setData(pb.getData()); + model.setInfo(pb.getInfo()); + + return model; + } + + public static class LoggedModelSerializer extends JsonSerializer { + @Override + public void serialize(LoggedModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LoggedModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LoggedModelDeserializer extends JsonDeserializer { + @Override + public LoggedModel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LoggedModelPb pb = mapper.readValue(p, LoggedModelPb.class); + return LoggedModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java index bdd0efc9c..a77c9342a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** A LoggedModelData message includes logged model params and linked metrics. */ @Generated +@JsonSerialize(using = LoggedModelData.LoggedModelDataSerializer.class) +@JsonDeserialize(using = LoggedModelData.LoggedModelDataDeserializer.class) public class LoggedModelData { /** Performance metrics linked to the model. */ - @JsonProperty("metrics") private Collection metrics; /** Immutable string key-value pairs of the model. 
*/ - @JsonProperty("params") private Collection params; public LoggedModelData setMetrics(Collection metrics) { @@ -57,4 +66,40 @@ public String toString() { .add("params", params) .toString(); } + + LoggedModelDataPb toPb() { + LoggedModelDataPb pb = new LoggedModelDataPb(); + pb.setMetrics(metrics); + pb.setParams(params); + + return pb; + } + + static LoggedModelData fromPb(LoggedModelDataPb pb) { + LoggedModelData model = new LoggedModelData(); + model.setMetrics(pb.getMetrics()); + model.setParams(pb.getParams()); + + return model; + } + + public static class LoggedModelDataSerializer extends JsonSerializer { + @Override + public void serialize(LoggedModelData value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LoggedModelDataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LoggedModelDataDeserializer extends JsonDeserializer { + @Override + public LoggedModelData deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LoggedModelDataPb pb = mapper.readValue(p, LoggedModelDataPb.class); + return LoggedModelData.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelDataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelDataPb.java new file mode 100755 index 000000000..73fc16b64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelDataPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A LoggedModelData message includes logged model params and linked metrics. */ +@Generated +class LoggedModelDataPb { + @JsonProperty("metrics") + private Collection metrics; + + @JsonProperty("params") + private Collection params; + + public LoggedModelDataPb setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public LoggedModelDataPb setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelDataPb that = (LoggedModelDataPb) o; + return Objects.equals(metrics, that.metrics) && Objects.equals(params, that.params); + } + + @Override + public int hashCode() { + return Objects.hash(metrics, params); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelDataPb.class) + .add("metrics", metrics) + .add("params", params) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java index e6f626243..a4e3871f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java @@ -4,59 +4,58 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** A LoggedModelInfo includes logged model attributes, tags, and registration info. */ @Generated +@JsonSerialize(using = LoggedModelInfo.LoggedModelInfoSerializer.class) +@JsonDeserialize(using = LoggedModelInfo.LoggedModelInfoDeserializer.class) public class LoggedModelInfo { /** The URI of the directory where model artifacts are stored. */ - @JsonProperty("artifact_uri") private String artifactUri; /** The timestamp when the model was created in milliseconds since the UNIX epoch. */ - @JsonProperty("creation_timestamp_ms") private Long creationTimestampMs; /** The ID of the user or principal that created the model. */ - @JsonProperty("creator_id") private Long creatorId; /** The ID of the experiment that owns the model. */ - @JsonProperty("experiment_id") private String experimentId; /** The timestamp when the model was last updated in milliseconds since the UNIX epoch. */ - @JsonProperty("last_updated_timestamp_ms") private Long lastUpdatedTimestampMs; /** The unique identifier for the logged model. */ - @JsonProperty("model_id") private String modelId; /** The type of model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``. */ - @JsonProperty("model_type") private String modelType; /** The name of the model. */ - @JsonProperty("name") private String name; /** The ID of the run that created the model. */ - @JsonProperty("source_run_id") private String sourceRunId; /** The status of whether or not the model is ready for use. 
*/ - @JsonProperty("status") private LoggedModelStatus status; /** Details on the current model status. */ - @JsonProperty("status_message") private String statusMessage; /** Mutable string key-value pairs set on the model. */ - @JsonProperty("tags") private Collection tags; public LoggedModelInfo setArtifactUri(String artifactUri) { @@ -220,4 +219,60 @@ public String toString() { .add("tags", tags) .toString(); } + + LoggedModelInfoPb toPb() { + LoggedModelInfoPb pb = new LoggedModelInfoPb(); + pb.setArtifactUri(artifactUri); + pb.setCreationTimestampMs(creationTimestampMs); + pb.setCreatorId(creatorId); + pb.setExperimentId(experimentId); + pb.setLastUpdatedTimestampMs(lastUpdatedTimestampMs); + pb.setModelId(modelId); + pb.setModelType(modelType); + pb.setName(name); + pb.setSourceRunId(sourceRunId); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + pb.setTags(tags); + + return pb; + } + + static LoggedModelInfo fromPb(LoggedModelInfoPb pb) { + LoggedModelInfo model = new LoggedModelInfo(); + model.setArtifactUri(pb.getArtifactUri()); + model.setCreationTimestampMs(pb.getCreationTimestampMs()); + model.setCreatorId(pb.getCreatorId()); + model.setExperimentId(pb.getExperimentId()); + model.setLastUpdatedTimestampMs(pb.getLastUpdatedTimestampMs()); + model.setModelId(pb.getModelId()); + model.setModelType(pb.getModelType()); + model.setName(pb.getName()); + model.setSourceRunId(pb.getSourceRunId()); + model.setStatus(pb.getStatus()); + model.setStatusMessage(pb.getStatusMessage()); + model.setTags(pb.getTags()); + + return model; + } + + public static class LoggedModelInfoSerializer extends JsonSerializer { + @Override + public void serialize(LoggedModelInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LoggedModelInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LoggedModelInfoDeserializer extends JsonDeserializer { + @Override + public LoggedModelInfo 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LoggedModelInfoPb pb = mapper.readValue(p, LoggedModelInfoPb.class); + return LoggedModelInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfoPb.java new file mode 100755 index 000000000..2682e85ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfoPb.java @@ -0,0 +1,211 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A LoggedModelInfo includes logged model attributes, tags, and registration info. 
*/ +@Generated +class LoggedModelInfoPb { + @JsonProperty("artifact_uri") + private String artifactUri; + + @JsonProperty("creation_timestamp_ms") + private Long creationTimestampMs; + + @JsonProperty("creator_id") + private Long creatorId; + + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("last_updated_timestamp_ms") + private Long lastUpdatedTimestampMs; + + @JsonProperty("model_id") + private String modelId; + + @JsonProperty("model_type") + private String modelType; + + @JsonProperty("name") + private String name; + + @JsonProperty("source_run_id") + private String sourceRunId; + + @JsonProperty("status") + private LoggedModelStatus status; + + @JsonProperty("status_message") + private String statusMessage; + + @JsonProperty("tags") + private Collection tags; + + public LoggedModelInfoPb setArtifactUri(String artifactUri) { + this.artifactUri = artifactUri; + return this; + } + + public String getArtifactUri() { + return artifactUri; + } + + public LoggedModelInfoPb setCreationTimestampMs(Long creationTimestampMs) { + this.creationTimestampMs = creationTimestampMs; + return this; + } + + public Long getCreationTimestampMs() { + return creationTimestampMs; + } + + public LoggedModelInfoPb setCreatorId(Long creatorId) { + this.creatorId = creatorId; + return this; + } + + public Long getCreatorId() { + return creatorId; + } + + public LoggedModelInfoPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public LoggedModelInfoPb setLastUpdatedTimestampMs(Long lastUpdatedTimestampMs) { + this.lastUpdatedTimestampMs = lastUpdatedTimestampMs; + return this; + } + + public Long getLastUpdatedTimestampMs() { + return lastUpdatedTimestampMs; + } + + public LoggedModelInfoPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public 
LoggedModelInfoPb setModelType(String modelType) { + this.modelType = modelType; + return this; + } + + public String getModelType() { + return modelType; + } + + public LoggedModelInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LoggedModelInfoPb setSourceRunId(String sourceRunId) { + this.sourceRunId = sourceRunId; + return this; + } + + public String getSourceRunId() { + return sourceRunId; + } + + public LoggedModelInfoPb setStatus(LoggedModelStatus status) { + this.status = status; + return this; + } + + public LoggedModelStatus getStatus() { + return status; + } + + public LoggedModelInfoPb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public LoggedModelInfoPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelInfoPb that = (LoggedModelInfoPb) o; + return Objects.equals(artifactUri, that.artifactUri) + && Objects.equals(creationTimestampMs, that.creationTimestampMs) + && Objects.equals(creatorId, that.creatorId) + && Objects.equals(experimentId, that.experimentId) + && Objects.equals(lastUpdatedTimestampMs, that.lastUpdatedTimestampMs) + && Objects.equals(modelId, that.modelId) + && Objects.equals(modelType, that.modelType) + && Objects.equals(name, that.name) + && Objects.equals(sourceRunId, that.sourceRunId) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + artifactUri, + creationTimestampMs, + creatorId, + experimentId, + lastUpdatedTimestampMs, + modelId, + modelType, + name, + 
sourceRunId, + status, + statusMessage, + tags); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelInfoPb.class) + .add("artifactUri", artifactUri) + .add("creationTimestampMs", creationTimestampMs) + .add("creatorId", creatorId) + .add("experimentId", experimentId) + .add("lastUpdatedTimestampMs", lastUpdatedTimestampMs) + .add("modelId", modelId) + .add("modelType", modelType) + .add("name", name) + .add("sourceRunId", sourceRunId) + .add("status", status) + .add("statusMessage", statusMessage) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java index 1d3ccc2b8..00fafe7f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Parameter associated with a LoggedModel. 
*/ @Generated +@JsonSerialize(using = LoggedModelParameter.LoggedModelParameterSerializer.class) +@JsonDeserialize(using = LoggedModelParameter.LoggedModelParameterDeserializer.class) public class LoggedModelParameter { /** The key identifying this param. */ - @JsonProperty("key") private String key; /** The value of this param. */ - @JsonProperty("value") private String value; public LoggedModelParameter setKey(String key) { @@ -56,4 +65,42 @@ public String toString() { .add("value", value) .toString(); } + + LoggedModelParameterPb toPb() { + LoggedModelParameterPb pb = new LoggedModelParameterPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static LoggedModelParameter fromPb(LoggedModelParameterPb pb) { + LoggedModelParameter model = new LoggedModelParameter(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class LoggedModelParameterSerializer extends JsonSerializer { + @Override + public void serialize( + LoggedModelParameter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LoggedModelParameterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LoggedModelParameterDeserializer + extends JsonDeserializer { + @Override + public LoggedModelParameter deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LoggedModelParameterPb pb = mapper.readValue(p, LoggedModelParameterPb.class); + return LoggedModelParameter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameterPb.java new file mode 100755 index 000000000..7092dde43 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameterPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Parameter associated with a LoggedModel. */ +@Generated +class LoggedModelParameterPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public LoggedModelParameterPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LoggedModelParameterPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelParameterPb that = (LoggedModelParameterPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelParameterPb.class) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelPb.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelPb.java new file mode 100755 index 000000000..1399289ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A logged model message includes logged model attributes, tags, registration info, params, and + * linked run metrics. + */ +@Generated +class LoggedModelPb { + @JsonProperty("data") + private LoggedModelData data; + + @JsonProperty("info") + private LoggedModelInfo info; + + public LoggedModelPb setData(LoggedModelData data) { + this.data = data; + return this; + } + + public LoggedModelData getData() { + return data; + } + + public LoggedModelPb setInfo(LoggedModelInfo info) { + this.info = info; + return this; + } + + public LoggedModelInfo getInfo() { + return info; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelPb that = (LoggedModelPb) o; + return Objects.equals(data, that.data) && Objects.equals(info, that.info); + } + + @Override + public int hashCode() { + return Objects.hash(data, info); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelPb.class).add("data", data).add("info", info).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java index bd80c2d14..58f9a34ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Tag for a LoggedModel. */ @Generated +@JsonSerialize(using = LoggedModelTag.LoggedModelTagSerializer.class) +@JsonDeserialize(using = LoggedModelTag.LoggedModelTagDeserializer.class) public class LoggedModelTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. 
*/ - @JsonProperty("value") private String value; public LoggedModelTag setKey(String key) { @@ -53,4 +62,40 @@ public int hashCode() { public String toString() { return new ToStringer(LoggedModelTag.class).add("key", key).add("value", value).toString(); } + + LoggedModelTagPb toPb() { + LoggedModelTagPb pb = new LoggedModelTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static LoggedModelTag fromPb(LoggedModelTagPb pb) { + LoggedModelTag model = new LoggedModelTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class LoggedModelTagSerializer extends JsonSerializer { + @Override + public void serialize(LoggedModelTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LoggedModelTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LoggedModelTagDeserializer extends JsonDeserializer { + @Override + public LoggedModelTag deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LoggedModelTagPb pb = mapper.readValue(p, LoggedModelTagPb.class); + return LoggedModelTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTagPb.java new file mode 100755 index 000000000..0d7521528 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTagPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Tag for a LoggedModel. 
*/ +@Generated +class LoggedModelTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public LoggedModelTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LoggedModelTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelTagPb that = (LoggedModelTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java index 62e7340d9..6ace1be3d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java @@ -4,51 +4,54 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** Metric associated with a run, represented as a key-value pair. */ @Generated +@JsonSerialize(using = Metric.MetricSerializer.class) +@JsonDeserialize(using = Metric.MetricDeserializer.class) public class Metric { /** * The dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset * that uniquely identifies it within datasets of the same name. */ - @JsonProperty("dataset_digest") private String datasetDigest; /** * The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, * “fantastic-elk-3” */ - @JsonProperty("dataset_name") private String datasetName; /** The key identifying the metric. */ - @JsonProperty("key") private String key; /** * The ID of the logged model or registered model version associated with the metric, if * applicable. */ - @JsonProperty("model_id") private String modelId; /** The ID of the run containing the metric. */ - @JsonProperty("run_id") private String runId; /** The step at which the metric was logged. */ - @JsonProperty("step") private Long step; /** The timestamp at which the metric was recorded. */ - @JsonProperty("timestamp") private Long timestamp; /** The value of the metric. 
*/ - @JsonProperty("value") private Double value; public Metric setDatasetDigest(String datasetDigest) { @@ -156,4 +159,51 @@ public String toString() { .add("value", value) .toString(); } + + MetricPb toPb() { + MetricPb pb = new MetricPb(); + pb.setDatasetDigest(datasetDigest); + pb.setDatasetName(datasetName); + pb.setKey(key); + pb.setModelId(modelId); + pb.setRunId(runId); + pb.setStep(step); + pb.setTimestamp(timestamp); + pb.setValue(value); + + return pb; + } + + static Metric fromPb(MetricPb pb) { + Metric model = new Metric(); + model.setDatasetDigest(pb.getDatasetDigest()); + model.setDatasetName(pb.getDatasetName()); + model.setKey(pb.getKey()); + model.setModelId(pb.getModelId()); + model.setRunId(pb.getRunId()); + model.setStep(pb.getStep()); + model.setTimestamp(pb.getTimestamp()); + model.setValue(pb.getValue()); + + return model; + } + + public static class MetricSerializer extends JsonSerializer { + @Override + public void serialize(Metric value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MetricPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MetricDeserializer extends JsonDeserializer { + @Override + public Metric deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MetricPb pb = mapper.readValue(p, MetricPb.class); + return Metric.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MetricPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MetricPb.java new file mode 100755 index 000000000..578bee789 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MetricPb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metric associated with a run, represented as a key-value pair. */ +@Generated +class MetricPb { + @JsonProperty("dataset_digest") + private String datasetDigest; + + @JsonProperty("dataset_name") + private String datasetName; + + @JsonProperty("key") + private String key; + + @JsonProperty("model_id") + private String modelId; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("step") + private Long step; + + @JsonProperty("timestamp") + private Long timestamp; + + @JsonProperty("value") + private Double value; + + public MetricPb setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public MetricPb setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + public MetricPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public MetricPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public MetricPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public MetricPb setStep(Long step) { + this.step = step; + return this; + } + + public Long getStep() { + return step; + } + + public MetricPb setTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + public Long getTimestamp() { + return timestamp; + } + + public MetricPb setValue(Double value) { + this.value = value; + return this; + } + + public Double getValue() { + return value; + } + + @Override + public boolean equals(Object o) 
{ + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MetricPb that = (MetricPb) o; + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(key, that.key) + && Objects.equals(modelId, that.modelId) + && Objects.equals(runId, that.runId) + && Objects.equals(step, that.step) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(datasetDigest, datasetName, key, modelId, runId, step, timestamp, value); + } + + @Override + public String toString() { + return new ToStringer(MetricPb.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .add("key", key) + .add("modelId", modelId) + .add("runId", runId) + .add("step", step) + .add("timestamp", timestamp) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java index a736a5978..01d1e1f0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java @@ -4,41 +4,45 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Model.ModelSerializer.class) +@JsonDeserialize(using = Model.ModelDeserializer.class) public class Model { /** Timestamp recorded when this `registered_model` was created. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** Description of this `registered_model`. */ - @JsonProperty("description") private String description; /** Timestamp recorded when metadata for this `registered_model` was last updated. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** * Collection of latest model versions for each stage. Only contains models with current `READY` * status. */ - @JsonProperty("latest_versions") private Collection latestVersions; /** Unique name for the model. */ - @JsonProperty("name") private String name; /** Tags: Additional metadata key-value pairs for this `registered_model`. */ - @JsonProperty("tags") private Collection tags; /** User that created this `registered_model` */ - @JsonProperty("user_id") private String userId; public Model setCreationTimestamp(Long creationTimestamp) { @@ -136,4 +140,49 @@ public String toString() { .add("userId", userId) .toString(); } + + ModelPb toPb() { + ModelPb pb = new ModelPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setDescription(description); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setLatestVersions(latestVersions); + pb.setName(name); + pb.setTags(tags); + pb.setUserId(userId); + + return pb; + } + + static Model fromPb(ModelPb pb) { + Model model = new Model(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setDescription(pb.getDescription()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setLatestVersions(pb.getLatestVersions()); + model.setName(pb.getName()); + model.setTags(pb.getTags()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class 
ModelSerializer extends JsonSerializer { + @Override + public void serialize(Model value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelDeserializer extends JsonDeserializer { + @Override + public Model deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelPb pb = mapper.readValue(p, ModelPb.class); + return Model.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java index f4b82307a..7cd7e69f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java @@ -4,49 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ModelDatabricks.ModelDatabricksSerializer.class) +@JsonDeserialize(using = ModelDatabricks.ModelDatabricksDeserializer.class) public class ModelDatabricks { /** 
Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** User-specified description for the object. */ - @JsonProperty("description") private String description; /** Unique identifier for the object. */ - @JsonProperty("id") private String id; /** Time of the object at last update, as a Unix timestamp in milliseconds. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Array of model versions, each the latest version for its stage. */ - @JsonProperty("latest_versions") private Collection latestVersions; /** Name of the model. */ - @JsonProperty("name") private String name; /** * Permission level of the requesting user on the object. For what is allowed at each level, see * [MLflow Model permissions](..). */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; /** Array of tags associated with the model. */ - @JsonProperty("tags") private Collection tags; /** The username of the user that created the object. 
*/ - @JsonProperty("user_id") private String userId; public ModelDatabricks setCreationTimestamp(Long creationTimestamp) { @@ -174,4 +176,54 @@ public String toString() { .add("userId", userId) .toString(); } + + ModelDatabricksPb toPb() { + ModelDatabricksPb pb = new ModelDatabricksPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setDescription(description); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setLatestVersions(latestVersions); + pb.setName(name); + pb.setPermissionLevel(permissionLevel); + pb.setTags(tags); + pb.setUserId(userId); + + return pb; + } + + static ModelDatabricks fromPb(ModelDatabricksPb pb) { + ModelDatabricks model = new ModelDatabricks(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setDescription(pb.getDescription()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setLatestVersions(pb.getLatestVersions()); + model.setName(pb.getName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setTags(pb.getTags()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class ModelDatabricksSerializer extends JsonSerializer { + @Override + public void serialize(ModelDatabricks value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelDatabricksPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelDatabricksDeserializer extends JsonDeserializer { + @Override + public ModelDatabricks deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelDatabricksPb pb = mapper.readValue(p, ModelDatabricksPb.class); + return ModelDatabricks.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricksPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricksPb.java new file mode 100755 index 000000000..71053f9e9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricksPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ModelDatabricksPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("description") + private String description; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("latest_versions") + private Collection latestVersions; + + @JsonProperty("name") + private String name; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("user_id") + private String userId; + + public ModelDatabricksPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ModelDatabricksPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ModelDatabricksPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } 
+ + public ModelDatabricksPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ModelDatabricksPb setLatestVersions(Collection latestVersions) { + this.latestVersions = latestVersions; + return this; + } + + public Collection getLatestVersions() { + return latestVersions; + } + + public ModelDatabricksPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ModelDatabricksPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ModelDatabricksPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ModelDatabricksPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelDatabricksPb that = (ModelDatabricksPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(description, that.description) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(latestVersions, that.latestVersions) + && Objects.equals(name, that.name) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(tags, that.tags) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + description, + id, + lastUpdatedTimestamp, + latestVersions, + name, + permissionLevel, + tags, + userId); + } + + @Override + public String toString() { + return 
new ToStringer(ModelDatabricksPb.class) + .add("creationTimestamp", creationTimestamp) + .add("description", description) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("latestVersions", latestVersions) + .add("name", name) + .add("permissionLevel", permissionLevel) + .add("tags", tags) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java index be6cc4713..a724f96f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Represents a LoggedModel or Registered Model Version input to a Run. */ @Generated +@JsonSerialize(using = ModelInput.ModelInputSerializer.class) +@JsonDeserialize(using = ModelInput.ModelInputDeserializer.class) public class ModelInput { /** The unique identifier of the model. 
*/ - @JsonProperty("model_id") private String modelId; public ModelInput setModelId(String modelId) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(ModelInput.class).add("modelId", modelId).toString(); } + + ModelInputPb toPb() { + ModelInputPb pb = new ModelInputPb(); + pb.setModelId(modelId); + + return pb; + } + + static ModelInput fromPb(ModelInputPb pb) { + ModelInput model = new ModelInput(); + model.setModelId(pb.getModelId()); + + return model; + } + + public static class ModelInputSerializer extends JsonSerializer { + @Override + public void serialize(ModelInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelInputDeserializer extends JsonDeserializer { + @Override + public ModelInput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelInputPb pb = mapper.readValue(p, ModelInputPb.class); + return ModelInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInputPb.java new file mode 100755 index 000000000..32053a30f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInputPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a LoggedModel or Registered Model Version input to a Run. 
*/ +@Generated +class ModelInputPb { + @JsonProperty("model_id") + private String modelId; + + public ModelInputPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelInputPb that = (ModelInputPb) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(ModelInputPb.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java index 6581459ee..4dce6ab22 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Represents a LoggedModel output of a Run. 
*/ @Generated +@JsonSerialize(using = ModelOutput.ModelOutputSerializer.class) +@JsonDeserialize(using = ModelOutput.ModelOutputDeserializer.class) public class ModelOutput { /** The unique identifier of the model. */ - @JsonProperty("model_id") private String modelId; /** The step at which the model was produced. */ - @JsonProperty("step") private Long step; public ModelOutput setModelId(String modelId) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(ModelOutput.class).add("modelId", modelId).add("step", step).toString(); } + + ModelOutputPb toPb() { + ModelOutputPb pb = new ModelOutputPb(); + pb.setModelId(modelId); + pb.setStep(step); + + return pb; + } + + static ModelOutput fromPb(ModelOutputPb pb) { + ModelOutput model = new ModelOutput(); + model.setModelId(pb.getModelId()); + model.setStep(pb.getStep()); + + return model; + } + + public static class ModelOutputSerializer extends JsonSerializer { + @Override + public void serialize(ModelOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelOutputDeserializer extends JsonDeserializer { + @Override + public ModelOutput deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelOutputPb pb = mapper.readValue(p, ModelOutputPb.class); + return ModelOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutputPb.java new file mode 100755 index 000000000..eb44f7106 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutputPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a LoggedModel output of a Run. */ +@Generated +class ModelOutputPb { + @JsonProperty("model_id") + private String modelId; + + @JsonProperty("step") + private Long step; + + public ModelOutputPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public ModelOutputPb setStep(Long step) { + this.step = step; + return this; + } + + public Long getStep() { + return step; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelOutputPb that = (ModelOutputPb) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(step, that.step); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, step); + } + + @Override + public String toString() { + return new ToStringer(ModelOutputPb.class).add("modelId", modelId).add("step", step).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelPb.java new file mode 100755 index 000000000..28516b55f 
--- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelPb.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ModelPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("description") + private String description; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("latest_versions") + private Collection latestVersions; + + @JsonProperty("name") + private String name; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("user_id") + private String userId; + + public ModelPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ModelPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ModelPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ModelPb setLatestVersions(Collection latestVersions) { + this.latestVersions = latestVersions; + return this; + } + + public Collection getLatestVersions() { + return latestVersions; + } + + public ModelPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ModelPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public 
ModelPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelPb that = (ModelPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(description, that.description) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(latestVersions, that.latestVersions) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, description, lastUpdatedTimestamp, latestVersions, name, tags, userId); + } + + @Override + public String toString() { + return new ToStringer(ModelPb.class) + .add("creationTimestamp", creationTimestamp) + .add("description", description) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("latestVersions", latestVersions) + .add("name", name) + .add("tags", tags) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java index 357684e9c..ae95bfd10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java @@ -22,7 +22,7 @@ public ApproveTransitionRequestResponse approveTransitionRequest( String path = "/api/2.0/mlflow/transition-requests/approve"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return 
apiClient.execute(req, ApproveTransitionRequestResponse.class); @@ -36,7 +36,7 @@ public CreateCommentResponse createComment(CreateComment request) { String path = "/api/2.0/mlflow/comments/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateCommentResponse.class); @@ -50,7 +50,7 @@ public CreateModelResponse createModel(CreateModelRequest request) { String path = "/api/2.0/mlflow/registered-models/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateModelResponse.class); @@ -64,7 +64,7 @@ public CreateModelVersionResponse createModelVersion(CreateModelVersionRequest r String path = "/api/2.0/mlflow/model-versions/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateModelVersionResponse.class); @@ -78,7 +78,7 @@ public CreateTransitionRequestResponse createTransitionRequest(CreateTransitionR String path = "/api/2.0/mlflow/transition-requests/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateTransitionRequestResponse.class); @@ -92,7 +92,7 @@ public CreateWebhookResponse 
createWebhook(CreateRegistryWebhook request) { String path = "/api/2.0/mlflow/registry-webhooks/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateWebhookResponse.class); @@ -106,7 +106,7 @@ public void deleteComment(DeleteCommentRequest request) { String path = "/api/2.0/mlflow/comments/delete"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteCommentResponse.class); } catch (IOException e) { @@ -119,7 +119,7 @@ public void deleteModel(DeleteModelRequest request) { String path = "/api/2.0/mlflow/registered-models/delete"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteModelResponse.class); } catch (IOException e) { @@ -132,7 +132,7 @@ public void deleteModelTag(DeleteModelTagRequest request) { String path = "/api/2.0/mlflow/registered-models/delete-tag"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteModelTagResponse.class); } catch (IOException e) { @@ -145,7 +145,7 @@ public void deleteModelVersion(DeleteModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/delete"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteModelVersionResponse.class); } catch (IOException e) { @@ -158,7 +158,7 @@ 
public void deleteModelVersionTag(DeleteModelVersionTagRequest request) { String path = "/api/2.0/mlflow/model-versions/delete-tag"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteModelVersionTagResponse.class); } catch (IOException e) { @@ -171,7 +171,7 @@ public void deleteTransitionRequest(DeleteTransitionRequestRequest request) { String path = "/api/2.0/mlflow/transition-requests/delete"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteTransitionRequestResponse.class); } catch (IOException e) { @@ -184,7 +184,7 @@ public void deleteWebhook(DeleteWebhookRequest request) { String path = "/api/2.0/mlflow/registry-webhooks/delete"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteWebhookResponse.class); } catch (IOException e) { @@ -197,7 +197,7 @@ public GetLatestVersionsResponse getLatestVersions(GetLatestVersionsRequest requ String path = "/api/2.0/mlflow/registered-models/get-latest-versions"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, GetLatestVersionsResponse.class); @@ -211,7 +211,7 @@ public GetModelResponse getModel(GetModelRequest request) { String path = "/api/2.0/mlflow/databricks/registered-models/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", 
"application/json"); return apiClient.execute(req, GetModelResponse.class); } catch (IOException e) { @@ -224,7 +224,7 @@ public GetModelVersionResponse getModelVersion(GetModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetModelVersionResponse.class); } catch (IOException e) { @@ -238,7 +238,7 @@ public GetModelVersionDownloadUriResponse getModelVersionDownloadUri( String path = "/api/2.0/mlflow/model-versions/get-download-uri"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetModelVersionDownloadUriResponse.class); } catch (IOException e) { @@ -255,7 +255,7 @@ public GetRegisteredModelPermissionLevelsResponse getPermissionLevels( request.getRegisteredModelId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetRegisteredModelPermissionLevelsResponse.class); } catch (IOException e) { @@ -269,7 +269,7 @@ public RegisteredModelPermissions getPermissions(GetRegisteredModelPermissionsRe String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RegisteredModelPermissions.class); } catch (IOException e) { @@ -282,7 +282,7 @@ public ListModelsResponse listModels(ListModelsRequest request) { String path = "/api/2.0/mlflow/registered-models/list"; try { Request req = new Request("GET", path); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListModelsResponse.class); } catch (IOException e) { @@ -296,7 +296,7 @@ public ListTransitionRequestsResponse listTransitionRequests( String path = "/api/2.0/mlflow/transition-requests/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListTransitionRequestsResponse.class); } catch (IOException e) { @@ -309,7 +309,7 @@ public ListRegistryWebhooks listWebhooks(ListWebhooksRequest request) { String path = "/api/2.0/mlflow/registry-webhooks/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListRegistryWebhooks.class); } catch (IOException e) { @@ -322,7 +322,7 @@ public RejectTransitionRequestResponse rejectTransitionRequest(RejectTransitionR String path = "/api/2.0/mlflow/transition-requests/reject"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RejectTransitionRequestResponse.class); @@ -336,7 +336,7 @@ public RenameModelResponse renameModel(RenameModelRequest request) { String path = "/api/2.0/mlflow/registered-models/rename"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RenameModelResponse.class); @@ -350,7 +350,7 @@ public 
SearchModelVersionsResponse searchModelVersions(SearchModelVersionsReques String path = "/api/2.0/mlflow/model-versions/search"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SearchModelVersionsResponse.class); } catch (IOException e) { @@ -363,7 +363,7 @@ public SearchModelsResponse searchModels(SearchModelsRequest request) { String path = "/api/2.0/mlflow/registered-models/search"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SearchModelsResponse.class); } catch (IOException e) { @@ -376,7 +376,7 @@ public void setModelTag(SetModelTagRequest request) { String path = "/api/2.0/mlflow/registered-models/set-tag"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetModelTagResponse.class); @@ -390,7 +390,7 @@ public void setModelVersionTag(SetModelVersionTagRequest request) { String path = "/api/2.0/mlflow/model-versions/set-tag"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetModelVersionTagResponse.class); @@ -405,7 +405,7 @@ public RegisteredModelPermissions setPermissions(RegisteredModelPermissionsReque String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegisteredModelPermissions.class); @@ -419,7 +419,7 @@ public TestRegistryWebhookResponse testRegistryWebhook(TestRegistryWebhookReques String path = "/api/2.0/mlflow/registry-webhooks/test"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TestRegistryWebhookResponse.class); @@ -433,7 +433,7 @@ public TransitionStageResponse transitionStage(TransitionModelVersionStageDatabr String path = "/api/2.0/mlflow/databricks/model-versions/transition-stage"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TransitionStageResponse.class); @@ -447,7 +447,7 @@ public UpdateCommentResponse updateComment(UpdateComment request) { String path = "/api/2.0/mlflow/comments/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateCommentResponse.class); @@ -461,7 +461,7 @@ public void updateModel(UpdateModelRequest request) { String path = "/api/2.0/mlflow/registered-models/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); apiClient.execute(req, UpdateModelResponse.class); @@ -475,7 +475,7 @@ public void updateModelVersion(UpdateModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateModelVersionResponse.class); @@ -490,7 +490,7 @@ public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRe String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RegisteredModelPermissions.class); @@ -504,7 +504,7 @@ public void updateWebhook(UpdateRegistryWebhook request) { String path = "/api/2.0/mlflow/registry-webhooks/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateWebhookResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java index e96f3ad84..298989fa7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ModelTag.ModelTagSerializer.class) +@JsonDeserialize(using = ModelTag.ModelTagDeserializer.class) public class ModelTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. */ - @JsonProperty("value") private String value; public ModelTag setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(ModelTag.class).add("key", key).add("value", value).toString(); } + + ModelTagPb toPb() { + ModelTagPb pb = new ModelTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static ModelTag fromPb(ModelTagPb pb) { + ModelTag model = new ModelTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ModelTagSerializer extends JsonSerializer { + @Override + public void serialize(ModelTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelTagDeserializer extends JsonDeserializer { + @Override + public ModelTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelTagPb pb = mapper.readValue(p, ModelTagPb.class); + return ModelTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTagPb.java new file mode 100755 index 000000000..464d2ba62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ModelTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public ModelTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ModelTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelTagPb that = (ModelTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(ModelTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.java index 588ad0492..401badcbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.java @@ -4,67 +4,65 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ModelVersion.ModelVersionSerializer.class) +@JsonDeserialize(using = ModelVersion.ModelVersionDeserializer.class) public class ModelVersion { /** Timestamp recorded when this `model_version` was created. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** Current stage for this `model_version`. */ - @JsonProperty("current_stage") private String currentStage; /** Description of this `model_version`. */ - @JsonProperty("description") private String description; /** Timestamp recorded when metadata for this `model_version` was last updated. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Unique name of the model */ - @JsonProperty("name") private String name; /** * MLflow run ID used when creating `model_version`, if `source` was generated by an experiment * run stored in MLflow tracking server. 
*/ - @JsonProperty("run_id") private String runId; /** Run Link: Direct link to the run that generated this version */ - @JsonProperty("run_link") private String runLink; /** * URI indicating the location of the source model artifacts, used when creating `model_version` */ - @JsonProperty("source") private String source; /** Current status of `model_version` */ - @JsonProperty("status") private ModelVersionStatus status; /** Details on current `status`, if it is pending or failed. */ - @JsonProperty("status_message") private String statusMessage; /** Tags: Additional metadata key-value pairs for this `model_version`. */ - @JsonProperty("tags") private Collection tags; /** User that created this `model_version`. */ - @JsonProperty("user_id") private String userId; /** Model's version number. */ - @JsonProperty("version") private String version; public ModelVersion setCreationTimestamp(Long creationTimestamp) { @@ -240,4 +238,61 @@ public String toString() { .add("version", version) .toString(); } + + ModelVersionPb toPb() { + ModelVersionPb pb = new ModelVersionPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setCurrentStage(currentStage); + pb.setDescription(description); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setName(name); + pb.setRunId(runId); + pb.setRunLink(runLink); + pb.setSource(source); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + pb.setTags(tags); + pb.setUserId(userId); + pb.setVersion(version); + + return pb; + } + + static ModelVersion fromPb(ModelVersionPb pb) { + ModelVersion model = new ModelVersion(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCurrentStage(pb.getCurrentStage()); + model.setDescription(pb.getDescription()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setName(pb.getName()); + model.setRunId(pb.getRunId()); + model.setRunLink(pb.getRunLink()); + model.setSource(pb.getSource()); + model.setStatus(pb.getStatus()); + 
model.setStatusMessage(pb.getStatusMessage()); + model.setTags(pb.getTags()); + model.setUserId(pb.getUserId()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class ModelVersionSerializer extends JsonSerializer { + @Override + public void serialize(ModelVersion value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelVersionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelVersionDeserializer extends JsonDeserializer { + @Override + public ModelVersion deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelVersionPb pb = mapper.readValue(p, ModelVersionPb.class); + return ModelVersion.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java index 89602be70..42fcb2eed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ModelVersionDatabricks.ModelVersionDatabricksSerializer.class) +@JsonDeserialize(using = ModelVersionDatabricks.ModelVersionDatabricksDeserializer.class) public class ModelVersionDatabricks { /** Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** @@ -25,30 +35,24 @@ public class ModelVersionDatabricks { * *

* `Archived`: Archived stage. */ - @JsonProperty("current_stage") private Stage currentStage; /** User-specified description for the object. */ - @JsonProperty("description") private String description; /** Time of the object at last update, as a Unix timestamp in milliseconds. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Name of the model. */ - @JsonProperty("name") private String name; /** * Permission level of the requesting user on the object. For what is allowed at each level, see * [MLflow Model permissions](..). */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; /** Unique identifier for the MLflow tracking run associated with the source model artifacts. */ - @JsonProperty("run_id") private String runId; /** @@ -56,14 +60,12 @@ public class ModelVersionDatabricks { * time only for model versions whose source run is from a tracking server that is different from * the registry server. */ - @JsonProperty("run_link") private String runLink; /** * URI that indicates the location of the source model artifacts. This is used when creating the * model version. */ - @JsonProperty("source") private String source; /** @@ -74,23 +76,18 @@ public class ModelVersionDatabricks { * *

* `READY`: Model version is ready for use. */ - @JsonProperty("status") private Status status; /** Details on the current status, for example why registration failed. */ - @JsonProperty("status_message") private String statusMessage; /** Array of tags that are associated with the model version. */ - @JsonProperty("tags") private Collection tags; /** The username of the user that created the object. */ - @JsonProperty("user_id") private String userId; /** Version of the model. */ - @JsonProperty("version") private String version; public ModelVersionDatabricks setCreationTimestamp(Long creationTimestamp) { @@ -278,4 +275,67 @@ public String toString() { .add("version", version) .toString(); } + + ModelVersionDatabricksPb toPb() { + ModelVersionDatabricksPb pb = new ModelVersionDatabricksPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setCurrentStage(currentStage); + pb.setDescription(description); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setName(name); + pb.setPermissionLevel(permissionLevel); + pb.setRunId(runId); + pb.setRunLink(runLink); + pb.setSource(source); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + pb.setTags(tags); + pb.setUserId(userId); + pb.setVersion(version); + + return pb; + } + + static ModelVersionDatabricks fromPb(ModelVersionDatabricksPb pb) { + ModelVersionDatabricks model = new ModelVersionDatabricks(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCurrentStage(pb.getCurrentStage()); + model.setDescription(pb.getDescription()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setName(pb.getName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setRunId(pb.getRunId()); + model.setRunLink(pb.getRunLink()); + model.setSource(pb.getSource()); + model.setStatus(pb.getStatus()); + model.setStatusMessage(pb.getStatusMessage()); + model.setTags(pb.getTags()); + model.setUserId(pb.getUserId()); + model.setVersion(pb.getVersion()); + + return model; + } 
+ + public static class ModelVersionDatabricksSerializer + extends JsonSerializer { + @Override + public void serialize( + ModelVersionDatabricks value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelVersionDatabricksPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelVersionDatabricksDeserializer + extends JsonDeserializer { + @Override + public ModelVersionDatabricks deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelVersionDatabricksPb pb = mapper.readValue(p, ModelVersionDatabricksPb.class); + return ModelVersionDatabricks.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricksPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricksPb.java new file mode 100755 index 000000000..9ebe37e3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricksPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ModelVersionDatabricksPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("current_stage") + private Stage currentStage; + + @JsonProperty("description") + private String description; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("name") + private String name; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_link") + private String runLink; + + @JsonProperty("source") + private String source; + + @JsonProperty("status") + private Status status; + + @JsonProperty("status_message") + private String statusMessage; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("user_id") + private String userId; + + @JsonProperty("version") + private String version; + + public ModelVersionDatabricksPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ModelVersionDatabricksPb setCurrentStage(Stage currentStage) { + this.currentStage = currentStage; + return this; + } + + public Stage getCurrentStage() { + return currentStage; + } + + public ModelVersionDatabricksPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ModelVersionDatabricksPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + 
public ModelVersionDatabricksPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ModelVersionDatabricksPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ModelVersionDatabricksPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ModelVersionDatabricksPb setRunLink(String runLink) { + this.runLink = runLink; + return this; + } + + public String getRunLink() { + return runLink; + } + + public ModelVersionDatabricksPb setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public ModelVersionDatabricksPb setStatus(Status status) { + this.status = status; + return this; + } + + public Status getStatus() { + return status; + } + + public ModelVersionDatabricksPb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public ModelVersionDatabricksPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ModelVersionDatabricksPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + public ModelVersionDatabricksPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelVersionDatabricksPb that = (ModelVersionDatabricksPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(currentStage, 
that.currentStage) + && Objects.equals(description, that.description) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(name, that.name) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(runId, that.runId) + && Objects.equals(runLink, that.runLink) + && Objects.equals(source, that.source) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(tags, that.tags) + && Objects.equals(userId, that.userId) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + currentStage, + description, + lastUpdatedTimestamp, + name, + permissionLevel, + runId, + runLink, + source, + status, + statusMessage, + tags, + userId, + version); + } + + @Override + public String toString() { + return new ToStringer(ModelVersionDatabricksPb.class) + .add("creationTimestamp", creationTimestamp) + .add("currentStage", currentStage) + .add("description", description) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("name", name) + .add("permissionLevel", permissionLevel) + .add("runId", runId) + .add("runLink", runLink) + .add("source", source) + .add("status", status) + .add("statusMessage", statusMessage) + .add("tags", tags) + .add("userId", userId) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionPb.java new file mode 100755 index 000000000..18c7ae5aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ModelVersionPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("current_stage") + private String currentStage; + + @JsonProperty("description") + private String description; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("name") + private String name; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_link") + private String runLink; + + @JsonProperty("source") + private String source; + + @JsonProperty("status") + private ModelVersionStatus status; + + @JsonProperty("status_message") + private String statusMessage; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("user_id") + private String userId; + + @JsonProperty("version") + private String version; + + public ModelVersionPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ModelVersionPb setCurrentStage(String currentStage) { + this.currentStage = currentStage; + return this; + } + + public String getCurrentStage() { + return currentStage; + } + + public ModelVersionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ModelVersionPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ModelVersionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + 
return name; + } + + public ModelVersionPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ModelVersionPb setRunLink(String runLink) { + this.runLink = runLink; + return this; + } + + public String getRunLink() { + return runLink; + } + + public ModelVersionPb setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public ModelVersionPb setStatus(ModelVersionStatus status) { + this.status = status; + return this; + } + + public ModelVersionStatus getStatus() { + return status; + } + + public ModelVersionPb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public ModelVersionPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ModelVersionPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + public ModelVersionPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelVersionPb that = (ModelVersionPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(currentStage, that.currentStage) + && Objects.equals(description, that.description) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(name, that.name) + && Objects.equals(runId, that.runId) + && Objects.equals(runLink, that.runLink) + && Objects.equals(source, that.source) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(tags, that.tags) + && 
Objects.equals(userId, that.userId) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + currentStage, + description, + lastUpdatedTimestamp, + name, + runId, + runLink, + source, + status, + statusMessage, + tags, + userId, + version); + } + + @Override + public String toString() { + return new ToStringer(ModelVersionPb.class) + .add("creationTimestamp", creationTimestamp) + .add("currentStage", currentStage) + .add("description", description) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("name", name) + .add("runId", runId) + .add("runLink", runLink) + .add("source", source) + .add("status", status) + .add("statusMessage", statusMessage) + .add("tags", tags) + .add("userId", userId) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java index e91265002..26b728886 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated 
+@JsonSerialize(using = ModelVersionTag.ModelVersionTagSerializer.class) +@JsonDeserialize(using = ModelVersionTag.ModelVersionTagDeserializer.class) public class ModelVersionTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. */ - @JsonProperty("value") private String value; public ModelVersionTag setKey(String key) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(ModelVersionTag.class).add("key", key).add("value", value).toString(); } + + ModelVersionTagPb toPb() { + ModelVersionTagPb pb = new ModelVersionTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static ModelVersionTag fromPb(ModelVersionTagPb pb) { + ModelVersionTag model = new ModelVersionTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ModelVersionTagSerializer extends JsonSerializer { + @Override + public void serialize(ModelVersionTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelVersionTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelVersionTagDeserializer extends JsonDeserializer { + @Override + public ModelVersionTag deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelVersionTagPb pb = mapper.readValue(p, ModelVersionTagPb.class); + return ModelVersionTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTagPb.java new file mode 100755 index 000000000..c86adf16e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ModelVersionTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public ModelVersionTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ModelVersionTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelVersionTagPb that = (ModelVersionTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(ModelVersionTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java index 9f8444d9d..a13ebf6ac 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Param associated with a run. */ @Generated +@JsonSerialize(using = Param.ParamSerializer.class) +@JsonDeserialize(using = Param.ParamDeserializer.class) public class Param { /** Key identifying this param. */ - @JsonProperty("key") private String key; /** Value associated with this param. 
*/ - @JsonProperty("value") private String value; public Param setKey(String key) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(Param.class).add("key", key).add("value", value).toString(); } + + ParamPb toPb() { + ParamPb pb = new ParamPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static Param fromPb(ParamPb pb) { + Param model = new Param(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ParamSerializer extends JsonSerializer { + @Override + public void serialize(Param value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ParamPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ParamDeserializer extends JsonDeserializer { + @Override + public Param deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ParamPb pb = mapper.readValue(p, ParamPb.class); + return Param.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ParamPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ParamPb.java new file mode 100755 index 000000000..5c157f884 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ParamPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Param associated with a run. 
*/ +@Generated +class ParamPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public ParamPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ParamPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ParamPb that = (ParamPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(ParamPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java index ec6ad3f55..b0a2c5efc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java @@ -4,25 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = RegisteredModelAccessControlRequest.RegisteredModelAccessControlRequestSerializer.class) +@JsonDeserialize( + using = + RegisteredModelAccessControlRequest.RegisteredModelAccessControlRequestDeserializer.class) public class RegisteredModelAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public RegisteredModelAccessControlRequest setGroupName(String groupName) { @@ -87,4 +97,48 @@ public String toString() { .add("userName", userName) .toString(); } + + RegisteredModelAccessControlRequestPb toPb() { + RegisteredModelAccessControlRequestPb pb = new RegisteredModelAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static RegisteredModelAccessControlRequest fromPb(RegisteredModelAccessControlRequestPb pb) { + RegisteredModelAccessControlRequest model = new RegisteredModelAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class RegisteredModelAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelAccessControlRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelAccessControlRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelAccessControlRequestPb pb = + mapper.readValue(p, RegisteredModelAccessControlRequestPb.class); + return RegisteredModelAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequestPb.java new file mode 100755 index 000000000..c01798738 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegisteredModelAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private RegisteredModelPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public RegisteredModelAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public RegisteredModelAccessControlRequestPb setPermissionLevel( + RegisteredModelPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RegisteredModelPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public RegisteredModelAccessControlRequestPb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RegisteredModelAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelAccessControlRequestPb that = (RegisteredModelAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return 
Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponse.java index f4f0d9b30..b44ecb603 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponse.java @@ -4,30 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + RegisteredModelAccessControlResponse.RegisteredModelAccessControlResponseSerializer.class) +@JsonDeserialize( + using = + RegisteredModelAccessControlResponse.RegisteredModelAccessControlResponseDeserializer.class) public class RegisteredModelAccessControlResponse { /** All permissions. 
*/ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public RegisteredModelAccessControlResponse setAllPermissions( @@ -103,4 +113,50 @@ public String toString() { .add("userName", userName) .toString(); } + + RegisteredModelAccessControlResponsePb toPb() { + RegisteredModelAccessControlResponsePb pb = new RegisteredModelAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static RegisteredModelAccessControlResponse fromPb(RegisteredModelAccessControlResponsePb pb) { + RegisteredModelAccessControlResponse model = new RegisteredModelAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class RegisteredModelAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelAccessControlResponse deserialize( + JsonParser p, DeserializationContext ctxt) 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelAccessControlResponsePb pb = + mapper.readValue(p, RegisteredModelAccessControlResponsePb.class); + return RegisteredModelAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponsePb.java new file mode 100755 index 000000000..a6309a7a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponsePb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegisteredModelAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public RegisteredModelAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public RegisteredModelAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public RegisteredModelAccessControlResponsePb setGroupName(String 
groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public RegisteredModelAccessControlResponsePb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RegisteredModelAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelAccessControlResponsePb that = (RegisteredModelAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java index 3bc0130f2..183c77cf0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java @@ -4,22 +4,30 @@ 
import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RegisteredModelPermission.RegisteredModelPermissionSerializer.class) +@JsonDeserialize(using = RegisteredModelPermission.RegisteredModelPermissionDeserializer.class) public class RegisteredModelPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; public RegisteredModelPermission setInherited(Boolean inherited) { @@ -73,4 +81,45 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + RegisteredModelPermissionPb toPb() { + RegisteredModelPermissionPb pb = new RegisteredModelPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static RegisteredModelPermission fromPb(RegisteredModelPermissionPb pb) { + RegisteredModelPermission model = new RegisteredModelPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } 
+ + public static class RegisteredModelPermissionSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelPermissionDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelPermissionPb pb = mapper.readValue(p, RegisteredModelPermissionPb.class); + return RegisteredModelPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionPb.java new file mode 100755 index 000000000..7af02debc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegisteredModelPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private RegisteredModelPermissionLevel permissionLevel; + + public RegisteredModelPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public RegisteredModelPermissionPb setInheritedFromObject( + Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public RegisteredModelPermissionPb setPermissionLevel( + RegisteredModelPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RegisteredModelPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelPermissionPb that = (RegisteredModelPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java index 38f96a356..b5d46f0ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RegisteredModelPermissions.RegisteredModelPermissionsSerializer.class) +@JsonDeserialize(using = RegisteredModelPermissions.RegisteredModelPermissionsDeserializer.class) public class RegisteredModelPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public RegisteredModelPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + RegisteredModelPermissionsPb toPb() { + RegisteredModelPermissionsPb pb = new RegisteredModelPermissionsPb(); + pb.setAccessControlList(accessControlList); + 
pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static RegisteredModelPermissions fromPb(RegisteredModelPermissionsPb pb) { + RegisteredModelPermissions model = new RegisteredModelPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class RegisteredModelPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelPermissionsDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelPermissionsPb pb = mapper.readValue(p, RegisteredModelPermissionsPb.class); + return RegisteredModelPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java index b77b306d4..54c7f566a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java @@ -4,17 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + RegisteredModelPermissionsDescription.RegisteredModelPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = + RegisteredModelPermissionsDescription.RegisteredModelPermissionsDescriptionDeserializer + .class) public class RegisteredModelPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; public RegisteredModelPermissionsDescription setDescription(String 
description) { @@ -57,4 +71,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + RegisteredModelPermissionsDescriptionPb toPb() { + RegisteredModelPermissionsDescriptionPb pb = new RegisteredModelPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static RegisteredModelPermissionsDescription fromPb(RegisteredModelPermissionsDescriptionPb pb) { + RegisteredModelPermissionsDescription model = new RegisteredModelPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class RegisteredModelPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelPermissionsDescription deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelPermissionsDescriptionPb pb = + mapper.readValue(p, RegisteredModelPermissionsDescriptionPb.class); + return RegisteredModelPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescriptionPb.java new file mode 100755 index 000000000..4bb2ef229 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegisteredModelPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private RegisteredModelPermissionLevel permissionLevel; + + public RegisteredModelPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public RegisteredModelPermissionsDescriptionPb setPermissionLevel( + RegisteredModelPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RegisteredModelPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelPermissionsDescriptionPb that = (RegisteredModelPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, 
that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsPb.java new file mode 100755 index 000000000..08be20af8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegisteredModelPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public RegisteredModelPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public RegisteredModelPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public RegisteredModelPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if 
(o == null || getClass() != o.getClass()) return false; + RegisteredModelPermissionsPb that = (RegisteredModelPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java index faa9eca6f..bfec15578 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java @@ -4,19 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = 
RegisteredModelPermissionsRequest.RegisteredModelPermissionsRequestSerializer.class) +@JsonDeserialize( + using = RegisteredModelPermissionsRequest.RegisteredModelPermissionsRequestDeserializer.class) public class RegisteredModelPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The registered model for which to get or manage permissions. */ - @JsonIgnore private String registeredModelId; + private String registeredModelId; public RegisteredModelPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +69,44 @@ public String toString() { .add("registeredModelId", registeredModelId) .toString(); } + + RegisteredModelPermissionsRequestPb toPb() { + RegisteredModelPermissionsRequestPb pb = new RegisteredModelPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setRegisteredModelId(registeredModelId); + + return pb; + } + + static RegisteredModelPermissionsRequest fromPb(RegisteredModelPermissionsRequestPb pb) { + RegisteredModelPermissionsRequest model = new RegisteredModelPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setRegisteredModelId(pb.getRegisteredModelId()); + + return model; + } + + public static class RegisteredModelPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RegisteredModelPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelPermissionsRequestPb pb = + mapper.readValue(p, RegisteredModelPermissionsRequestPb.class); + return RegisteredModelPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequestPb.java new file mode 100755 index 000000000..f327c1b3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegisteredModelPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String registeredModelId; + + public RegisteredModelPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public RegisteredModelPermissionsRequestPb setRegisteredModelId(String registeredModelId) { + this.registeredModelId = registeredModelId; + return this; + } + + public String getRegisteredModelId() { + return registeredModelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelPermissionsRequestPb that = (RegisteredModelPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && 
Objects.equals(registeredModelId, that.registeredModelId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, registeredModelId); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("registeredModelId", registeredModelId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java index 10fcb66c0..4004ccbbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RegistryWebhook.RegistryWebhookSerializer.class) +@JsonDeserialize(using = RegistryWebhook.RegistryWebhookDeserializer.class) public class RegistryWebhook { /** Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** User-specified description for the webhook. 
*/ - @JsonProperty("description") private String description; /** @@ -49,27 +58,21 @@ public class RegistryWebhook { * *

* `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. */ - @JsonProperty("events") private Collection events; /** */ - @JsonProperty("http_url_spec") private HttpUrlSpecWithoutSecret httpUrlSpec; /** Webhook ID */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("job_spec") private JobSpecWithoutSecret jobSpec; /** Time of the object at last update, as a Unix timestamp in milliseconds. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** Name of the model whose events would trigger this webhook. */ - @JsonProperty("model_name") private String modelName; /** @@ -81,7 +84,6 @@ public class RegistryWebhook { *

* `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a * real event. */ - @JsonProperty("status") private RegistryWebhookStatus status; public RegistryWebhook setCreationTimestamp(Long creationTimestamp) { @@ -209,4 +211,54 @@ public String toString() { .add("status", status) .toString(); } + + RegistryWebhookPb toPb() { + RegistryWebhookPb pb = new RegistryWebhookPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setDescription(description); + pb.setEvents(events); + pb.setHttpUrlSpec(httpUrlSpec); + pb.setId(id); + pb.setJobSpec(jobSpec); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setModelName(modelName); + pb.setStatus(status); + + return pb; + } + + static RegistryWebhook fromPb(RegistryWebhookPb pb) { + RegistryWebhook model = new RegistryWebhook(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setDescription(pb.getDescription()); + model.setEvents(pb.getEvents()); + model.setHttpUrlSpec(pb.getHttpUrlSpec()); + model.setId(pb.getId()); + model.setJobSpec(pb.getJobSpec()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setModelName(pb.getModelName()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class RegistryWebhookSerializer extends JsonSerializer { + @Override + public void serialize(RegistryWebhook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegistryWebhookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegistryWebhookDeserializer extends JsonDeserializer { + @Override + public RegistryWebhook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegistryWebhookPb pb = mapper.readValue(p, RegistryWebhookPb.class); + return RegistryWebhook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookPb.java new file mode 100755 index 000000000..97c464208 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RegistryWebhookPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("description") + private String description; + + @JsonProperty("events") + private Collection events; + + @JsonProperty("http_url_spec") + private HttpUrlSpecWithoutSecret httpUrlSpec; + + @JsonProperty("id") + private String id; + + @JsonProperty("job_spec") + private JobSpecWithoutSecret jobSpec; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("status") + private RegistryWebhookStatus status; + + public RegistryWebhookPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public RegistryWebhookPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public RegistryWebhookPb setEvents(Collection events) { + this.events = events; + return this; + 
} + + public Collection getEvents() { + return events; + } + + public RegistryWebhookPb setHttpUrlSpec(HttpUrlSpecWithoutSecret httpUrlSpec) { + this.httpUrlSpec = httpUrlSpec; + return this; + } + + public HttpUrlSpecWithoutSecret getHttpUrlSpec() { + return httpUrlSpec; + } + + public RegistryWebhookPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public RegistryWebhookPb setJobSpec(JobSpecWithoutSecret jobSpec) { + this.jobSpec = jobSpec; + return this; + } + + public JobSpecWithoutSecret getJobSpec() { + return jobSpec; + } + + public RegistryWebhookPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public RegistryWebhookPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public RegistryWebhookPb setStatus(RegistryWebhookStatus status) { + this.status = status; + return this; + } + + public RegistryWebhookStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegistryWebhookPb that = (RegistryWebhookPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(description, that.description) + && Objects.equals(events, that.events) + && Objects.equals(httpUrlSpec, that.httpUrlSpec) + && Objects.equals(id, that.id) + && Objects.equals(jobSpec, that.jobSpec) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(modelName, that.modelName) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + description, + events, + httpUrlSpec, + id, + jobSpec, + lastUpdatedTimestamp, + modelName, + 
status); + } + + @Override + public String toString() { + return new ToStringer(RegistryWebhookPb.class) + .add("creationTimestamp", creationTimestamp) + .add("description", description) + .add("events", events) + .add("httpUrlSpec", httpUrlSpec) + .add("id", id) + .add("jobSpec", jobSpec) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("modelName", modelName) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java index c781e08a7..b9598b63a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RejectTransitionRequest.RejectTransitionRequestSerializer.class) +@JsonDeserialize(using = RejectTransitionRequest.RejectTransitionRequestDeserializer.class) public class RejectTransitionRequest { /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Name of the model. 
*/ - @JsonProperty("name") private String name; /** @@ -28,11 +37,9 @@ public class RejectTransitionRequest { * *

* `Archived`: Archived stage. */ - @JsonProperty("stage") private Stage stage; /** Version of the model. */ - @JsonProperty("version") private String version; public RejectTransitionRequest setComment(String comment) { @@ -96,4 +103,47 @@ public String toString() { .add("version", version) .toString(); } + + RejectTransitionRequestPb toPb() { + RejectTransitionRequestPb pb = new RejectTransitionRequestPb(); + pb.setComment(comment); + pb.setName(name); + pb.setStage(stage); + pb.setVersion(version); + + return pb; + } + + static RejectTransitionRequest fromPb(RejectTransitionRequestPb pb) { + RejectTransitionRequest model = new RejectTransitionRequest(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setStage(pb.getStage()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class RejectTransitionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RejectTransitionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RejectTransitionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RejectTransitionRequestDeserializer + extends JsonDeserializer { + @Override + public RejectTransitionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RejectTransitionRequestPb pb = mapper.readValue(p, RejectTransitionRequestPb.class); + return RejectTransitionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestPb.java new file mode 100755 index 000000000..84da20fe5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RejectTransitionRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("stage") + private Stage stage; + + @JsonProperty("version") + private String version; + + public RejectTransitionRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public RejectTransitionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public RejectTransitionRequestPb setStage(Stage stage) { + this.stage = stage; + return this; + } + + public Stage getStage() { + return stage; + } + + public RejectTransitionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RejectTransitionRequestPb that = (RejectTransitionRequestPb) o; + return 
Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(stage, that.stage) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, stage, version); + } + + @Override + public String toString() { + return new ToStringer(RejectTransitionRequestPb.class) + .add("comment", comment) + .add("name", name) + .add("stage", stage) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java index 8a568ffba..433f0fd85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = RejectTransitionRequestResponse.RejectTransitionRequestResponseSerializer.class) +@JsonDeserialize( + using = RejectTransitionRequestResponse.RejectTransitionRequestResponseDeserializer.class) public class RejectTransitionRequestResponse { /** Activity recorded for the action. 
*/ - @JsonProperty("activity") private Activity activity; public RejectTransitionRequestResponse setActivity(Activity activity) { @@ -41,4 +53,42 @@ public String toString() { .add("activity", activity) .toString(); } + + RejectTransitionRequestResponsePb toPb() { + RejectTransitionRequestResponsePb pb = new RejectTransitionRequestResponsePb(); + pb.setActivity(activity); + + return pb; + } + + static RejectTransitionRequestResponse fromPb(RejectTransitionRequestResponsePb pb) { + RejectTransitionRequestResponse model = new RejectTransitionRequestResponse(); + model.setActivity(pb.getActivity()); + + return model; + } + + public static class RejectTransitionRequestResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RejectTransitionRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RejectTransitionRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RejectTransitionRequestResponseDeserializer + extends JsonDeserializer { + @Override + public RejectTransitionRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RejectTransitionRequestResponsePb pb = + mapper.readValue(p, RejectTransitionRequestResponsePb.class); + return RejectTransitionRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponsePb.java new file mode 100755 index 000000000..5a687ff42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RejectTransitionRequestResponsePb { + @JsonProperty("activity") + private Activity activity; + + public RejectTransitionRequestResponsePb setActivity(Activity activity) { + this.activity = activity; + return this; + } + + public Activity getActivity() { + return activity; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RejectTransitionRequestResponsePb that = (RejectTransitionRequestResponsePb) o; + return Objects.equals(activity, that.activity); + } + + @Override + public int hashCode() { + return Objects.hash(activity); + } + + @Override + public String toString() { + return new ToStringer(RejectTransitionRequestResponsePb.class) + .add("activity", activity) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java index 3bcd7c31a..50729fa84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RenameModelRequest.RenameModelRequestSerializer.class) +@JsonDeserialize(using = RenameModelRequest.RenameModelRequestDeserializer.class) public class RenameModelRequest { /** Registered model unique name identifier. */ - @JsonProperty("name") private String name; /** If provided, updates the name for this `registered_model`. */ - @JsonProperty("new_name") private String newName; public RenameModelRequest setName(String name) { @@ -55,4 +64,40 @@ public String toString() { .add("newName", newName) .toString(); } + + RenameModelRequestPb toPb() { + RenameModelRequestPb pb = new RenameModelRequestPb(); + pb.setName(name); + pb.setNewName(newName); + + return pb; + } + + static RenameModelRequest fromPb(RenameModelRequestPb pb) { + RenameModelRequest model = new RenameModelRequest(); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + + return model; + } + + public static class RenameModelRequestSerializer extends JsonSerializer { + @Override + public void serialize(RenameModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RenameModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RenameModelRequestDeserializer extends JsonDeserializer { + @Override + public RenameModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RenameModelRequestPb pb = mapper.readValue(p, RenameModelRequestPb.class); + return RenameModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequestPb.java new file mode 100755 index 000000000..aee737566 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RenameModelRequestPb { + @JsonProperty("name") + private String name; + + @JsonProperty("new_name") + private String newName; + + public RenameModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public RenameModelRequestPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RenameModelRequestPb that = (RenameModelRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(newName, that.newName); + } + + @Override + public int hashCode() { + return Objects.hash(name, newName); + } + + @Override + public String toString() { + return new ToStringer(RenameModelRequestPb.class) + .add("name", name) + .add("newName", newName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java index 2d4c0181a..4eb1e9342 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RenameModelResponse.RenameModelResponseSerializer.class) +@JsonDeserialize(using = RenameModelResponse.RenameModelResponseDeserializer.class) public class RenameModelResponse { /** */ - @JsonProperty("registered_model") private Model registeredModel; public RenameModelResponse setRegisteredModel(Model registeredModel) { @@ -41,4 +51,39 @@ public String toString() { .add("registeredModel", registeredModel) .toString(); } + + RenameModelResponsePb toPb() { + RenameModelResponsePb pb = new RenameModelResponsePb(); + pb.setRegisteredModel(registeredModel); + + return pb; + } + + static RenameModelResponse fromPb(RenameModelResponsePb pb) { + RenameModelResponse model = new RenameModelResponse(); + model.setRegisteredModel(pb.getRegisteredModel()); + + return model; + } + + public static class RenameModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(RenameModelResponse 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RenameModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RenameModelResponseDeserializer + extends JsonDeserializer { + @Override + public RenameModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RenameModelResponsePb pb = mapper.readValue(p, RenameModelResponsePb.class); + return RenameModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponsePb.java new file mode 100755 index 000000000..50ba899d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RenameModelResponsePb { + @JsonProperty("registered_model") + private Model registeredModel; + + public RenameModelResponsePb setRegisteredModel(Model registeredModel) { + this.registeredModel = registeredModel; + return this; + } + + public Model getRegisteredModel() { + return registeredModel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RenameModelResponsePb that = (RenameModelResponsePb) o; + return Objects.equals(registeredModel, that.registeredModel); + } + + @Override + public int hashCode() { + return Objects.hash(registeredModel); + } + + @Override + public String toString() { + return new ToStringer(RenameModelResponsePb.class) + .add("registeredModel", registeredModel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java index 546e9de05..79ca166f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreExperiment.RestoreExperimentSerializer.class) +@JsonDeserialize(using = RestoreExperiment.RestoreExperimentDeserializer.class) public class RestoreExperiment { /** ID of the associated experiment. */ - @JsonProperty("experiment_id") private String experimentId; public RestoreExperiment setExperimentId(String experimentId) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreExperiment.class).add("experimentId", experimentId).toString(); } + + RestoreExperimentPb toPb() { + RestoreExperimentPb pb = new RestoreExperimentPb(); + pb.setExperimentId(experimentId); + + return pb; + } + + static RestoreExperiment fromPb(RestoreExperimentPb pb) { + RestoreExperiment model = new RestoreExperiment(); + model.setExperimentId(pb.getExperimentId()); + + return model; + } + + public static class RestoreExperimentSerializer extends JsonSerializer { + @Override + public void serialize(RestoreExperiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreExperimentDeserializer extends JsonDeserializer { + @Override + public RestoreExperiment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreExperimentPb pb = mapper.readValue(p, RestoreExperimentPb.class); + return RestoreExperiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentPb.java new file mode 100755 index 000000000..62d2dc61c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestoreExperimentPb { + @JsonProperty("experiment_id") + private String experimentId; + + public RestoreExperimentPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreExperimentPb that = (RestoreExperimentPb) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(RestoreExperimentPb.class).add("experimentId", experimentId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java index eb0e4f4e3..da6ce87d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreExperimentResponse.RestoreExperimentResponseSerializer.class) +@JsonDeserialize(using = RestoreExperimentResponse.RestoreExperimentResponseDeserializer.class) public class RestoreExperimentResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreExperimentResponse.class).toString(); } + + RestoreExperimentResponsePb toPb() { + RestoreExperimentResponsePb pb = new RestoreExperimentResponsePb(); + + return pb; + } + + static RestoreExperimentResponse fromPb(RestoreExperimentResponsePb pb) { + RestoreExperimentResponse model = new RestoreExperimentResponse(); + + return model; + } + + public static class RestoreExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RestoreExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public RestoreExperimentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreExperimentResponsePb pb = mapper.readValue(p, RestoreExperimentResponsePb.class); + return RestoreExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponsePb.java new file mode 100755 index 000000000..f3a1844b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RestoreExperimentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RestoreExperimentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java index e20c41822..b26a4de3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreRun.RestoreRunSerializer.class) +@JsonDeserialize(using = RestoreRun.RestoreRunDeserializer.class) public class RestoreRun { /** ID of the run to restore. */ - @JsonProperty("run_id") private String runId; public RestoreRun setRunId(String runId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreRun.class).add("runId", runId).toString(); } + + RestoreRunPb toPb() { + RestoreRunPb pb = new RestoreRunPb(); + pb.setRunId(runId); + + return pb; + } + + static RestoreRun fromPb(RestoreRunPb pb) { + RestoreRun model = new RestoreRun(); + model.setRunId(pb.getRunId()); + + return model; + } + + public static class RestoreRunSerializer extends JsonSerializer { + @Override + public void serialize(RestoreRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreRunDeserializer extends JsonDeserializer { + @Override + public RestoreRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreRunPb pb = mapper.readValue(p, RestoreRunPb.class); + return RestoreRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunPb.java new file mode 100755 index 000000000..7040b0190 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestoreRunPb { + @JsonProperty("run_id") + private String runId; + + public RestoreRunPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreRunPb that = (RestoreRunPb) o; + return Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(runId); + } + + @Override + public String toString() { + return new ToStringer(RestoreRunPb.class).add("runId", runId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java index 3fa8ef75f..29cfb76fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreRunResponse.RestoreRunResponseSerializer.class) +@JsonDeserialize(using = RestoreRunResponse.RestoreRunResponseDeserializer.class) public class RestoreRunResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreRunResponse.class).toString(); } + + RestoreRunResponsePb toPb() { + RestoreRunResponsePb pb = new RestoreRunResponsePb(); + + return pb; + } + + static RestoreRunResponse fromPb(RestoreRunResponsePb pb) { + RestoreRunResponse model = new RestoreRunResponse(); + + return model; + } + + public static class RestoreRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(RestoreRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreRunResponseDeserializer extends JsonDeserializer { + @Override + public RestoreRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreRunResponsePb pb = mapper.readValue(p, RestoreRunResponsePb.class); + return RestoreRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponsePb.java new file mode 100755 index 000000000..2cf56401d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RestoreRunResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RestoreRunResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java index 0ce329136..a5360bb76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java @@ -4,27 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreRuns.RestoreRunsSerializer.class) +@JsonDeserialize(using = RestoreRuns.RestoreRunsDeserializer.class) public class RestoreRuns { /** The ID of the experiment containing the runs to restore. */ - @JsonProperty("experiment_id") private String experimentId; /** * An optional positive integer indicating the maximum number of runs to restore. The maximum * allowed value for max_runs is 10000. */ - @JsonProperty("max_runs") private Long maxRuns; /** * The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only * runs deleted no earlier than this timestamp are restored. */ - @JsonProperty("min_timestamp_millis") private Long minTimestampMillis; public RestoreRuns setExperimentId(String experimentId) { @@ -77,4 +85,41 @@ public String toString() { .add("minTimestampMillis", minTimestampMillis) .toString(); } + + RestoreRunsPb toPb() { + RestoreRunsPb pb = new RestoreRunsPb(); + pb.setExperimentId(experimentId); + pb.setMaxRuns(maxRuns); + pb.setMinTimestampMillis(minTimestampMillis); + + return pb; + } + + static RestoreRuns fromPb(RestoreRunsPb pb) { + RestoreRuns model = new RestoreRuns(); + model.setExperimentId(pb.getExperimentId()); + model.setMaxRuns(pb.getMaxRuns()); + model.setMinTimestampMillis(pb.getMinTimestampMillis()); + + return model; + } + + public static class RestoreRunsSerializer extends JsonSerializer { + @Override + public void serialize(RestoreRuns value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreRunsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
RestoreRunsDeserializer extends JsonDeserializer { + @Override + public RestoreRuns deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreRunsPb pb = mapper.readValue(p, RestoreRunsPb.class); + return RestoreRuns.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsPb.java new file mode 100755 index 000000000..ee4ec2fa0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestoreRunsPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("max_runs") + private Long maxRuns; + + @JsonProperty("min_timestamp_millis") + private Long minTimestampMillis; + + public RestoreRunsPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public RestoreRunsPb setMaxRuns(Long maxRuns) { + this.maxRuns = maxRuns; + return this; + } + + public Long getMaxRuns() { + return maxRuns; + } + + public RestoreRunsPb setMinTimestampMillis(Long minTimestampMillis) { + this.minTimestampMillis = minTimestampMillis; + return this; + } + + public Long getMinTimestampMillis() { + return minTimestampMillis; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreRunsPb that = 
(RestoreRunsPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(maxRuns, that.maxRuns) + && Objects.equals(minTimestampMillis, that.minTimestampMillis); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, maxRuns, minTimestampMillis); + } + + @Override + public String toString() { + return new ToStringer(RestoreRunsPb.class) + .add("experimentId", experimentId) + .add("maxRuns", maxRuns) + .add("minTimestampMillis", minTimestampMillis) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java index 45b2a692f..f42db86d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreRunsResponse.RestoreRunsResponseSerializer.class) +@JsonDeserialize(using = RestoreRunsResponse.RestoreRunsResponseDeserializer.class) public class RestoreRunsResponse { /** The number of runs restored. 
*/ - @JsonProperty("runs_restored") private Long runsRestored; public RestoreRunsResponse setRunsRestored(Long runsRestored) { @@ -39,4 +49,39 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreRunsResponse.class).add("runsRestored", runsRestored).toString(); } + + RestoreRunsResponsePb toPb() { + RestoreRunsResponsePb pb = new RestoreRunsResponsePb(); + pb.setRunsRestored(runsRestored); + + return pb; + } + + static RestoreRunsResponse fromPb(RestoreRunsResponsePb pb) { + RestoreRunsResponse model = new RestoreRunsResponse(); + model.setRunsRestored(pb.getRunsRestored()); + + return model; + } + + public static class RestoreRunsResponseSerializer extends JsonSerializer { + @Override + public void serialize(RestoreRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreRunsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreRunsResponseDeserializer + extends JsonDeserializer { + @Override + public RestoreRunsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreRunsResponsePb pb = mapper.readValue(p, RestoreRunsResponsePb.class); + return RestoreRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponsePb.java new file mode 100755 index 000000000..d06d7f98e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestoreRunsResponsePb { + @JsonProperty("runs_restored") + private Long runsRestored; + + public RestoreRunsResponsePb setRunsRestored(Long runsRestored) { + this.runsRestored = runsRestored; + return this; + } + + public Long getRunsRestored() { + return runsRestored; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreRunsResponsePb that = (RestoreRunsResponsePb) o; + return Objects.equals(runsRestored, that.runsRestored); + } + + @Override + public int hashCode() { + return Objects.hash(runsRestored); + } + + @Override + public String toString() { + return new ToStringer(RestoreRunsResponsePb.class).add("runsRestored", runsRestored).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java index f914c7738..f7eacd662 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A single run. */ @Generated +@JsonSerialize(using = Run.RunSerializer.class) +@JsonDeserialize(using = Run.RunDeserializer.class) public class Run { /** Run data. */ - @JsonProperty("data") private RunData data; /** Run metadata. */ - @JsonProperty("info") private RunInfo info; /** Run inputs. */ - @JsonProperty("inputs") private RunInputs inputs; public Run setData(RunData data) { @@ -72,4 +80,41 @@ public String toString() { .add("inputs", inputs) .toString(); } + + RunPb toPb() { + RunPb pb = new RunPb(); + pb.setData(data); + pb.setInfo(info); + pb.setInputs(inputs); + + return pb; + } + + static Run fromPb(RunPb pb) { + Run model = new Run(); + model.setData(pb.getData()); + model.setInfo(pb.getInfo()); + model.setInputs(pb.getInputs()); + + return model; + } + + public static class RunSerializer extends JsonSerializer { + @Override + public void serialize(Run value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunDeserializer extends JsonDeserializer { + @Override + public Run deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunPb pb = mapper.readValue(p, RunPb.class); + return Run.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java index b7aa224f0..bb23483be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java @@ -4,23 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Run data (metrics, params, and tags). */ @Generated +@JsonSerialize(using = RunData.RunDataSerializer.class) +@JsonDeserialize(using = RunData.RunDataDeserializer.class) public class RunData { /** Run metrics. */ - @JsonProperty("metrics") private Collection metrics; /** Run parameters. */ - @JsonProperty("params") private Collection params; /** Additional metadata key-value pairs. 
*/ - @JsonProperty("tags") private Collection tags; public RunData setMetrics(Collection metrics) { @@ -73,4 +81,41 @@ public String toString() { .add("tags", tags) .toString(); } + + RunDataPb toPb() { + RunDataPb pb = new RunDataPb(); + pb.setMetrics(metrics); + pb.setParams(params); + pb.setTags(tags); + + return pb; + } + + static RunData fromPb(RunDataPb pb) { + RunData model = new RunData(); + model.setMetrics(pb.getMetrics()); + model.setParams(pb.getParams()); + model.setTags(pb.getTags()); + + return model; + } + + public static class RunDataSerializer extends JsonSerializer { + @Override + public void serialize(RunData value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunDataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunDataDeserializer extends JsonDeserializer { + @Override + public RunData deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunDataPb pb = mapper.readValue(p, RunDataPb.class); + return RunData.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunDataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunDataPb.java new file mode 100755 index 000000000..9443cfa57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunDataPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Run data (metrics, params, and tags). 
*/ +@Generated +class RunDataPb { + @JsonProperty("metrics") + private Collection metrics; + + @JsonProperty("params") + private Collection params; + + @JsonProperty("tags") + private Collection tags; + + public RunDataPb setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public RunDataPb setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + public RunDataPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunDataPb that = (RunDataPb) o; + return Objects.equals(metrics, that.metrics) + && Objects.equals(params, that.params) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(metrics, params, tags); + } + + @Override + public String toString() { + return new ToStringer(RunDataPb.class) + .add("metrics", metrics) + .add("params", params) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java index ee4b2f388..1a667dbea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java @@ -4,60 +4,61 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Metadata of a single run. */ @Generated +@JsonSerialize(using = RunInfo.RunInfoSerializer.class) +@JsonDeserialize(using = RunInfo.RunInfoDeserializer.class) public class RunInfo { /** * URI of the directory where artifacts should be uploaded. This can be a local path (starting * with "/"), or a distributed file system (DFS) path, like ``s3://bucket/directory`` or * ``dbfs:/my/directory``. If not set, the local ``./mlruns`` directory is chosen. */ - @JsonProperty("artifact_uri") private String artifactUri; /** Unix timestamp of when the run ended in milliseconds. */ - @JsonProperty("end_time") private Long endTime; /** The experiment ID. */ - @JsonProperty("experiment_id") private String experimentId; /** Current life cycle stage of the experiment : OneOf("active", "deleted") */ - @JsonProperty("lifecycle_stage") private String lifecycleStage; /** Unique identifier for the run. */ - @JsonProperty("run_id") private String runId; /** The name of the run. */ - @JsonProperty("run_name") private String runName; /** * [Deprecated, use run_id instead] Unique identifier for the run. This field will be removed in a * future MLflow version. */ - @JsonProperty("run_uuid") private String runUuid; /** Unix timestamp of when the run started in milliseconds. */ - @JsonProperty("start_time") private Long startTime; /** Current status of the run. */ - @JsonProperty("status") private RunInfoStatus status; /** * User who initiated the run. This field is deprecated as of MLflow 1.0, and will be removed in a * future MLflow release. Use 'mlflow.user' tag instead. 
*/ - @JsonProperty("user_id") private String userId; public RunInfo setArtifactUri(String artifactUri) { @@ -197,4 +198,55 @@ public String toString() { .add("userId", userId) .toString(); } + + RunInfoPb toPb() { + RunInfoPb pb = new RunInfoPb(); + pb.setArtifactUri(artifactUri); + pb.setEndTime(endTime); + pb.setExperimentId(experimentId); + pb.setLifecycleStage(lifecycleStage); + pb.setRunId(runId); + pb.setRunName(runName); + pb.setRunUuid(runUuid); + pb.setStartTime(startTime); + pb.setStatus(status); + pb.setUserId(userId); + + return pb; + } + + static RunInfo fromPb(RunInfoPb pb) { + RunInfo model = new RunInfo(); + model.setArtifactUri(pb.getArtifactUri()); + model.setEndTime(pb.getEndTime()); + model.setExperimentId(pb.getExperimentId()); + model.setLifecycleStage(pb.getLifecycleStage()); + model.setRunId(pb.getRunId()); + model.setRunName(pb.getRunName()); + model.setRunUuid(pb.getRunUuid()); + model.setStartTime(pb.getStartTime()); + model.setStatus(pb.getStatus()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class RunInfoSerializer extends JsonSerializer { + @Override + public void serialize(RunInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunInfoDeserializer extends JsonDeserializer { + @Override + public RunInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunInfoPb pb = mapper.readValue(p, RunInfoPb.class); + return RunInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoPb.java new file mode 100755 index 000000000..491108c29 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoPb.java @@ -0,0 +1,180 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metadata of a single run. */ +@Generated +class RunInfoPb { + @JsonProperty("artifact_uri") + private String artifactUri; + + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("lifecycle_stage") + private String lifecycleStage; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("run_uuid") + private String runUuid; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("status") + private RunInfoStatus status; + + @JsonProperty("user_id") + private String userId; + + public RunInfoPb setArtifactUri(String artifactUri) { + this.artifactUri = artifactUri; + return this; + } + + public String getArtifactUri() { + return artifactUri; + } + + public RunInfoPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public RunInfoPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public RunInfoPb setLifecycleStage(String 
lifecycleStage) { + this.lifecycleStage = lifecycleStage; + return this; + } + + public String getLifecycleStage() { + return lifecycleStage; + } + + public RunInfoPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public RunInfoPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public RunInfoPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + public RunInfoPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public RunInfoPb setStatus(RunInfoStatus status) { + this.status = status; + return this; + } + + public RunInfoStatus getStatus() { + return status; + } + + public RunInfoPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunInfoPb that = (RunInfoPb) o; + return Objects.equals(artifactUri, that.artifactUri) + && Objects.equals(endTime, that.endTime) + && Objects.equals(experimentId, that.experimentId) + && Objects.equals(lifecycleStage, that.lifecycleStage) + && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) + && Objects.equals(runUuid, that.runUuid) + && Objects.equals(startTime, that.startTime) + && Objects.equals(status, that.status) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + artifactUri, + endTime, + experimentId, + lifecycleStage, + runId, + runName, + runUuid, + startTime, + status, + userId); + } + + @Override + public String toString() { + return new ToStringer(RunInfoPb.class) + .add("artifactUri", artifactUri) + 
.add("endTime", endTime) + .add("experimentId", experimentId) + .add("lifecycleStage", lifecycleStage) + .add("runId", runId) + .add("runName", runName) + .add("runUuid", runUuid) + .add("startTime", startTime) + .add("status", status) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java index 604f034f7..efebd6f46 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java @@ -4,24 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Run inputs. */ @Generated +@JsonSerialize(using = RunInputs.RunInputsSerializer.class) +@JsonDeserialize(using = RunInputs.RunInputsDeserializer.class) public class RunInputs { /** Run metrics. */ - @JsonProperty("dataset_inputs") private Collection datasetInputs; - /** - * **NOTE**: Experimental: This API field may change or be removed in a future release without - * warning. - * - *

Model inputs to the Run. - */ - @JsonProperty("model_inputs") + /** Model inputs to the Run. */ private Collection modelInputs; public RunInputs setDatasetInputs(Collection datasetInputs) { @@ -63,4 +67,39 @@ public String toString() { .add("modelInputs", modelInputs) .toString(); } + + RunInputsPb toPb() { + RunInputsPb pb = new RunInputsPb(); + pb.setDatasetInputs(datasetInputs); + pb.setModelInputs(modelInputs); + + return pb; + } + + static RunInputs fromPb(RunInputsPb pb) { + RunInputs model = new RunInputs(); + model.setDatasetInputs(pb.getDatasetInputs()); + model.setModelInputs(pb.getModelInputs()); + + return model; + } + + public static class RunInputsSerializer extends JsonSerializer { + @Override + public void serialize(RunInputs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunInputsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunInputsDeserializer extends JsonDeserializer { + @Override + public RunInputs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunInputsPb pb = mapper.readValue(p, RunInputsPb.class); + return RunInputs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputsPb.java new file mode 100755 index 000000000..f6a09d6b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputsPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Run inputs. */ +@Generated +class RunInputsPb { + @JsonProperty("dataset_inputs") + private Collection datasetInputs; + + @JsonProperty("model_inputs") + private Collection modelInputs; + + public RunInputsPb setDatasetInputs(Collection datasetInputs) { + this.datasetInputs = datasetInputs; + return this; + } + + public Collection getDatasetInputs() { + return datasetInputs; + } + + public RunInputsPb setModelInputs(Collection modelInputs) { + this.modelInputs = modelInputs; + return this; + } + + public Collection getModelInputs() { + return modelInputs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunInputsPb that = (RunInputsPb) o; + return Objects.equals(datasetInputs, that.datasetInputs) + && Objects.equals(modelInputs, that.modelInputs); + } + + @Override + public int hashCode() { + return Objects.hash(datasetInputs, modelInputs); + } + + @Override + public String toString() { + return new ToStringer(RunInputsPb.class) + .add("datasetInputs", datasetInputs) + .add("modelInputs", modelInputs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunPb.java new file mode 100755 index 000000000..ed7114184 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A single run. */ +@Generated +class RunPb { + @JsonProperty("data") + private RunData data; + + @JsonProperty("info") + private RunInfo info; + + @JsonProperty("inputs") + private RunInputs inputs; + + public RunPb setData(RunData data) { + this.data = data; + return this; + } + + public RunData getData() { + return data; + } + + public RunPb setInfo(RunInfo info) { + this.info = info; + return this; + } + + public RunInfo getInfo() { + return info; + } + + public RunPb setInputs(RunInputs inputs) { + this.inputs = inputs; + return this; + } + + public RunInputs getInputs() { + return inputs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunPb that = (RunPb) o; + return Objects.equals(data, that.data) + && Objects.equals(info, that.info) + && Objects.equals(inputs, that.inputs); + } + + @Override + public int hashCode() { + return Objects.hash(data, info, inputs); + } + + @Override + public String toString() { + return new ToStringer(RunPb.class) + .add("data", data) + .add("info", info) + .add("inputs", inputs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java index 5e029636c..9e24959a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Tag for a run. */ @Generated +@JsonSerialize(using = RunTag.RunTagSerializer.class) +@JsonDeserialize(using = RunTag.RunTagDeserializer.class) public class RunTag { /** The tag key. */ - @JsonProperty("key") private String key; /** The tag value. */ - @JsonProperty("value") private String value; public RunTag setKey(String key) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(RunTag.class).add("key", key).add("value", value).toString(); } + + RunTagPb toPb() { + RunTagPb pb = new RunTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static RunTag fromPb(RunTagPb pb) { + RunTag model = new RunTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class RunTagSerializer extends JsonSerializer { + @Override + public void serialize(RunTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunTagDeserializer extends JsonDeserializer { + @Override + public RunTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunTagPb pb = mapper.readValue(p, RunTagPb.class); + return RunTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTagPb.java new file mode 100755 index 000000000..a2409355b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTagPb.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Tag for a run. */ +@Generated +class RunTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public RunTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public RunTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunTagPb that = (RunTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(RunTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java index e1cc5aaec..7fd9de24e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchExperiments.SearchExperimentsSerializer.class) +@JsonDeserialize(using = SearchExperiments.SearchExperimentsDeserializer.class) public class SearchExperiments { /** String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") */ - @JsonProperty("filter") private String filter; /** Maximum number of experiments desired. Max threshold is 3000. */ - @JsonProperty("max_results") private Long maxResults; /** @@ -23,18 +32,15 @@ public class SearchExperiments { * timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks * are done by experiment id DESC. */ - @JsonProperty("order_by") private Collection orderBy; /** Token indicating the page of experiments to fetch */ - @JsonProperty("page_token") private String pageToken; /** * Qualifier for type of experiments to be returned. If unspecified, return only active * experiments. 
*/ - @JsonProperty("view_type") private ViewType viewType; public SearchExperiments setFilter(String filter) { @@ -109,4 +115,46 @@ public String toString() { .add("viewType", viewType) .toString(); } + + SearchExperimentsPb toPb() { + SearchExperimentsPb pb = new SearchExperimentsPb(); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + pb.setViewType(viewType); + + return pb; + } + + static SearchExperiments fromPb(SearchExperimentsPb pb) { + SearchExperiments model = new SearchExperiments(); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + model.setViewType(pb.getViewType()); + + return model; + } + + public static class SearchExperimentsSerializer extends JsonSerializer { + @Override + public void serialize(SearchExperiments value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchExperimentsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchExperimentsDeserializer extends JsonDeserializer { + @Override + public SearchExperiments deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchExperimentsPb pb = mapper.readValue(p, SearchExperimentsPb.class); + return SearchExperiments.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsPb.java new file mode 100755 index 000000000..82b3b5c94 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchExperimentsPb { + @JsonProperty("filter") + private String filter; + + @JsonProperty("max_results") + private Long maxResults; + + @JsonProperty("order_by") + private Collection orderBy; + + @JsonProperty("page_token") + private String pageToken; + + @JsonProperty("view_type") + private ViewType viewType; + + public SearchExperimentsPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchExperimentsPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchExperimentsPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchExperimentsPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public SearchExperimentsPb setViewType(ViewType viewType) { + this.viewType = viewType; + return this; + } + + public ViewType getViewType() { + return viewType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchExperimentsPb that = (SearchExperimentsPb) o; + return Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(viewType, that.viewType); + } + + @Override + public int hashCode() { + return Objects.hash(filter, maxResults, orderBy, pageToken, viewType); + } + + @Override + public String 
toString() { + return new ToStringer(SearchExperimentsPb.class) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .add("viewType", viewType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java index ea50e7556..5f0578252 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchExperimentsResponse.SearchExperimentsResponseSerializer.class) +@JsonDeserialize(using = SearchExperimentsResponse.SearchExperimentsResponseDeserializer.class) public class SearchExperimentsResponse { /** Experiments that match the search criteria */ - @JsonProperty("experiments") private Collection experiments; /** * Token that can be used to retrieve the next page of experiments. An empty token means that no * more experiments are available for retrieval. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public SearchExperimentsResponse setExperiments(Collection experiments) { @@ -60,4 +69,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + SearchExperimentsResponsePb toPb() { + SearchExperimentsResponsePb pb = new SearchExperimentsResponsePb(); + pb.setExperiments(experiments); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static SearchExperimentsResponse fromPb(SearchExperimentsResponsePb pb) { + SearchExperimentsResponse model = new SearchExperimentsResponse(); + model.setExperiments(pb.getExperiments()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class SearchExperimentsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchExperimentsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchExperimentsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchExperimentsResponseDeserializer + extends JsonDeserializer { + @Override + public SearchExperimentsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchExperimentsResponsePb pb = mapper.readValue(p, SearchExperimentsResponsePb.class); + return SearchExperimentsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponsePb.java new file mode 100755 index 000000000..4c2100da0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchExperimentsResponsePb { + @JsonProperty("experiments") + private Collection experiments; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchExperimentsResponsePb setExperiments(Collection experiments) { + this.experiments = experiments; + return this; + } + + public Collection getExperiments() { + return experiments; + } + + public SearchExperimentsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchExperimentsResponsePb that = (SearchExperimentsResponsePb) o; + return Objects.equals(experiments, that.experiments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(experiments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchExperimentsResponsePb.class) + .add("experiments", experiments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java index 495ca4cd7..41df025ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SearchLoggedModelsDataset.SearchLoggedModelsDatasetSerializer.class) +@JsonDeserialize(using = SearchLoggedModelsDataset.SearchLoggedModelsDatasetDeserializer.class) public class SearchLoggedModelsDataset { /** The digest of the dataset. */ - @JsonProperty("dataset_digest") private String datasetDigest; /** The name of the dataset. 
*/ - @JsonProperty("dataset_name") private String datasetName; public SearchLoggedModelsDataset setDatasetDigest(String datasetDigest) { @@ -56,4 +65,43 @@ public String toString() { .add("datasetName", datasetName) .toString(); } + + SearchLoggedModelsDatasetPb toPb() { + SearchLoggedModelsDatasetPb pb = new SearchLoggedModelsDatasetPb(); + pb.setDatasetDigest(datasetDigest); + pb.setDatasetName(datasetName); + + return pb; + } + + static SearchLoggedModelsDataset fromPb(SearchLoggedModelsDatasetPb pb) { + SearchLoggedModelsDataset model = new SearchLoggedModelsDataset(); + model.setDatasetDigest(pb.getDatasetDigest()); + model.setDatasetName(pb.getDatasetName()); + + return model; + } + + public static class SearchLoggedModelsDatasetSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchLoggedModelsDataset value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchLoggedModelsDatasetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchLoggedModelsDatasetDeserializer + extends JsonDeserializer { + @Override + public SearchLoggedModelsDataset deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchLoggedModelsDatasetPb pb = mapper.readValue(p, SearchLoggedModelsDatasetPb.class); + return SearchLoggedModelsDataset.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDatasetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDatasetPb.java new file mode 100755 index 000000000..48ecec810 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDatasetPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SearchLoggedModelsDatasetPb { + @JsonProperty("dataset_digest") + private String datasetDigest; + + @JsonProperty("dataset_name") + private String datasetName; + + public SearchLoggedModelsDatasetPb setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public SearchLoggedModelsDatasetPb setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsDatasetPb that = (SearchLoggedModelsDatasetPb) o; + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName); + } + + @Override + public int hashCode() { + return Objects.hash(datasetDigest, datasetName); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsDatasetPb.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java index 72e69fe1d..2cddae176 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SearchLoggedModelsOrderBy.SearchLoggedModelsOrderBySerializer.class) +@JsonDeserialize(using = SearchLoggedModelsOrderBy.SearchLoggedModelsOrderByDeserializer.class) public class SearchLoggedModelsOrderBy { /** Whether the search results order is ascending or not. */ - @JsonProperty("ascending") private Boolean ascending; /** @@ -18,7 +28,6 @@ public class SearchLoggedModelsOrderBy { * with the metric. Only metrics associated with the specified dataset name and digest will be * considered for ordering. This field may only be set if ``dataset_name`` is also set. */ - @JsonProperty("dataset_digest") private String datasetDigest; /** @@ -26,11 +35,9 @@ public class SearchLoggedModelsOrderBy { * with the metric. Only metrics associated with the specified dataset name will be considered for * ordering. This field may only be set if ``field_name`` refers to a metric. */ - @JsonProperty("dataset_name") private String datasetName; /** The name of the field to order by, e.g. "metrics.accuracy". 
*/ - @JsonProperty("field_name") private String fieldName; public SearchLoggedModelsOrderBy setAscending(Boolean ascending) { @@ -94,4 +101,47 @@ public String toString() { .add("fieldName", fieldName) .toString(); } + + SearchLoggedModelsOrderByPb toPb() { + SearchLoggedModelsOrderByPb pb = new SearchLoggedModelsOrderByPb(); + pb.setAscending(ascending); + pb.setDatasetDigest(datasetDigest); + pb.setDatasetName(datasetName); + pb.setFieldName(fieldName); + + return pb; + } + + static SearchLoggedModelsOrderBy fromPb(SearchLoggedModelsOrderByPb pb) { + SearchLoggedModelsOrderBy model = new SearchLoggedModelsOrderBy(); + model.setAscending(pb.getAscending()); + model.setDatasetDigest(pb.getDatasetDigest()); + model.setDatasetName(pb.getDatasetName()); + model.setFieldName(pb.getFieldName()); + + return model; + } + + public static class SearchLoggedModelsOrderBySerializer + extends JsonSerializer { + @Override + public void serialize( + SearchLoggedModelsOrderBy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchLoggedModelsOrderByPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchLoggedModelsOrderByDeserializer + extends JsonDeserializer { + @Override + public SearchLoggedModelsOrderBy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchLoggedModelsOrderByPb pb = mapper.readValue(p, SearchLoggedModelsOrderByPb.class); + return SearchLoggedModelsOrderBy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderByPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderByPb.java new file mode 100755 index 000000000..ee89a86ee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderByPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SearchLoggedModelsOrderByPb { + @JsonProperty("ascending") + private Boolean ascending; + + @JsonProperty("dataset_digest") + private String datasetDigest; + + @JsonProperty("dataset_name") + private String datasetName; + + @JsonProperty("field_name") + private String fieldName; + + public SearchLoggedModelsOrderByPb setAscending(Boolean ascending) { + this.ascending = ascending; + return this; + } + + public Boolean getAscending() { + return ascending; + } + + public SearchLoggedModelsOrderByPb setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public SearchLoggedModelsOrderByPb setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + public SearchLoggedModelsOrderByPb setFieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public String getFieldName() { + return fieldName; + } + + @Override + public boolean equals(Object o) { 
+ if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsOrderByPb that = (SearchLoggedModelsOrderByPb) o; + return Objects.equals(ascending, that.ascending) + && Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(fieldName, that.fieldName); + } + + @Override + public int hashCode() { + return Objects.hash(ascending, datasetDigest, datasetName, fieldName); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsOrderByPb.class) + .add("ascending", ascending) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .add("fieldName", fieldName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java index 17ff5a9c4..bb25203ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
SearchLoggedModelsRequest.SearchLoggedModelsRequestSerializer.class) +@JsonDeserialize(using = SearchLoggedModelsRequest.SearchLoggedModelsRequestDeserializer.class) public class SearchLoggedModelsRequest { /** * List of datasets on which to apply the metrics filter clauses. For example, a filter with @@ -17,11 +28,9 @@ public class SearchLoggedModelsRequest { * the criteria are considered. If no datasets are specified, then metrics across all datasets are * considered in the filter. */ - @JsonProperty("datasets") private Collection datasets; /** The IDs of the experiments in which to search for logged models. */ - @JsonProperty("experiment_ids") private Collection experimentIds; /** @@ -30,19 +39,15 @@ public class SearchLoggedModelsRequest { * *

Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. */ - @JsonProperty("filter") private String filter; /** The maximum number of Logged Models to return. The maximum limit is 50. */ - @JsonProperty("max_results") private Long maxResults; /** The list of columns for ordering the results, with additional fields for sorting criteria. */ - @JsonProperty("order_by") private Collection orderBy; /** The token indicating the page of logged models to fetch. */ - @JsonProperty("page_token") private String pageToken; public SearchLoggedModelsRequest setDatasets(Collection datasets) { @@ -128,4 +133,51 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + SearchLoggedModelsRequestPb toPb() { + SearchLoggedModelsRequestPb pb = new SearchLoggedModelsRequestPb(); + pb.setDatasets(datasets); + pb.setExperimentIds(experimentIds); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + + return pb; + } + + static SearchLoggedModelsRequest fromPb(SearchLoggedModelsRequestPb pb) { + SearchLoggedModelsRequest model = new SearchLoggedModelsRequest(); + model.setDatasets(pb.getDatasets()); + model.setExperimentIds(pb.getExperimentIds()); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class SearchLoggedModelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchLoggedModelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchLoggedModelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchLoggedModelsRequestDeserializer + extends JsonDeserializer { + @Override + public SearchLoggedModelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, 
and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchLoggedModelsRequestPb pb = mapper.readValue(p, SearchLoggedModelsRequestPb.class); + return SearchLoggedModelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequestPb.java new file mode 100755 index 000000000..b1ae06c12 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequestPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchLoggedModelsRequestPb { + @JsonProperty("datasets") + private Collection datasets; + + @JsonProperty("experiment_ids") + private Collection experimentIds; + + @JsonProperty("filter") + private String filter; + + @JsonProperty("max_results") + private Long maxResults; + + @JsonProperty("order_by") + private Collection orderBy; + + @JsonProperty("page_token") + private String pageToken; + + public SearchLoggedModelsRequestPb setDatasets(Collection datasets) { + this.datasets = datasets; + return this; + } + + public Collection getDatasets() { + return datasets; + } + + public SearchLoggedModelsRequestPb setExperimentIds(Collection experimentIds) { + this.experimentIds = experimentIds; + return this; + } + + public Collection getExperimentIds() { + return experimentIds; + } + + public SearchLoggedModelsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchLoggedModelsRequestPb setMaxResults(Long maxResults) { + 
this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchLoggedModelsRequestPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchLoggedModelsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsRequestPb that = (SearchLoggedModelsRequestPb) o; + return Objects.equals(datasets, that.datasets) + && Objects.equals(experimentIds, that.experimentIds) + && Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(datasets, experimentIds, filter, maxResults, orderBy, pageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsRequestPb.class) + .add("datasets", datasets) + .add("experimentIds", experimentIds) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java index c96b840ef..fd0e7a23f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchLoggedModelsResponse.SearchLoggedModelsResponseSerializer.class) +@JsonDeserialize(using = SearchLoggedModelsResponse.SearchLoggedModelsResponseDeserializer.class) public class SearchLoggedModelsResponse { /** Logged models that match the search criteria. */ - @JsonProperty("models") private Collection models; /** The token that can be used to retrieve the next page of logged models. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public SearchLoggedModelsResponse setModels(Collection models) { @@ -56,4 +65,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + SearchLoggedModelsResponsePb toPb() { + SearchLoggedModelsResponsePb pb = new SearchLoggedModelsResponsePb(); + pb.setModels(models); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static SearchLoggedModelsResponse fromPb(SearchLoggedModelsResponsePb pb) { + SearchLoggedModelsResponse model = new SearchLoggedModelsResponse(); + model.setModels(pb.getModels()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class SearchLoggedModelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchLoggedModelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchLoggedModelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchLoggedModelsResponseDeserializer + extends JsonDeserializer { + @Override + public SearchLoggedModelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchLoggedModelsResponsePb pb = mapper.readValue(p, SearchLoggedModelsResponsePb.class); + return SearchLoggedModelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponsePb.java new file mode 100755 index 000000000..9469bd4b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchLoggedModelsResponsePb { + @JsonProperty("models") + private Collection models; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchLoggedModelsResponsePb setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + public SearchLoggedModelsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsResponsePb that = (SearchLoggedModelsResponsePb) o; + return Objects.equals(models, that.models) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(models, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsResponsePb.class) + .add("models", models) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java index c4eb51d14..ab1306919 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java @@ -3,26 +3,32 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Searches model versions */ @Generated +@JsonSerialize(using = SearchModelVersionsRequest.SearchModelVersionsRequestSerializer.class) +@JsonDeserialize(using = SearchModelVersionsRequest.SearchModelVersionsRequestDeserializer.class) public class SearchModelVersionsRequest { /** * String filter condition, like "name='my-model-name'". Must be a single boolean condition, with * string values wrapped in single quotes. */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Maximum number of models desired. Max threshold is 10K. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** @@ -30,13 +36,9 @@ public class SearchModelVersionsRequest { * or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by latest stage transition * timestamp, followed by name ASC, followed by version DESC. */ - @JsonIgnore - @QueryParam("order_by") private Collection orderBy; /** Pagination token to go to next page based on previous search query. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public SearchModelVersionsRequest setFilter(String filter) { @@ -100,4 +102,47 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + SearchModelVersionsRequestPb toPb() { + SearchModelVersionsRequestPb pb = new SearchModelVersionsRequestPb(); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + + return pb; + } + + static SearchModelVersionsRequest fromPb(SearchModelVersionsRequestPb pb) { + SearchModelVersionsRequest model = new SearchModelVersionsRequest(); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class SearchModelVersionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchModelVersionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchModelVersionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchModelVersionsRequestDeserializer + extends JsonDeserializer { + @Override + public SearchModelVersionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchModelVersionsRequestPb pb = mapper.readValue(p, SearchModelVersionsRequestPb.class); + return SearchModelVersionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequestPb.java new file mode 100755 index 000000000..d3d417fb4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequestPb.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Searches model versions */ +@Generated +class SearchModelVersionsRequestPb { + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("order_by") + private Collection orderBy; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public SearchModelVersionsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchModelVersionsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchModelVersionsRequestPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchModelVersionsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String 
getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchModelVersionsRequestPb that = (SearchModelVersionsRequestPb) o; + return Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filter, maxResults, orderBy, pageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchModelVersionsRequestPb.class) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java index e1b91c9bd..a39a1a79f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize(using = SearchModelVersionsResponse.SearchModelVersionsResponseSerializer.class) +@JsonDeserialize(using = SearchModelVersionsResponse.SearchModelVersionsResponseDeserializer.class) public class SearchModelVersionsResponse { /** Models that match the search criteria */ - @JsonProperty("model_versions") private Collection modelVersions; /** Pagination token to request next page of models for the same search query. */ - @JsonProperty("next_page_token") private String nextPageToken; public SearchModelVersionsResponse setModelVersions(Collection modelVersions) { @@ -57,4 +66,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + SearchModelVersionsResponsePb toPb() { + SearchModelVersionsResponsePb pb = new SearchModelVersionsResponsePb(); + pb.setModelVersions(modelVersions); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static SearchModelVersionsResponse fromPb(SearchModelVersionsResponsePb pb) { + SearchModelVersionsResponse model = new SearchModelVersionsResponse(); + model.setModelVersions(pb.getModelVersions()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class SearchModelVersionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SearchModelVersionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchModelVersionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchModelVersionsResponseDeserializer + extends JsonDeserializer { + @Override + public SearchModelVersionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchModelVersionsResponsePb pb = mapper.readValue(p, SearchModelVersionsResponsePb.class); + return SearchModelVersionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponsePb.java new file mode 100755 index 000000000..77465a5fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchModelVersionsResponsePb { + @JsonProperty("model_versions") + private Collection modelVersions; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchModelVersionsResponsePb setModelVersions(Collection modelVersions) { + this.modelVersions = modelVersions; + return this; + } + + public Collection getModelVersions() { + return modelVersions; + } + + public SearchModelVersionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchModelVersionsResponsePb that = (SearchModelVersionsResponsePb) o; + return Objects.equals(modelVersions, that.modelVersions) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersions, nextPageToken); + } + + @Override + public 
String toString() { + return new ToStringer(SearchModelVersionsResponsePb.class) + .add("modelVersions", modelVersions) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java index abbf2de98..4b24a1218 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java @@ -3,27 +3,33 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Search models */ @Generated +@JsonSerialize(using = SearchModelsRequest.SearchModelsRequestSerializer.class) +@JsonDeserialize(using = SearchModelsRequest.SearchModelsRequestDeserializer.class) public class SearchModelsRequest { /** * String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend * automatically as "name LIKE '%my-model-name%'". Single boolean condition, with string values * wrapped in single quotes. */ - @JsonIgnore - @QueryParam("filter") private String filter; /** Maximum number of models desired. 
Default is 100. Max threshold is 1000. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** @@ -31,13 +37,9 @@ public class SearchModelsRequest { * timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks * are done by model name ASC. */ - @JsonIgnore - @QueryParam("order_by") private Collection orderBy; /** Pagination token to go to the next page based on a previous search query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public SearchModelsRequest setFilter(String filter) { @@ -101,4 +103,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + SearchModelsRequestPb toPb() { + SearchModelsRequestPb pb = new SearchModelsRequestPb(); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + + return pb; + } + + static SearchModelsRequest fromPb(SearchModelsRequestPb pb) { + SearchModelsRequest model = new SearchModelsRequest(); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class SearchModelsRequestSerializer extends JsonSerializer { + @Override + public void serialize(SearchModelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchModelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchModelsRequestDeserializer + extends JsonDeserializer { + @Override + public SearchModelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchModelsRequestPb pb = mapper.readValue(p, SearchModelsRequestPb.class); + return SearchModelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequestPb.java new file mode 100755 index 000000000..2c28e1e7b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequestPb.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Search models */ +@Generated +class SearchModelsRequestPb { + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("order_by") + private Collection orderBy; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public SearchModelsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchModelsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchModelsRequestPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchModelsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) 
{ + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchModelsRequestPb that = (SearchModelsRequestPb) o; + return Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filter, maxResults, orderBy, pageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchModelsRequestPb.class) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponse.java index 48dea8019..15f03dfc2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchModelsResponse.SearchModelsResponseSerializer.class) +@JsonDeserialize(using = 
SearchModelsResponse.SearchModelsResponseDeserializer.class) public class SearchModelsResponse { /** Pagination token to request the next page of models. */ - @JsonProperty("next_page_token") private String nextPageToken; /** Registered Models that match the search criteria. */ - @JsonProperty("registered_models") private Collection registeredModels; public SearchModelsResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,42 @@ public String toString() { .add("registeredModels", registeredModels) .toString(); } + + SearchModelsResponsePb toPb() { + SearchModelsResponsePb pb = new SearchModelsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRegisteredModels(registeredModels); + + return pb; + } + + static SearchModelsResponse fromPb(SearchModelsResponsePb pb) { + SearchModelsResponse model = new SearchModelsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRegisteredModels(pb.getRegisteredModels()); + + return model; + } + + public static class SearchModelsResponseSerializer extends JsonSerializer { + @Override + public void serialize( + SearchModelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchModelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchModelsResponseDeserializer + extends JsonDeserializer { + @Override + public SearchModelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchModelsResponsePb pb = mapper.readValue(p, SearchModelsResponsePb.class); + return SearchModelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponsePb.java new file mode 100755 index 000000000..4d7878db6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchModelsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("registered_models") + private Collection registeredModels; + + public SearchModelsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public SearchModelsResponsePb setRegisteredModels(Collection registeredModels) { + this.registeredModels = registeredModels; + return this; + } + + public Collection getRegisteredModels() { + return registeredModels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchModelsResponsePb that = (SearchModelsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(registeredModels, that.registeredModels); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, registeredModels); + } + + @Override + public String toString() { + return new 
ToStringer(SearchModelsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("registeredModels", registeredModels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java index 89882d591..f7e1ba92d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchRuns.SearchRunsSerializer.class) +@JsonDeserialize(using = SearchRuns.SearchRunsDeserializer.class) public class SearchRuns { /** List of experiment IDs to search over. */ - @JsonProperty("experiment_ids") private Collection experimentIds; /** @@ -26,11 +36,9 @@ public class SearchRuns { * *

Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`. */ - @JsonProperty("filter") private String filter; /** Maximum number of runs desired. Max threshold is 50000 */ - @JsonProperty("max_results") private Long maxResults; /** @@ -40,15 +48,12 @@ public class SearchRuns { * by `run_id` for runs with the same start time (and this is the default ordering criterion if * order_by is not provided). */ - @JsonProperty("order_by") private Collection orderBy; /** Token for the current page of runs. */ - @JsonProperty("page_token") private String pageToken; /** Whether to display only active, only deleted, or all runs. Defaults to only active runs. */ - @JsonProperty("run_view_type") private ViewType runViewType; public SearchRuns setExperimentIds(Collection experimentIds) { @@ -134,4 +139,47 @@ public String toString() { .add("runViewType", runViewType) .toString(); } + + SearchRunsPb toPb() { + SearchRunsPb pb = new SearchRunsPb(); + pb.setExperimentIds(experimentIds); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + pb.setRunViewType(runViewType); + + return pb; + } + + static SearchRuns fromPb(SearchRunsPb pb) { + SearchRuns model = new SearchRuns(); + model.setExperimentIds(pb.getExperimentIds()); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + model.setRunViewType(pb.getRunViewType()); + + return model; + } + + public static class SearchRunsSerializer extends JsonSerializer { + @Override + public void serialize(SearchRuns value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchRunsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchRunsDeserializer extends JsonDeserializer { + @Override + public SearchRuns deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us 
in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchRunsPb pb = mapper.readValue(p, SearchRunsPb.class); + return SearchRuns.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsPb.java new file mode 100755 index 000000000..0b3dba6b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchRunsPb { + @JsonProperty("experiment_ids") + private Collection experimentIds; + + @JsonProperty("filter") + private String filter; + + @JsonProperty("max_results") + private Long maxResults; + + @JsonProperty("order_by") + private Collection orderBy; + + @JsonProperty("page_token") + private String pageToken; + + @JsonProperty("run_view_type") + private ViewType runViewType; + + public SearchRunsPb setExperimentIds(Collection experimentIds) { + this.experimentIds = experimentIds; + return this; + } + + public Collection getExperimentIds() { + return experimentIds; + } + + public SearchRunsPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchRunsPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchRunsPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchRunsPb 
setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public SearchRunsPb setRunViewType(ViewType runViewType) { + this.runViewType = runViewType; + return this; + } + + public ViewType getRunViewType() { + return runViewType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchRunsPb that = (SearchRunsPb) o; + return Objects.equals(experimentIds, that.experimentIds) + && Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(runViewType, that.runViewType); + } + + @Override + public int hashCode() { + return Objects.hash(experimentIds, filter, maxResults, orderBy, pageToken, runViewType); + } + + @Override + public String toString() { + return new ToStringer(SearchRunsPb.class) + .add("experimentIds", experimentIds) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .add("runViewType", runViewType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java index 43e9ebe6f..1e1cbe1a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SearchRunsResponse.SearchRunsResponseSerializer.class) +@JsonDeserialize(using = SearchRunsResponse.SearchRunsResponseDeserializer.class) public class SearchRunsResponse { /** Token for the next page of runs. */ - @JsonProperty("next_page_token") private String nextPageToken; /** Runs that match the search criteria. */ - @JsonProperty("runs") private Collection runs; public SearchRunsResponse setNextPageToken(String nextPageToken) { @@ -56,4 +65,40 @@ public String toString() { .add("runs", runs) .toString(); } + + SearchRunsResponsePb toPb() { + SearchRunsResponsePb pb = new SearchRunsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRuns(runs); + + return pb; + } + + static SearchRunsResponse fromPb(SearchRunsResponsePb pb) { + SearchRunsResponse model = new SearchRunsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRuns(pb.getRuns()); + + return model; + } + + public static class SearchRunsResponseSerializer extends JsonSerializer { + @Override + public void serialize(SearchRunsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SearchRunsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SearchRunsResponseDeserializer extends JsonDeserializer { + @Override + public SearchRunsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SearchRunsResponsePb pb = mapper.readValue(p, SearchRunsResponsePb.class); + return SearchRunsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponsePb.java new file mode 100755 index 000000000..d16ffcb44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SearchRunsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("runs") + private Collection runs; + + public SearchRunsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public SearchRunsResponsePb setRuns(Collection runs) { + this.runs = runs; + return this; + } + + public Collection getRuns() { + return runs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchRunsResponsePb that = (SearchRunsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(runs, that.runs); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, runs); + } + + @Override + public String toString() { + return new ToStringer(SearchRunsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("runs", runs) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java index 4f4189737..14f04e024 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetExperimentTag.SetExperimentTagSerializer.class) +@JsonDeserialize(using = SetExperimentTag.SetExperimentTagDeserializer.class) public class SetExperimentTag { /** ID of the experiment under which to log the tag. Must be provided. */ - @JsonProperty("experiment_id") private String experimentId; /** Name of the tag. Keys up to 250 bytes in size are supported. */ - @JsonProperty("key") private String key; /** String value of the tag being logged. Values up to 64KB in size are supported. 
*/ - @JsonProperty("value") private String value; public SetExperimentTag setExperimentId(String experimentId) { @@ -71,4 +79,42 @@ public String toString() { .add("value", value) .toString(); } + + SetExperimentTagPb toPb() { + SetExperimentTagPb pb = new SetExperimentTagPb(); + pb.setExperimentId(experimentId); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static SetExperimentTag fromPb(SetExperimentTagPb pb) { + SetExperimentTag model = new SetExperimentTag(); + model.setExperimentId(pb.getExperimentId()); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class SetExperimentTagSerializer extends JsonSerializer { + @Override + public void serialize(SetExperimentTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetExperimentTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetExperimentTagDeserializer extends JsonDeserializer { + @Override + public SetExperimentTag deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetExperimentTagPb pb = mapper.readValue(p, SetExperimentTagPb.class); + return SetExperimentTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagPb.java new file mode 100755 index 000000000..a41ffc2db --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SetExperimentTagPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public SetExperimentTagPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public SetExperimentTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public SetExperimentTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetExperimentTagPb that = (SetExperimentTagPb) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(key, that.key) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, key, value); + } + + @Override + public String toString() { + return new ToStringer(SetExperimentTagPb.class) + .add("experimentId", experimentId) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java index 2f62954b2..ffc4b5467 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java @@ -4,9 +4,21 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetExperimentTagResponse.SetExperimentTagResponseSerializer.class) +@JsonDeserialize(using = SetExperimentTagResponse.SetExperimentTagResponseDeserializer.class) public class SetExperimentTagResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(SetExperimentTagResponse.class).toString(); } + + SetExperimentTagResponsePb toPb() { + SetExperimentTagResponsePb pb = new SetExperimentTagResponsePb(); + + return pb; + } + + static SetExperimentTagResponse fromPb(SetExperimentTagResponsePb pb) { + SetExperimentTagResponse model = new SetExperimentTagResponse(); + + return model; + } + + public static class SetExperimentTagResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SetExperimentTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetExperimentTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetExperimentTagResponseDeserializer + extends JsonDeserializer { + @Override + public SetExperimentTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetExperimentTagResponsePb pb = mapper.readValue(p, SetExperimentTagResponsePb.class); + return SetExperimentTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponsePb.java new file mode 100755 index 000000000..8af396659 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetExperimentTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetExperimentTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java index 244249d38..b6dcbc9ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SetLoggedModelTagsRequest.SetLoggedModelTagsRequestSerializer.class) +@JsonDeserialize(using = SetLoggedModelTagsRequest.SetLoggedModelTagsRequestDeserializer.class) public class SetLoggedModelTagsRequest { /** The ID of the logged model to set the tags on. */ - @JsonIgnore private String modelId; + private String modelId; /** The tags to set on the logged model. */ - @JsonProperty("tags") private Collection tags; public SetLoggedModelTagsRequest setModelId(String modelId) { @@ -56,4 +65,43 @@ public String toString() { .add("tags", tags) .toString(); } + + SetLoggedModelTagsRequestPb toPb() { + SetLoggedModelTagsRequestPb pb = new SetLoggedModelTagsRequestPb(); + pb.setModelId(modelId); + pb.setTags(tags); + + return pb; + } + + static SetLoggedModelTagsRequest fromPb(SetLoggedModelTagsRequestPb pb) { + SetLoggedModelTagsRequest model = new SetLoggedModelTagsRequest(); + model.setModelId(pb.getModelId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class SetLoggedModelTagsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SetLoggedModelTagsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetLoggedModelTagsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetLoggedModelTagsRequestDeserializer + extends JsonDeserializer { + @Override + public SetLoggedModelTagsRequest deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetLoggedModelTagsRequestPb pb = mapper.readValue(p, SetLoggedModelTagsRequestPb.class); + return SetLoggedModelTagsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequestPb.java new file mode 100755 index 000000000..0fec11e79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SetLoggedModelTagsRequestPb { + @JsonIgnore private String modelId; + + @JsonProperty("tags") + private Collection tags; + + public SetLoggedModelTagsRequestPb setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public SetLoggedModelTagsRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetLoggedModelTagsRequestPb that = (SetLoggedModelTagsRequestPb) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, tags); + } + + @Override + public String toString() { + 
return new ToStringer(SetLoggedModelTagsRequestPb.class) + .add("modelId", modelId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java index 924dacc20..3e78003eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetLoggedModelTagsResponse.SetLoggedModelTagsResponseSerializer.class) +@JsonDeserialize(using = SetLoggedModelTagsResponse.SetLoggedModelTagsResponseDeserializer.class) public class SetLoggedModelTagsResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(SetLoggedModelTagsResponse.class).toString(); } + + SetLoggedModelTagsResponsePb toPb() { + SetLoggedModelTagsResponsePb pb = new SetLoggedModelTagsResponsePb(); + + return pb; + } + + static SetLoggedModelTagsResponse fromPb(SetLoggedModelTagsResponsePb pb) { + SetLoggedModelTagsResponse model = new SetLoggedModelTagsResponse(); + + return model; + } + + public static class 
SetLoggedModelTagsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SetLoggedModelTagsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetLoggedModelTagsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetLoggedModelTagsResponseDeserializer + extends JsonDeserializer { + @Override + public SetLoggedModelTagsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetLoggedModelTagsResponsePb pb = mapper.readValue(p, SetLoggedModelTagsResponsePb.class); + return SetLoggedModelTagsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponsePb.java new file mode 100755 index 000000000..522704fc5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetLoggedModelTagsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetLoggedModelTagsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java index dfb417bf2..7809c8bb5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java @@ -4,28 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetModelTagRequest.SetModelTagRequestSerializer.class) +@JsonDeserialize(using = SetModelTagRequest.SetModelTagRequestDeserializer.class) public class SetModelTagRequest { /** * Name of the tag. Maximum size depends on storage backend. 
If a tag with this name already * exists, its preexisting value will be replaced by the specified `value`. All storage backends * are guaranteed to support key values up to 250 bytes in size. */ - @JsonProperty("key") private String key; /** Unique name of the model. */ - @JsonProperty("name") private String name; /** * String value of the tag being logged. Maximum size depends on storage backend. All storage * backends are guaranteed to support key values up to 5000 bytes in size. */ - @JsonProperty("value") private String value; public SetModelTagRequest setKey(String key) { @@ -78,4 +86,42 @@ public String toString() { .add("value", value) .toString(); } + + SetModelTagRequestPb toPb() { + SetModelTagRequestPb pb = new SetModelTagRequestPb(); + pb.setKey(key); + pb.setName(name); + pb.setValue(value); + + return pb; + } + + static SetModelTagRequest fromPb(SetModelTagRequestPb pb) { + SetModelTagRequest model = new SetModelTagRequest(); + model.setKey(pb.getKey()); + model.setName(pb.getName()); + model.setValue(pb.getValue()); + + return model; + } + + public static class SetModelTagRequestSerializer extends JsonSerializer { + @Override + public void serialize(SetModelTagRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetModelTagRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetModelTagRequestDeserializer extends JsonDeserializer { + @Override + public SetModelTagRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetModelTagRequestPb pb = mapper.readValue(p, SetModelTagRequestPb.class); + return SetModelTagRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequestPb.java similarity index 58% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequestPb.java index 053a8991c..5dfffbcfe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequestPb.java @@ -8,16 +8,26 @@ import java.util.Objects; @Generated -public class ArtifactCredentialInfoHttpHeader { - /** The HTTP header name. */ +class SetModelTagRequestPb { + @JsonProperty("key") + private String key; + @JsonProperty("name") private String name; - /** The HTTP header value. 
*/ @JsonProperty("value") private String value; - public ArtifactCredentialInfoHttpHeader setName(String name) { + public SetModelTagRequestPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public SetModelTagRequestPb setName(String name) { this.name = name; return this; } @@ -26,7 +36,7 @@ public String getName() { return name; } - public ArtifactCredentialInfoHttpHeader setValue(String value) { + public SetModelTagRequestPb setValue(String value) { this.value = value; return this; } @@ -39,18 +49,21 @@ public String getValue() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfoHttpHeader that = (ArtifactCredentialInfoHttpHeader) o; - return Objects.equals(name, that.name) && Objects.equals(value, that.value); + SetModelTagRequestPb that = (SetModelTagRequestPb) o; + return Objects.equals(key, that.key) + && Objects.equals(name, that.name) + && Objects.equals(value, that.value); } @Override public int hashCode() { - return Objects.hash(name, value); + return Objects.hash(key, name, value); } @Override public String toString() { - return new ToStringer(ArtifactCredentialInfoHttpHeader.class) + return new ToStringer(SetModelTagRequestPb.class) + .add("key", key) .add("name", name) .add("value", value) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java index a741183c3..a55dbae00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetModelTagResponse.SetModelTagResponseSerializer.class) +@JsonDeserialize(using = SetModelTagResponse.SetModelTagResponseDeserializer.class) public class SetModelTagResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(SetModelTagResponse.class).toString(); } + + SetModelTagResponsePb toPb() { + SetModelTagResponsePb pb = new SetModelTagResponsePb(); + + return pb; + } + + static SetModelTagResponse fromPb(SetModelTagResponsePb pb) { + SetModelTagResponse model = new SetModelTagResponse(); + + return model; + } + + public static class SetModelTagResponseSerializer extends JsonSerializer { + @Override + public void serialize(SetModelTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetModelTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetModelTagResponseDeserializer + extends JsonDeserializer { + @Override + public SetModelTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetModelTagResponsePb pb = mapper.readValue(p, SetModelTagResponsePb.class); + return SetModelTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponsePb.java new file mode 100755 index 000000000..f237cd448 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetModelTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetModelTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java index 6e9901b97..a80607564 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java @@ -4,32 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetModelVersionTagRequest.SetModelVersionTagRequestSerializer.class) +@JsonDeserialize(using = SetModelVersionTagRequest.SetModelVersionTagRequestDeserializer.class) public class SetModelVersionTagRequest { /** * Name of the tag. Maximum size depends on storage backend. If a tag with this name already * exists, its preexisting value will be replaced by the specified `value`. All storage backends * are guaranteed to support key values up to 250 bytes in size. */ - @JsonProperty("key") private String key; /** Unique name of the model. */ - @JsonProperty("name") private String name; /** * String value of the tag being logged. Maximum size depends on storage backend. All storage * backends are guaranteed to support key values up to 5000 bytes in size. */ - @JsonProperty("value") private String value; /** Model version number. 
*/ - @JsonProperty("version") private String version; public SetModelVersionTagRequest setKey(String key) { @@ -93,4 +100,47 @@ public String toString() { .add("version", version) .toString(); } + + SetModelVersionTagRequestPb toPb() { + SetModelVersionTagRequestPb pb = new SetModelVersionTagRequestPb(); + pb.setKey(key); + pb.setName(name); + pb.setValue(value); + pb.setVersion(version); + + return pb; + } + + static SetModelVersionTagRequest fromPb(SetModelVersionTagRequestPb pb) { + SetModelVersionTagRequest model = new SetModelVersionTagRequest(); + model.setKey(pb.getKey()); + model.setName(pb.getName()); + model.setValue(pb.getValue()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class SetModelVersionTagRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SetModelVersionTagRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetModelVersionTagRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetModelVersionTagRequestDeserializer + extends JsonDeserializer { + @Override + public SetModelVersionTagRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetModelVersionTagRequestPb pb = mapper.readValue(p, SetModelVersionTagRequestPb.class); + return SetModelVersionTagRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequestPb.java new file mode 100755 index 000000000..afab08316 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SetModelVersionTagRequestPb { + @JsonProperty("key") + private String key; + + @JsonProperty("name") + private String name; + + @JsonProperty("value") + private String value; + + @JsonProperty("version") + private String version; + + public SetModelVersionTagRequestPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public SetModelVersionTagRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SetModelVersionTagRequestPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + public SetModelVersionTagRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetModelVersionTagRequestPb that = (SetModelVersionTagRequestPb) o; + return Objects.equals(key, that.key) + && Objects.equals(name, that.name) + && Objects.equals(value, that.value) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(key, name, value, version); + } + + @Override + public String toString() { + return new ToStringer(SetModelVersionTagRequestPb.class) + .add("key", key) + .add("name", name) + .add("value", value) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java index 
26d6245bf..e0e94e571 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetModelVersionTagResponse.SetModelVersionTagResponseSerializer.class) +@JsonDeserialize(using = SetModelVersionTagResponse.SetModelVersionTagResponseDeserializer.class) public class SetModelVersionTagResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(SetModelVersionTagResponse.class).toString(); } + + SetModelVersionTagResponsePb toPb() { + SetModelVersionTagResponsePb pb = new SetModelVersionTagResponsePb(); + + return pb; + } + + static SetModelVersionTagResponse fromPb(SetModelVersionTagResponsePb pb) { + SetModelVersionTagResponse model = new SetModelVersionTagResponse(); + + return model; + } + + public static class SetModelVersionTagResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SetModelVersionTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetModelVersionTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
SetModelVersionTagResponseDeserializer + extends JsonDeserializer { + @Override + public SetModelVersionTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetModelVersionTagResponsePb pb = mapper.readValue(p, SetModelVersionTagResponsePb.class); + return SetModelVersionTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponsePb.java new file mode 100755 index 000000000..5845e8c6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetModelVersionTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetModelVersionTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java index 71795835f..d40f942a6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetTag.SetTagSerializer.class) +@JsonDeserialize(using = SetTag.SetTagDeserializer.class) public class SetTag { /** Name of the tag. Keys up to 250 bytes in size are supported. */ - @JsonProperty("key") private String key; /** ID of the run under which to log the tag. Must be provided. */ - @JsonProperty("run_id") private String runId; /** * [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be * removed in a future MLflow version. */ - @JsonProperty("run_uuid") private String runUuid; /** String value of the tag being logged. Values up to 64KB in size are supported. 
*/ - @JsonProperty("value") private String value; public SetTag setKey(String key) { @@ -89,4 +96,43 @@ public String toString() { .add("value", value) .toString(); } + + SetTagPb toPb() { + SetTagPb pb = new SetTagPb(); + pb.setKey(key); + pb.setRunId(runId); + pb.setRunUuid(runUuid); + pb.setValue(value); + + return pb; + } + + static SetTag fromPb(SetTagPb pb) { + SetTag model = new SetTag(); + model.setKey(pb.getKey()); + model.setRunId(pb.getRunId()); + model.setRunUuid(pb.getRunUuid()); + model.setValue(pb.getValue()); + + return model; + } + + public static class SetTagSerializer extends JsonSerializer { + @Override + public void serialize(SetTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetTagDeserializer extends JsonDeserializer { + @Override + public SetTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetTagPb pb = mapper.readValue(p, SetTagPb.class); + return SetTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagPb.java new file mode 100755 index 000000000..03ceb1fe5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SetTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_uuid") + private String runUuid; + + @JsonProperty("value") + private String value; + + public SetTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public SetTagPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public SetTagPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + public SetTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetTagPb that = (SetTagPb) o; + return Objects.equals(key, that.key) + && Objects.equals(runId, that.runId) + && Objects.equals(runUuid, that.runUuid) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, runId, runUuid, value); + } + + @Override + public String toString() { + return new ToStringer(SetTagPb.class) + .add("key", key) + .add("runId", runId) + .add("runUuid", runUuid) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java index 89d485ce0..f03fc3adf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetTagResponse.SetTagResponseSerializer.class) +@JsonDeserialize(using = SetTagResponse.SetTagResponseDeserializer.class) public class SetTagResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(SetTagResponse.class).toString(); } + + SetTagResponsePb toPb() { + SetTagResponsePb pb = new SetTagResponsePb(); + + return pb; + } + + static SetTagResponse fromPb(SetTagResponsePb pb) { + SetTagResponse model = new SetTagResponse(); + + return model; + } + + public static class SetTagResponseSerializer extends JsonSerializer { + @Override + public void serialize(SetTagResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetTagResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetTagResponseDeserializer extends JsonDeserializer { + @Override + public SetTagResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetTagResponsePb pb = mapper.readValue(p, SetTagResponsePb.class); + return SetTagResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponsePb.java new file mode 100755 index 000000000..5cdb11919 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetTagResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetTagResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java index 16729c8e2..0ac7c037f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Test webhook response object. */ @Generated +@JsonSerialize(using = TestRegistryWebhook.TestRegistryWebhookSerializer.class) +@JsonDeserialize(using = TestRegistryWebhook.TestRegistryWebhookDeserializer.class) public class TestRegistryWebhook { /** Body of the response from the webhook URL */ - @JsonProperty("body") private String body; /** Status code returned by the webhook URL */ - @JsonProperty("status_code") private Long statusCode; public TestRegistryWebhook setBody(String body) { @@ -56,4 +65,41 @@ public String toString() { .add("statusCode", statusCode) .toString(); } + + TestRegistryWebhookPb toPb() { + TestRegistryWebhookPb pb = new TestRegistryWebhookPb(); + pb.setBody(body); + pb.setStatusCode(statusCode); + + return pb; + } + + static TestRegistryWebhook fromPb(TestRegistryWebhookPb pb) { + TestRegistryWebhook model = new TestRegistryWebhook(); + model.setBody(pb.getBody()); + model.setStatusCode(pb.getStatusCode()); + + return model; + } + + public static class TestRegistryWebhookSerializer extends JsonSerializer { + @Override + public void serialize(TestRegistryWebhook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TestRegistryWebhookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TestRegistryWebhookDeserializer + extends JsonDeserializer { + @Override + public TestRegistryWebhook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TestRegistryWebhookPb pb = mapper.readValue(p, TestRegistryWebhookPb.class); + return TestRegistryWebhook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookPb.java new file mode 100755 index 000000000..f0e53ad3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Test webhook response object. */ +@Generated +class TestRegistryWebhookPb { + @JsonProperty("body") + private String body; + + @JsonProperty("status_code") + private Long statusCode; + + public TestRegistryWebhookPb setBody(String body) { + this.body = body; + return this; + } + + public String getBody() { + return body; + } + + public TestRegistryWebhookPb setStatusCode(Long statusCode) { + this.statusCode = statusCode; + return this; + } + + public Long getStatusCode() { + return statusCode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestRegistryWebhookPb that = (TestRegistryWebhookPb) o; + return Objects.equals(body, that.body) && Objects.equals(statusCode, that.statusCode); + } + + @Override + public int hashCode() { + return Objects.hash(body, statusCode); + } + + @Override + public String toString() { + return new ToStringer(TestRegistryWebhookPb.class) + .add("body", body) + .add("statusCode", statusCode) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java index be3a7d261..45ab3ff9a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TestRegistryWebhookRequest.TestRegistryWebhookRequestSerializer.class) +@JsonDeserialize(using = TestRegistryWebhookRequest.TestRegistryWebhookRequestDeserializer.class) public class TestRegistryWebhookRequest { /** * If `event` is specified, the test trigger uses the specified event. If `event` is not * specified, the test trigger uses a randomly chosen event associated with the webhook. 
*/ - @JsonProperty("event") private RegistryWebhookEvent event; /** Webhook ID */ - @JsonProperty("id") private String id; public TestRegistryWebhookRequest setEvent(RegistryWebhookEvent event) { @@ -58,4 +67,43 @@ public String toString() { .add("id", id) .toString(); } + + TestRegistryWebhookRequestPb toPb() { + TestRegistryWebhookRequestPb pb = new TestRegistryWebhookRequestPb(); + pb.setEvent(event); + pb.setId(id); + + return pb; + } + + static TestRegistryWebhookRequest fromPb(TestRegistryWebhookRequestPb pb) { + TestRegistryWebhookRequest model = new TestRegistryWebhookRequest(); + model.setEvent(pb.getEvent()); + model.setId(pb.getId()); + + return model; + } + + public static class TestRegistryWebhookRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + TestRegistryWebhookRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TestRegistryWebhookRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TestRegistryWebhookRequestDeserializer + extends JsonDeserializer { + @Override + public TestRegistryWebhookRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TestRegistryWebhookRequestPb pb = mapper.readValue(p, TestRegistryWebhookRequestPb.class); + return TestRegistryWebhookRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequestPb.java new file mode 100755 index 000000000..4432b1471 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TestRegistryWebhookRequestPb { + @JsonProperty("event") + private RegistryWebhookEvent event; + + @JsonProperty("id") + private String id; + + public TestRegistryWebhookRequestPb setEvent(RegistryWebhookEvent event) { + this.event = event; + return this; + } + + public RegistryWebhookEvent getEvent() { + return event; + } + + public TestRegistryWebhookRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestRegistryWebhookRequestPb that = (TestRegistryWebhookRequestPb) o; + return Objects.equals(event, that.event) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(event, id); + } + + @Override + public String toString() { + return new ToStringer(TestRegistryWebhookRequestPb.class) + .add("event", event) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java index f74b558bb..9d6277d23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TestRegistryWebhookResponse.TestRegistryWebhookResponseSerializer.class) +@JsonDeserialize(using = TestRegistryWebhookResponse.TestRegistryWebhookResponseDeserializer.class) public class TestRegistryWebhookResponse { /** Test webhook response object. */ - @JsonProperty("webhook") private TestRegistryWebhook webhook; public TestRegistryWebhookResponse setWebhook(TestRegistryWebhook webhook) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(TestRegistryWebhookResponse.class).add("webhook", webhook).toString(); } + + TestRegistryWebhookResponsePb toPb() { + TestRegistryWebhookResponsePb pb = new TestRegistryWebhookResponsePb(); + pb.setWebhook(webhook); + + return pb; + } + + static TestRegistryWebhookResponse fromPb(TestRegistryWebhookResponsePb pb) { + TestRegistryWebhookResponse model = new TestRegistryWebhookResponse(); + model.setWebhook(pb.getWebhook()); + + return model; + } + + public static class TestRegistryWebhookResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + TestRegistryWebhookResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TestRegistryWebhookResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TestRegistryWebhookResponseDeserializer + extends JsonDeserializer { + @Override + public TestRegistryWebhookResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TestRegistryWebhookResponsePb pb = mapper.readValue(p, TestRegistryWebhookResponsePb.class); + return TestRegistryWebhookResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponsePb.java new file mode 100755 index 000000000..3da03b329 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TestRegistryWebhookResponsePb { + @JsonProperty("webhook") + private TestRegistryWebhook webhook; + + public TestRegistryWebhookResponsePb setWebhook(TestRegistryWebhook webhook) { + this.webhook = webhook; + return this; + } + + public TestRegistryWebhook getWebhook() { + return webhook; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestRegistryWebhookResponsePb that = (TestRegistryWebhookResponsePb) o; + return Objects.equals(webhook, that.webhook); + } + + @Override + public int hashCode() { + return Objects.hash(webhook); + } + + @Override + public String toString() { + return new ToStringer(TestRegistryWebhookResponsePb.class).add("webhook", webhook).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java index 0e69b1bf3..2752209d5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java @@ -4,21 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + TransitionModelVersionStageDatabricks.TransitionModelVersionStageDatabricksSerializer.class) +@JsonDeserialize( + using = + TransitionModelVersionStageDatabricks.TransitionModelVersionStageDatabricksDeserializer + .class) public class TransitionModelVersionStageDatabricks { /** Specifies whether to archive all current model versions in the target stage. */ - @JsonProperty("archive_existing_versions") private Boolean archiveExistingVersions; /** User-provided comment on the action. */ - @JsonProperty("comment") private String comment; /** Name of the model. */ - @JsonProperty("name") private String name; /** @@ -32,11 +45,9 @@ public class TransitionModelVersionStageDatabricks { * *

* `Archived`: Archived stage. */ - @JsonProperty("stage") private Stage stage; /** Version of the model. */ - @JsonProperty("version") private String version; public TransitionModelVersionStageDatabricks setArchiveExistingVersions( @@ -112,4 +123,50 @@ public String toString() { .add("version", version) .toString(); } + + TransitionModelVersionStageDatabricksPb toPb() { + TransitionModelVersionStageDatabricksPb pb = new TransitionModelVersionStageDatabricksPb(); + pb.setArchiveExistingVersions(archiveExistingVersions); + pb.setComment(comment); + pb.setName(name); + pb.setStage(stage); + pb.setVersion(version); + + return pb; + } + + static TransitionModelVersionStageDatabricks fromPb(TransitionModelVersionStageDatabricksPb pb) { + TransitionModelVersionStageDatabricks model = new TransitionModelVersionStageDatabricks(); + model.setArchiveExistingVersions(pb.getArchiveExistingVersions()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setStage(pb.getStage()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class TransitionModelVersionStageDatabricksSerializer + extends JsonSerializer { + @Override + public void serialize( + TransitionModelVersionStageDatabricks value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TransitionModelVersionStageDatabricksPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TransitionModelVersionStageDatabricksDeserializer + extends JsonDeserializer { + @Override + public TransitionModelVersionStageDatabricks deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TransitionModelVersionStageDatabricksPb pb = + mapper.readValue(p, TransitionModelVersionStageDatabricksPb.class); + return TransitionModelVersionStageDatabricks.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricksPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricksPb.java new file mode 100755 index 000000000..d99030907 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricksPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TransitionModelVersionStageDatabricksPb { + @JsonProperty("archive_existing_versions") + private Boolean archiveExistingVersions; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("stage") + private Stage stage; + + @JsonProperty("version") + private String version; + + public TransitionModelVersionStageDatabricksPb setArchiveExistingVersions( + Boolean archiveExistingVersions) { + this.archiveExistingVersions = archiveExistingVersions; + return this; + } + + public Boolean getArchiveExistingVersions() { + return archiveExistingVersions; + } + + public TransitionModelVersionStageDatabricksPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public TransitionModelVersionStageDatabricksPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public TransitionModelVersionStageDatabricksPb 
setStage(Stage stage) { + this.stage = stage; + return this; + } + + public Stage getStage() { + return stage; + } + + public TransitionModelVersionStageDatabricksPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransitionModelVersionStageDatabricksPb that = (TransitionModelVersionStageDatabricksPb) o; + return Objects.equals(archiveExistingVersions, that.archiveExistingVersions) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(stage, that.stage) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(archiveExistingVersions, comment, name, stage, version); + } + + @Override + public String toString() { + return new ToStringer(TransitionModelVersionStageDatabricksPb.class) + .add("archiveExistingVersions", archiveExistingVersions) + .add("comment", comment) + .add("name", name) + .add("stage", stage) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java index ebfb7c60e..c54a82207 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java @@ -4,23 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Transition request details. */ @Generated +@JsonSerialize(using = TransitionRequest.TransitionRequestSerializer.class) +@JsonDeserialize(using = TransitionRequest.TransitionRequestDeserializer.class) public class TransitionRequest { /** Array of actions on the activity allowed for the current viewer. */ - @JsonProperty("available_actions") private Collection availableActions; /** User-provided comment associated with the transition request. */ - @JsonProperty("comment") private String comment; /** Creation time of the object, as a Unix timestamp in milliseconds. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** @@ -34,11 +42,9 @@ public class TransitionRequest { * *

* `Archived`: Archived stage. */ - @JsonProperty("to_stage") private Stage toStage; /** The username of the user that created the object. */ - @JsonProperty("user_id") private String userId; public TransitionRequest setAvailableActions(Collection availableActions) { @@ -113,4 +119,46 @@ public String toString() { .add("userId", userId) .toString(); } + + TransitionRequestPb toPb() { + TransitionRequestPb pb = new TransitionRequestPb(); + pb.setAvailableActions(availableActions); + pb.setComment(comment); + pb.setCreationTimestamp(creationTimestamp); + pb.setToStage(toStage); + pb.setUserId(userId); + + return pb; + } + + static TransitionRequest fromPb(TransitionRequestPb pb) { + TransitionRequest model = new TransitionRequest(); + model.setAvailableActions(pb.getAvailableActions()); + model.setComment(pb.getComment()); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setToStage(pb.getToStage()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class TransitionRequestSerializer extends JsonSerializer { + @Override + public void serialize(TransitionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TransitionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TransitionRequestDeserializer extends JsonDeserializer { + @Override + public TransitionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TransitionRequestPb pb = mapper.readValue(p, TransitionRequestPb.class); + return TransitionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequestPb.java new file mode 100755 index 000000000..428e128b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequestPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Transition request details. */ +@Generated +class TransitionRequestPb { + @JsonProperty("available_actions") + private Collection availableActions; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("to_stage") + private Stage toStage; + + @JsonProperty("user_id") + private String userId; + + public TransitionRequestPb setAvailableActions(Collection availableActions) { + this.availableActions = availableActions; + return this; + } + + public Collection getAvailableActions() { + return availableActions; + } + + public TransitionRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public TransitionRequestPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public TransitionRequestPb setToStage(Stage toStage) { + this.toStage = toStage; + return this; + } + + public Stage getToStage() 
{ + return toStage; + } + + public TransitionRequestPb setUserId(String userId) { + this.userId = userId; + return this; + } + + public String getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransitionRequestPb that = (TransitionRequestPb) o; + return Objects.equals(availableActions, that.availableActions) + && Objects.equals(comment, that.comment) + && Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(toStage, that.toStage) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(availableActions, comment, creationTimestamp, toStage, userId); + } + + @Override + public String toString() { + return new ToStringer(TransitionRequestPb.class) + .add("availableActions", availableActions) + .add("comment", comment) + .add("creationTimestamp", creationTimestamp) + .add("toStage", toStage) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java index 22e210045..559edd358 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TransitionStageResponse.TransitionStageResponseSerializer.class) +@JsonDeserialize(using = TransitionStageResponse.TransitionStageResponseDeserializer.class) public class TransitionStageResponse { /** */ - @JsonProperty("model_version") private ModelVersionDatabricks modelVersion; public TransitionStageResponse setModelVersion(ModelVersionDatabricks modelVersion) { @@ -41,4 +51,41 @@ public String toString() { .add("modelVersion", modelVersion) .toString(); } + + TransitionStageResponsePb toPb() { + TransitionStageResponsePb pb = new TransitionStageResponsePb(); + pb.setModelVersion(modelVersion); + + return pb; + } + + static TransitionStageResponse fromPb(TransitionStageResponsePb pb) { + TransitionStageResponse model = new TransitionStageResponse(); + model.setModelVersion(pb.getModelVersion()); + + return model; + } + + public static class TransitionStageResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + TransitionStageResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TransitionStageResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TransitionStageResponseDeserializer + extends JsonDeserializer { + @Override + public TransitionStageResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TransitionStageResponsePb pb = mapper.readValue(p, TransitionStageResponsePb.class); + return TransitionStageResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponsePb.java new file mode 100755 index 000000000..b697c9327 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TransitionStageResponsePb { + @JsonProperty("model_version") + private ModelVersionDatabricks modelVersion; + + public TransitionStageResponsePb setModelVersion(ModelVersionDatabricks modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public ModelVersionDatabricks getModelVersion() { + return modelVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransitionStageResponsePb that = (TransitionStageResponsePb) o; + return Objects.equals(modelVersion, that.modelVersion); + } + + @Override + public int hashCode() { + return Objects.hash(modelVersion); + } + + @Override + public String toString() { + return new ToStringer(TransitionStageResponsePb.class) + .add("modelVersion", modelVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java index 22cec7325..e3d7e2c14 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateComment.UpdateCommentSerializer.class) +@JsonDeserialize(using = UpdateComment.UpdateCommentDeserializer.class) public class UpdateComment { /** User-provided comment on the action. 
*/ - @JsonProperty("comment") private String comment; /** Unique identifier of an activity */ - @JsonProperty("id") private String id; public UpdateComment setComment(String comment) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateComment.class).add("comment", comment).add("id", id).toString(); } + + UpdateCommentPb toPb() { + UpdateCommentPb pb = new UpdateCommentPb(); + pb.setComment(comment); + pb.setId(id); + + return pb; + } + + static UpdateComment fromPb(UpdateCommentPb pb) { + UpdateComment model = new UpdateComment(); + model.setComment(pb.getComment()); + model.setId(pb.getId()); + + return model; + } + + public static class UpdateCommentSerializer extends JsonSerializer { + @Override + public void serialize(UpdateComment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCommentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCommentDeserializer extends JsonDeserializer { + @Override + public UpdateComment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCommentPb pb = mapper.readValue(p, UpdateCommentPb.class); + return UpdateComment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentPb.java new file mode 100755 index 000000000..824f430d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateCommentPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("id") + private String id; + + public UpdateCommentPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateCommentPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCommentPb that = (UpdateCommentPb) o; + return Objects.equals(comment, that.comment) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(comment, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateCommentPb.class).add("comment", comment).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java index e453a67b6..584319f99 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCommentResponse.UpdateCommentResponseSerializer.class) +@JsonDeserialize(using = UpdateCommentResponse.UpdateCommentResponseDeserializer.class) public class UpdateCommentResponse { /** Comment details. */ - @JsonProperty("comment") private CommentObject comment; public UpdateCommentResponse setComment(CommentObject comment) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateCommentResponse.class).add("comment", comment).toString(); } + + UpdateCommentResponsePb toPb() { + UpdateCommentResponsePb pb = new UpdateCommentResponsePb(); + pb.setComment(comment); + + return pb; + } + + static UpdateCommentResponse fromPb(UpdateCommentResponsePb pb) { + UpdateCommentResponse model = new UpdateCommentResponse(); + model.setComment(pb.getComment()); + + return model; + } + + public static class UpdateCommentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCommentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCommentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCommentResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateCommentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCommentResponsePb pb = mapper.readValue(p, UpdateCommentResponsePb.class); + return UpdateCommentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponsePb.java new file mode 100755 index 000000000..982a06f11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateCommentResponsePb { + @JsonProperty("comment") + private CommentObject comment; + + public UpdateCommentResponsePb setComment(CommentObject comment) { + this.comment = comment; + return this; + } + + public CommentObject getComment() { + return comment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCommentResponsePb that = (UpdateCommentResponsePb) o; + return Objects.equals(comment, that.comment); + } + + @Override + public int hashCode() { + return Objects.hash(comment); + } + + @Override + public String toString() { + return new ToStringer(UpdateCommentResponsePb.class).add("comment", comment).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java index 81c894524..cb2806930 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExperiment.UpdateExperimentSerializer.class) +@JsonDeserialize(using = UpdateExperiment.UpdateExperimentDeserializer.class) public class UpdateExperiment { /** ID of the associated experiment. */ - @JsonProperty("experiment_id") private String experimentId; /** If provided, the experiment's name is changed to the new name. The new name must be unique. 
*/ - @JsonProperty("new_name") private String newName; public UpdateExperiment setExperimentId(String experimentId) { @@ -55,4 +64,40 @@ public String toString() { .add("newName", newName) .toString(); } + + UpdateExperimentPb toPb() { + UpdateExperimentPb pb = new UpdateExperimentPb(); + pb.setExperimentId(experimentId); + pb.setNewName(newName); + + return pb; + } + + static UpdateExperiment fromPb(UpdateExperimentPb pb) { + UpdateExperiment model = new UpdateExperiment(); + model.setExperimentId(pb.getExperimentId()); + model.setNewName(pb.getNewName()); + + return model; + } + + public static class UpdateExperimentSerializer extends JsonSerializer { + @Override + public void serialize(UpdateExperiment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExperimentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExperimentDeserializer extends JsonDeserializer { + @Override + public UpdateExperiment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExperimentPb pb = mapper.readValue(p, UpdateExperimentPb.class); + return UpdateExperiment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentPb.java new file mode 100755 index 000000000..8342b691d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateExperimentPb { + @JsonProperty("experiment_id") + private String experimentId; + + @JsonProperty("new_name") + private String newName; + + public UpdateExperimentPb setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public UpdateExperimentPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExperimentPb that = (UpdateExperimentPb) o; + return Objects.equals(experimentId, that.experimentId) && Objects.equals(newName, that.newName); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, newName); + } + + @Override + public String toString() { + return new ToStringer(UpdateExperimentPb.class) + .add("experimentId", experimentId) + .add("newName", newName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java index 4ee79bb74..9685d1c02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateExperimentResponse.UpdateExperimentResponseSerializer.class) +@JsonDeserialize(using = UpdateExperimentResponse.UpdateExperimentResponseDeserializer.class) public class UpdateExperimentResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateExperimentResponse.class).toString(); } + + UpdateExperimentResponsePb toPb() { + UpdateExperimentResponsePb pb = new UpdateExperimentResponsePb(); + + return pb; + } + + static UpdateExperimentResponse fromPb(UpdateExperimentResponsePb pb) { + UpdateExperimentResponse model = new UpdateExperimentResponse(); + + return model; + } + + public static class UpdateExperimentResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateExperimentResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateExperimentResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateExperimentResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateExperimentResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateExperimentResponsePb pb = mapper.readValue(p, UpdateExperimentResponsePb.class); + return UpdateExperimentResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponsePb.java new file mode 100755 index 000000000..33f5701d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateExperimentResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateExperimentResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java index 42856bab6..26a3712ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateModelRequest.UpdateModelRequestSerializer.class) +@JsonDeserialize(using = UpdateModelRequest.UpdateModelRequestDeserializer.class) public class UpdateModelRequest { /** If provided, updates the description for this `registered_model`. */ - @JsonProperty("description") private String description; /** Registered model unique name identifier. */ - @JsonProperty("name") private String name; public UpdateModelRequest setDescription(String description) { @@ -55,4 +64,40 @@ public String toString() { .add("name", name) .toString(); } + + UpdateModelRequestPb toPb() { + UpdateModelRequestPb pb = new UpdateModelRequestPb(); + pb.setDescription(description); + pb.setName(name); + + return pb; + } + + static UpdateModelRequest fromPb(UpdateModelRequestPb pb) { + UpdateModelRequest model = new UpdateModelRequest(); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateModelRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateModelRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateModelRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateModelRequestDeserializer extends JsonDeserializer { + @Override + public UpdateModelRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateModelRequestPb pb = mapper.readValue(p, UpdateModelRequestPb.class); + return UpdateModelRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequestPb.java new file mode 100755 index 000000000..e45c1e080 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateModelRequestPb { + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + public UpdateModelRequestPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public UpdateModelRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateModelRequestPb that = (UpdateModelRequestPb) o; + return Objects.equals(description, that.description) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(description, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateModelRequestPb.class) + .add("description", description) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java index 759e6d5e7..03db3e05f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateModelResponse.UpdateModelResponseSerializer.class) +@JsonDeserialize(using = UpdateModelResponse.UpdateModelResponseDeserializer.class) public class UpdateModelResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateModelResponse.class).toString(); } + + UpdateModelResponsePb toPb() { + UpdateModelResponsePb pb = new UpdateModelResponsePb(); + + return pb; + } + + static UpdateModelResponse fromPb(UpdateModelResponsePb pb) { + UpdateModelResponse model = new UpdateModelResponse(); + + return model; + } + + public static class UpdateModelResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateModelResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateModelResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateModelResponseDeserializer + extends JsonDeserializer { + 
@Override + public UpdateModelResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateModelResponsePb pb = mapper.readValue(p, UpdateModelResponsePb.class); + return UpdateModelResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponsePb.java new file mode 100755 index 000000000..72c0bf113 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateModelResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateModelResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java index e0b689cfc..f84e10479 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateModelVersionRequest.UpdateModelVersionRequestSerializer.class) +@JsonDeserialize(using = UpdateModelVersionRequest.UpdateModelVersionRequestDeserializer.class) public class UpdateModelVersionRequest { /** If provided, updates the description for this `registered_model`. */ - @JsonProperty("description") private String description; /** Name of the registered model */ - @JsonProperty("name") private String name; /** Model version number */ - @JsonProperty("version") private String version; public UpdateModelVersionRequest setDescription(String description) { @@ -71,4 +79,45 @@ public String toString() { .add("version", version) .toString(); } + + UpdateModelVersionRequestPb toPb() { + UpdateModelVersionRequestPb pb = new UpdateModelVersionRequestPb(); + pb.setDescription(description); + pb.setName(name); + pb.setVersion(version); + + return pb; + } + + static UpdateModelVersionRequest fromPb(UpdateModelVersionRequestPb pb) { + UpdateModelVersionRequest model = new UpdateModelVersionRequest(); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setVersion(pb.getVersion()); + + return model; + } + + public static class UpdateModelVersionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateModelVersionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
UpdateModelVersionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateModelVersionRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateModelVersionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateModelVersionRequestPb pb = mapper.readValue(p, UpdateModelVersionRequestPb.class); + return UpdateModelVersionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequestPb.java new file mode 100755 index 000000000..61aedba53 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateModelVersionRequestPb { + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("version") + private String version; + + public UpdateModelVersionRequestPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public UpdateModelVersionRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateModelVersionRequestPb setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateModelVersionRequestPb that = (UpdateModelVersionRequestPb) o; + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(description, name, version); + } + + @Override + public String toString() { + return new ToStringer(UpdateModelVersionRequestPb.class) + .add("description", description) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java index acdc9d1cc..49a0148db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateModelVersionResponse.UpdateModelVersionResponseSerializer.class) +@JsonDeserialize(using = UpdateModelVersionResponse.UpdateModelVersionResponseDeserializer.class) public class UpdateModelVersionResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateModelVersionResponse.class).toString(); } + + UpdateModelVersionResponsePb toPb() { + UpdateModelVersionResponsePb pb = new UpdateModelVersionResponsePb(); + + return pb; + } + + static UpdateModelVersionResponse fromPb(UpdateModelVersionResponsePb pb) { + UpdateModelVersionResponse model = new UpdateModelVersionResponse(); + + return model; + } + + public static class UpdateModelVersionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateModelVersionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateModelVersionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateModelVersionResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateModelVersionResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateModelVersionResponsePb pb = mapper.readValue(p, UpdateModelVersionResponsePb.class); + return UpdateModelVersionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponsePb.java new file mode 100755 index 000000000..0b7e19a95 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateModelVersionResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateModelVersionResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java index 285496290..a3388dab1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRegistryWebhook.UpdateRegistryWebhookSerializer.class) +@JsonDeserialize(using = UpdateRegistryWebhook.UpdateRegistryWebhookDeserializer.class) public class UpdateRegistryWebhook { /** User-specified description for the webhook. */ - @JsonProperty("description") private String description; /** @@ -45,19 +55,15 @@ public class UpdateRegistryWebhook { * *

* `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. */ - @JsonProperty("events") private Collection events; /** */ - @JsonProperty("http_url_spec") private HttpUrlSpec httpUrlSpec; /** Webhook ID */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("job_spec") private JobSpec jobSpec; /** @@ -69,7 +75,6 @@ public class UpdateRegistryWebhook { *

* `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a * real event. */ - @JsonProperty("status") private RegistryWebhookStatus status; public UpdateRegistryWebhook setDescription(String description) { @@ -155,4 +160,51 @@ public String toString() { .add("status", status) .toString(); } + + UpdateRegistryWebhookPb toPb() { + UpdateRegistryWebhookPb pb = new UpdateRegistryWebhookPb(); + pb.setDescription(description); + pb.setEvents(events); + pb.setHttpUrlSpec(httpUrlSpec); + pb.setId(id); + pb.setJobSpec(jobSpec); + pb.setStatus(status); + + return pb; + } + + static UpdateRegistryWebhook fromPb(UpdateRegistryWebhookPb pb) { + UpdateRegistryWebhook model = new UpdateRegistryWebhook(); + model.setDescription(pb.getDescription()); + model.setEvents(pb.getEvents()); + model.setHttpUrlSpec(pb.getHttpUrlSpec()); + model.setId(pb.getId()); + model.setJobSpec(pb.getJobSpec()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class UpdateRegistryWebhookSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateRegistryWebhook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRegistryWebhookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRegistryWebhookDeserializer + extends JsonDeserializer { + @Override + public UpdateRegistryWebhook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRegistryWebhookPb pb = mapper.readValue(p, UpdateRegistryWebhookPb.class); + return UpdateRegistryWebhook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhookPb.java new file mode 100755 index 000000000..8273d1999 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhookPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateRegistryWebhookPb { + @JsonProperty("description") + private String description; + + @JsonProperty("events") + private Collection events; + + @JsonProperty("http_url_spec") + private HttpUrlSpec httpUrlSpec; + + @JsonProperty("id") + private String id; + + @JsonProperty("job_spec") + private JobSpec jobSpec; + + @JsonProperty("status") + private RegistryWebhookStatus status; + + public UpdateRegistryWebhookPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public UpdateRegistryWebhookPb setEvents(Collection events) { + this.events = events; + return this; + } + + public Collection getEvents() { + return events; + } + + public UpdateRegistryWebhookPb setHttpUrlSpec(HttpUrlSpec httpUrlSpec) { + this.httpUrlSpec = httpUrlSpec; + return this; + } + + public HttpUrlSpec getHttpUrlSpec() { + return httpUrlSpec; + } + + public UpdateRegistryWebhookPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public 
UpdateRegistryWebhookPb setJobSpec(JobSpec jobSpec) { + this.jobSpec = jobSpec; + return this; + } + + public JobSpec getJobSpec() { + return jobSpec; + } + + public UpdateRegistryWebhookPb setStatus(RegistryWebhookStatus status) { + this.status = status; + return this; + } + + public RegistryWebhookStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRegistryWebhookPb that = (UpdateRegistryWebhookPb) o; + return Objects.equals(description, that.description) + && Objects.equals(events, that.events) + && Objects.equals(httpUrlSpec, that.httpUrlSpec) + && Objects.equals(id, that.id) + && Objects.equals(jobSpec, that.jobSpec) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(description, events, httpUrlSpec, id, jobSpec, status); + } + + @Override + public String toString() { + return new ToStringer(UpdateRegistryWebhookPb.class) + .add("description", description) + .add("events", events) + .add("httpUrlSpec", httpUrlSpec) + .add("id", id) + .add("jobSpec", jobSpec) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java index 738e2d982..e82fb10d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRun.UpdateRunSerializer.class) +@JsonDeserialize(using = UpdateRun.UpdateRunDeserializer.class) public class UpdateRun { /** Unix timestamp in milliseconds of when the run ended. */ - @JsonProperty("end_time") private Long endTime; /** ID of the run to update. Must be provided. */ - @JsonProperty("run_id") private String runId; /** Updated name of the run. */ - @JsonProperty("run_name") private String runName; /** * [Deprecated, use `run_id` instead] ID of the run to update. This field will be removed in a * future MLflow version. */ - @JsonProperty("run_uuid") private String runUuid; /** Updated status of the run. 
*/ - @JsonProperty("status") private UpdateRunStatus status; public UpdateRun setEndTime(Long endTime) { @@ -104,4 +110,45 @@ public String toString() { .add("status", status) .toString(); } + + UpdateRunPb toPb() { + UpdateRunPb pb = new UpdateRunPb(); + pb.setEndTime(endTime); + pb.setRunId(runId); + pb.setRunName(runName); + pb.setRunUuid(runUuid); + pb.setStatus(status); + + return pb; + } + + static UpdateRun fromPb(UpdateRunPb pb) { + UpdateRun model = new UpdateRun(); + model.setEndTime(pb.getEndTime()); + model.setRunId(pb.getRunId()); + model.setRunName(pb.getRunName()); + model.setRunUuid(pb.getRunUuid()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class UpdateRunSerializer extends JsonSerializer { + @Override + public void serialize(UpdateRun value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRunPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRunDeserializer extends JsonDeserializer { + @Override + public UpdateRun deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRunPb pb = mapper.readValue(p, UpdateRunPb.class); + return UpdateRun.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunPb.java new file mode 100755 index 000000000..454780733 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRunPb { + @JsonProperty("end_time") + private Long endTime; + + @JsonProperty("run_id") + private String runId; + + @JsonProperty("run_name") + private String runName; + + @JsonProperty("run_uuid") + private String runUuid; + + @JsonProperty("status") + private UpdateRunStatus status; + + public UpdateRunPb setEndTime(Long endTime) { + this.endTime = endTime; + return this; + } + + public Long getEndTime() { + return endTime; + } + + public UpdateRunPb setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public UpdateRunPb setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + + public UpdateRunPb setRunUuid(String runUuid) { + this.runUuid = runUuid; + return this; + } + + public String getRunUuid() { + return runUuid; + } + + public UpdateRunPb setStatus(UpdateRunStatus status) { + this.status = status; + return this; + } + + public UpdateRunStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRunPb that = (UpdateRunPb) o; + return Objects.equals(endTime, that.endTime) + && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) + && Objects.equals(runUuid, that.runUuid) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(endTime, runId, runName, runUuid, status); + } + + @Override + public String toString() { + return new ToStringer(UpdateRunPb.class) + .add("endTime", endTime) + .add("runId", runId) + .add("runName", runName) + .add("runUuid", runUuid) + 
.add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java index b70a1c041..a44eabf70 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRunResponse.UpdateRunResponseSerializer.class) +@JsonDeserialize(using = UpdateRunResponse.UpdateRunResponseDeserializer.class) public class UpdateRunResponse { /** Updated metadata of the run. 
*/ - @JsonProperty("run_info") private RunInfo runInfo; public UpdateRunResponse setRunInfo(RunInfo runInfo) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateRunResponse.class).add("runInfo", runInfo).toString(); } + + UpdateRunResponsePb toPb() { + UpdateRunResponsePb pb = new UpdateRunResponsePb(); + pb.setRunInfo(runInfo); + + return pb; + } + + static UpdateRunResponse fromPb(UpdateRunResponsePb pb) { + UpdateRunResponse model = new UpdateRunResponse(); + model.setRunInfo(pb.getRunInfo()); + + return model; + } + + public static class UpdateRunResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateRunResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRunResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRunResponseDeserializer extends JsonDeserializer { + @Override + public UpdateRunResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRunResponsePb pb = mapper.readValue(p, UpdateRunResponsePb.class); + return UpdateRunResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponsePb.java new file mode 100755 index 000000000..8f9ceca22 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRunResponsePb { + @JsonProperty("run_info") + private RunInfo runInfo; + + public UpdateRunResponsePb setRunInfo(RunInfo runInfo) { + this.runInfo = runInfo; + return this; + } + + public RunInfo getRunInfo() { + return runInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRunResponsePb that = (UpdateRunResponsePb) o; + return Objects.equals(runInfo, that.runInfo); + } + + @Override + public int hashCode() { + return Objects.hash(runInfo); + } + + @Override + public String toString() { + return new ToStringer(UpdateRunResponsePb.class).add("runInfo", runInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java index 64b5de737..af66adf28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateWebhookResponse.UpdateWebhookResponseSerializer.class) +@JsonDeserialize(using = UpdateWebhookResponse.UpdateWebhookResponseDeserializer.class) public class UpdateWebhookResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateWebhookResponse.class).toString(); } + + UpdateWebhookResponsePb toPb() { + UpdateWebhookResponsePb pb = new UpdateWebhookResponsePb(); + + return pb; + } + + static UpdateWebhookResponse fromPb(UpdateWebhookResponsePb pb) { + UpdateWebhookResponse model = new UpdateWebhookResponse(); + + return model; + } + + public static class UpdateWebhookResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWebhookResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWebhookResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWebhookResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateWebhookResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWebhookResponsePb pb = mapper.readValue(p, UpdateWebhookResponsePb.class); + return UpdateWebhookResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponsePb.java new file mode 100755 index 000000000..b833c8696 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateWebhookResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateWebhookResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java index e61b618f6..f4f4a4ae3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java @@ -22,7 +22,7 @@ public FederationPolicy create(CreateAccountFederationPolicyRequest request) { String.format("/api/2.0/accounts/%s/federationPolicies", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FederationPolicy.class); @@ -39,7 +39,7 @@ public void delete(DeleteAccountFederationPolicyRequest request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public FederationPolicy get(GetAccountFederationPolicyRequest 
request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FederationPolicy.class); } catch (IOException e) { @@ -69,7 +69,7 @@ public ListFederationPoliciesResponse list(ListAccountFederationPoliciesRequest String.format("/api/2.0/accounts/%s/federationPolicies", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFederationPoliciesResponse.class); } catch (IOException e) { @@ -85,7 +85,7 @@ public FederationPolicy update(UpdateAccountFederationPolicyRequest request) { apiClient.configuredAccountID(), request.getPolicyId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FederationPolicy.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/Converters.java new file mode 100755 index 000000000..e6c3b3429 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.oauth2; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration 
durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java index 
f4641952e..27579423c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java @@ -3,17 +3,29 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create account federation policy */ @Generated +@JsonSerialize( + using = + CreateAccountFederationPolicyRequest.CreateAccountFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + CreateAccountFederationPolicyRequest.CreateAccountFederationPolicyRequestDeserializer.class) public class CreateAccountFederationPolicyRequest { /** */ - @JsonProperty("policy") private FederationPolicy policy; /** @@ -21,8 +33,6 @@ public class CreateAccountFederationPolicyRequest { * alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned * by Databricks. 
*/ - @JsonIgnore - @QueryParam("policy_id") private String policyId; public CreateAccountFederationPolicyRequest setPolicy(FederationPolicy policy) { @@ -63,4 +73,44 @@ public String toString() { .add("policyId", policyId) .toString(); } + + CreateAccountFederationPolicyRequestPb toPb() { + CreateAccountFederationPolicyRequestPb pb = new CreateAccountFederationPolicyRequestPb(); + pb.setPolicy(policy); + pb.setPolicyId(policyId); + + return pb; + } + + static CreateAccountFederationPolicyRequest fromPb(CreateAccountFederationPolicyRequestPb pb) { + CreateAccountFederationPolicyRequest model = new CreateAccountFederationPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class CreateAccountFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateAccountFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAccountFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAccountFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateAccountFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAccountFederationPolicyRequestPb pb = + mapper.readValue(p, CreateAccountFederationPolicyRequestPb.class); + return CreateAccountFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequestPb.java new file mode 100755 index 000000000..b303eff1b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create account federation policy */ +@Generated +class CreateAccountFederationPolicyRequestPb { + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore + @QueryParam("policy_id") + private String policyId; + + public CreateAccountFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public CreateAccountFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAccountFederationPolicyRequestPb that = (CreateAccountFederationPolicyRequestPb) o; + return Objects.equals(policy, that.policy) && Objects.equals(policyId, 
that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policy, policyId); + } + + @Override + public String toString() { + return new ToStringer(CreateAccountFederationPolicyRequestPb.class) + .add("policy", policy) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java index a1c47bb8a..4e2d1cde4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java @@ -4,42 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCustomAppIntegration.CreateCustomAppIntegrationSerializer.class) +@JsonDeserialize(using = CreateCustomAppIntegration.CreateCustomAppIntegrationDeserializer.class) public class CreateCustomAppIntegration { /** * This field indicates whether an OAuth client secret is required to authenticate this client. 
*/ - @JsonProperty("confidential") private Boolean confidential; /** Name of the custom OAuth app */ - @JsonProperty("name") private String name; /** List of OAuth redirect urls */ - @JsonProperty("redirect_urls") private Collection redirectUrls; /** * OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, * openid, profile, email. */ - @JsonProperty("scopes") private Collection scopes; /** Token access policy */ - @JsonProperty("token_access_policy") private TokenAccessPolicy tokenAccessPolicy; /** * Scopes that will need to be consented by end user to mint the access token. If the user does * not authorize the access token will not be minted. Must be a subset of scopes. */ - @JsonProperty("user_authorized_scopes") private Collection userAuthorizedScopes; public CreateCustomAppIntegration setConfidential(Boolean confidential) { @@ -127,4 +132,51 @@ public String toString() { .add("userAuthorizedScopes", userAuthorizedScopes) .toString(); } + + CreateCustomAppIntegrationPb toPb() { + CreateCustomAppIntegrationPb pb = new CreateCustomAppIntegrationPb(); + pb.setConfidential(confidential); + pb.setName(name); + pb.setRedirectUrls(redirectUrls); + pb.setScopes(scopes); + pb.setTokenAccessPolicy(tokenAccessPolicy); + pb.setUserAuthorizedScopes(userAuthorizedScopes); + + return pb; + } + + static CreateCustomAppIntegration fromPb(CreateCustomAppIntegrationPb pb) { + CreateCustomAppIntegration model = new CreateCustomAppIntegration(); + model.setConfidential(pb.getConfidential()); + model.setName(pb.getName()); + model.setRedirectUrls(pb.getRedirectUrls()); + model.setScopes(pb.getScopes()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + model.setUserAuthorizedScopes(pb.getUserAuthorizedScopes()); + + return model; + } + + public static class CreateCustomAppIntegrationSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCustomAppIntegration value, JsonGenerator gen, SerializerProvider provider) 
+ throws IOException { + CreateCustomAppIntegrationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCustomAppIntegrationDeserializer + extends JsonDeserializer { + @Override + public CreateCustomAppIntegration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCustomAppIntegrationPb pb = mapper.readValue(p, CreateCustomAppIntegrationPb.class); + return CreateCustomAppIntegration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutput.java index 7ea7eeccf..0ae5a73a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutput.java @@ -4,24 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateCustomAppIntegrationOutput.CreateCustomAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = 
CreateCustomAppIntegrationOutput.CreateCustomAppIntegrationOutputDeserializer.class) public class CreateCustomAppIntegrationOutput { /** OAuth client-id generated by the Databricks */ - @JsonProperty("client_id") private String clientId; /** * OAuth client-secret generated by the Databricks. If this is a confidential OAuth app * client-secret will be generated. */ - @JsonProperty("client_secret") private String clientSecret; /** Unique integration id for the custom OAuth app */ - @JsonProperty("integration_id") private String integrationId; public CreateCustomAppIntegrationOutput setClientId(String clientId) { @@ -74,4 +84,46 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + CreateCustomAppIntegrationOutputPb toPb() { + CreateCustomAppIntegrationOutputPb pb = new CreateCustomAppIntegrationOutputPb(); + pb.setClientId(clientId); + pb.setClientSecret(clientSecret); + pb.setIntegrationId(integrationId); + + return pb; + } + + static CreateCustomAppIntegrationOutput fromPb(CreateCustomAppIntegrationOutputPb pb) { + CreateCustomAppIntegrationOutput model = new CreateCustomAppIntegrationOutput(); + model.setClientId(pb.getClientId()); + model.setClientSecret(pb.getClientSecret()); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class CreateCustomAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCustomAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCustomAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCustomAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public CreateCustomAppIntegrationOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCustomAppIntegrationOutputPb pb = + mapper.readValue(p, CreateCustomAppIntegrationOutputPb.class); + return CreateCustomAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutputPb.java new file mode 100755 index 000000000..f9101e57f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationOutputPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCustomAppIntegrationOutputPb { + @JsonProperty("client_id") + private String clientId; + + @JsonProperty("client_secret") + private String clientSecret; + + @JsonProperty("integration_id") + private String integrationId; + + public CreateCustomAppIntegrationOutputPb setClientId(String clientId) { + this.clientId = clientId; + return this; + } + + public String getClientId() { + return clientId; + } + + public CreateCustomAppIntegrationOutputPb setClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + return this; + } + + public String getClientSecret() { + return clientSecret; + } + + public CreateCustomAppIntegrationOutputPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCustomAppIntegrationOutputPb that = 
(CreateCustomAppIntegrationOutputPb) o; + return Objects.equals(clientId, that.clientId) + && Objects.equals(clientSecret, that.clientSecret) + && Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(clientId, clientSecret, integrationId); + } + + @Override + public String toString() { + return new ToStringer(CreateCustomAppIntegrationOutputPb.class) + .add("clientId", clientId) + .add("clientSecret", clientSecret) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationPb.java new file mode 100755 index 000000000..3a6a20af0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegrationPb.java @@ -0,0 +1,116 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateCustomAppIntegrationPb { + @JsonProperty("confidential") + private Boolean confidential; + + @JsonProperty("name") + private String name; + + @JsonProperty("redirect_urls") + private Collection redirectUrls; + + @JsonProperty("scopes") + private Collection scopes; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + @JsonProperty("user_authorized_scopes") + private Collection userAuthorizedScopes; + + public CreateCustomAppIntegrationPb setConfidential(Boolean confidential) { + this.confidential = confidential; + return this; + } + + public Boolean getConfidential() { + return confidential; + } + + public CreateCustomAppIntegrationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCustomAppIntegrationPb setRedirectUrls(Collection redirectUrls) { + this.redirectUrls = redirectUrls; + return this; + } + + public Collection getRedirectUrls() { + return redirectUrls; + } + + public CreateCustomAppIntegrationPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + public CreateCustomAppIntegrationPb setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + public CreateCustomAppIntegrationPb setUserAuthorizedScopes( + Collection userAuthorizedScopes) { + this.userAuthorizedScopes = userAuthorizedScopes; + return this; + } + + public Collection getUserAuthorizedScopes() { + return userAuthorizedScopes; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCustomAppIntegrationPb that = (CreateCustomAppIntegrationPb) o; + return Objects.equals(confidential, that.confidential) + && Objects.equals(name, that.name) + && Objects.equals(redirectUrls, that.redirectUrls) + && Objects.equals(scopes, that.scopes) + && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy) + && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes); + } + + @Override + public int hashCode() { + return Objects.hash( + confidential, name, redirectUrls, scopes, tokenAccessPolicy, userAuthorizedScopes); + } + + @Override + public String toString() { + return new ToStringer(CreateCustomAppIntegrationPb.class) + .add("confidential", confidential) + .add("name", name) + .add("redirectUrls", redirectUrls) + .add("scopes", scopes) + .add("tokenAccessPolicy", tokenAccessPolicy) + .add("userAuthorizedScopes", userAuthorizedScopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegration.java index d26b2c24e..6f2209b66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegration.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePublishedAppIntegration.CreatePublishedAppIntegrationSerializer.class) +@JsonDeserialize( + using = CreatePublishedAppIntegration.CreatePublishedAppIntegrationDeserializer.class) public class CreatePublishedAppIntegration { /** App id of the OAuth published app integration. For example power-bi, tableau-deskop */ - @JsonProperty("app_id") private String appId; /** Token access policy */ - @JsonProperty("token_access_policy") private TokenAccessPolicy tokenAccessPolicy; public CreatePublishedAppIntegration setAppId(String appId) { @@ -56,4 +66,44 @@ public String toString() { .add("tokenAccessPolicy", tokenAccessPolicy) .toString(); } + + CreatePublishedAppIntegrationPb toPb() { + CreatePublishedAppIntegrationPb pb = new CreatePublishedAppIntegrationPb(); + pb.setAppId(appId); + pb.setTokenAccessPolicy(tokenAccessPolicy); + + return pb; + } + + static CreatePublishedAppIntegration fromPb(CreatePublishedAppIntegrationPb pb) { + CreatePublishedAppIntegration model = new CreatePublishedAppIntegration(); + model.setAppId(pb.getAppId()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + + return model; + } + + public static class CreatePublishedAppIntegrationSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePublishedAppIntegration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePublishedAppIntegrationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePublishedAppIntegrationDeserializer + extends JsonDeserializer { + @Override + public CreatePublishedAppIntegration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us 
in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePublishedAppIntegrationPb pb = + mapper.readValue(p, CreatePublishedAppIntegrationPb.class); + return CreatePublishedAppIntegration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutput.java index 669a9d976..164c9eeb9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutput.java @@ -4,13 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreatePublishedAppIntegrationOutput.CreatePublishedAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = + CreatePublishedAppIntegrationOutput.CreatePublishedAppIntegrationOutputDeserializer.class) public class CreatePublishedAppIntegrationOutput { /** Unique integration id for the published OAuth app */ - @JsonProperty("integration_id") private String integrationId; public CreatePublishedAppIntegrationOutput setIntegrationId(String 
integrationId) { @@ -41,4 +54,42 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + CreatePublishedAppIntegrationOutputPb toPb() { + CreatePublishedAppIntegrationOutputPb pb = new CreatePublishedAppIntegrationOutputPb(); + pb.setIntegrationId(integrationId); + + return pb; + } + + static CreatePublishedAppIntegrationOutput fromPb(CreatePublishedAppIntegrationOutputPb pb) { + CreatePublishedAppIntegrationOutput model = new CreatePublishedAppIntegrationOutput(); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class CreatePublishedAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePublishedAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePublishedAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePublishedAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public CreatePublishedAppIntegrationOutput deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePublishedAppIntegrationOutputPb pb = + mapper.readValue(p, CreatePublishedAppIntegrationOutputPb.class); + return CreatePublishedAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutputPb.java new file mode 100755 index 000000000..a0eaa3750 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationOutputPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePublishedAppIntegrationOutputPb { + @JsonProperty("integration_id") + private String integrationId; + + public CreatePublishedAppIntegrationOutputPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePublishedAppIntegrationOutputPb that = (CreatePublishedAppIntegrationOutputPb) o; + return Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId); + } + + @Override + public String toString() { + return new ToStringer(CreatePublishedAppIntegrationOutputPb.class) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationPb.java new file mode 100755 index 000000000..2494c6a92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreatePublishedAppIntegrationPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePublishedAppIntegrationPb { + @JsonProperty("app_id") + private String appId; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + public CreatePublishedAppIntegrationPb setAppId(String appId) { + this.appId = appId; + return this; + } + + public String getAppId() { + return appId; + } + + public CreatePublishedAppIntegrationPb setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePublishedAppIntegrationPb that = (CreatePublishedAppIntegrationPb) o; + return Objects.equals(appId, that.appId) + && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(appId, tokenAccessPolicy); + } + + @Override + public String toString() { + return new ToStringer(CreatePublishedAppIntegrationPb.class) + .add("appId", appId) + .add("tokenAccessPolicy", tokenAccessPolicy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java index 517a5f08e..6dd3d2f56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java @@ -3,17 
+3,31 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create service principal federation policy */ @Generated +@JsonSerialize( + using = + CreateServicePrincipalFederationPolicyRequest + .CreateServicePrincipalFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + CreateServicePrincipalFederationPolicyRequest + .CreateServicePrincipalFederationPolicyRequestDeserializer.class) public class CreateServicePrincipalFederationPolicyRequest { /** */ - @JsonProperty("policy") private FederationPolicy policy; /** @@ -21,12 +35,10 @@ public class CreateServicePrincipalFederationPolicyRequest { * alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned * by Databricks. */ - @JsonIgnore - @QueryParam("policy_id") private String policyId; /** The service principal id for the federation policy. 
*/ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public CreateServicePrincipalFederationPolicyRequest setPolicy(FederationPolicy policy) { this.policy = policy; @@ -80,4 +92,51 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + CreateServicePrincipalFederationPolicyRequestPb toPb() { + CreateServicePrincipalFederationPolicyRequestPb pb = + new CreateServicePrincipalFederationPolicyRequestPb(); + pb.setPolicy(policy); + pb.setPolicyId(policyId); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static CreateServicePrincipalFederationPolicyRequest fromPb( + CreateServicePrincipalFederationPolicyRequestPb pb) { + CreateServicePrincipalFederationPolicyRequest model = + new CreateServicePrincipalFederationPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setPolicyId(pb.getPolicyId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class CreateServicePrincipalFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateServicePrincipalFederationPolicyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateServicePrincipalFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateServicePrincipalFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateServicePrincipalFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateServicePrincipalFederationPolicyRequestPb pb = + mapper.readValue(p, CreateServicePrincipalFederationPolicyRequestPb.class); + return CreateServicePrincipalFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequestPb.java new file mode 100755 index 000000000..894adcf88 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create service principal federation policy */ +@Generated +class CreateServicePrincipalFederationPolicyRequestPb { + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore + @QueryParam("policy_id") + private String policyId; + + @JsonIgnore private Long servicePrincipalId; + + public CreateServicePrincipalFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public CreateServicePrincipalFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public CreateServicePrincipalFederationPolicyRequestPb setServicePrincipalId( + Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return 
this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateServicePrincipalFederationPolicyRequestPb that = + (CreateServicePrincipalFederationPolicyRequestPb) o; + return Objects.equals(policy, that.policy) + && Objects.equals(policyId, that.policyId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(policy, policyId, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(CreateServicePrincipalFederationPolicyRequestPb.class) + .add("policy", policy) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java index ea5a42d69..31a99df40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java @@ -4,21 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateServicePrincipalSecretRequest.CreateServicePrincipalSecretRequestSerializer.class) +@JsonDeserialize( + using = + CreateServicePrincipalSecretRequest.CreateServicePrincipalSecretRequestDeserializer.class) public class CreateServicePrincipalSecretRequest { /** * The lifetime of the secret in seconds. If this parameter is not provided, the secret will have * a default lifetime of 730 days (63072000s). */ - @JsonProperty("lifetime") private String lifetime; /** The service principal ID. */ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public CreateServicePrincipalSecretRequest setLifetime(String lifetime) { this.lifetime = lifetime; @@ -59,4 +71,44 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + CreateServicePrincipalSecretRequestPb toPb() { + CreateServicePrincipalSecretRequestPb pb = new CreateServicePrincipalSecretRequestPb(); + pb.setLifetime(lifetime); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static CreateServicePrincipalSecretRequest fromPb(CreateServicePrincipalSecretRequestPb pb) { + CreateServicePrincipalSecretRequest model = new CreateServicePrincipalSecretRequest(); + model.setLifetime(pb.getLifetime()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class CreateServicePrincipalSecretRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateServicePrincipalSecretRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateServicePrincipalSecretRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateServicePrincipalSecretRequestDeserializer + extends 
JsonDeserializer { + @Override + public CreateServicePrincipalSecretRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateServicePrincipalSecretRequestPb pb = + mapper.readValue(p, CreateServicePrincipalSecretRequestPb.class); + return CreateServicePrincipalSecretRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequestPb.java new file mode 100755 index 000000000..6cc833249 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateServicePrincipalSecretRequestPb { + @JsonProperty("lifetime") + private String lifetime; + + @JsonIgnore private Long servicePrincipalId; + + public CreateServicePrincipalSecretRequestPb setLifetime(String lifetime) { + this.lifetime = lifetime; + return this; + } + + public String getLifetime() { + return lifetime; + } + + public CreateServicePrincipalSecretRequestPb setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CreateServicePrincipalSecretRequestPb that = (CreateServicePrincipalSecretRequestPb) o; + return Objects.equals(lifetime, that.lifetime) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(lifetime, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(CreateServicePrincipalSecretRequestPb.class) + .add("lifetime", lifetime) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java index 639e5889f..2874ebcda 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java @@ -4,39 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateServicePrincipalSecretResponse.CreateServicePrincipalSecretResponseSerializer.class) +@JsonDeserialize( + using = + 
CreateServicePrincipalSecretResponse.CreateServicePrincipalSecretResponseDeserializer.class) public class CreateServicePrincipalSecretResponse { /** UTC time when the secret was created */ - @JsonProperty("create_time") private String createTime; /** * UTC time when the secret will expire. If the field is not present, the secret does not expire. */ - @JsonProperty("expire_time") private String expireTime; /** ID of the secret */ - @JsonProperty("id") private String id; /** Secret Value */ - @JsonProperty("secret") private String secret; /** Secret Hash */ - @JsonProperty("secret_hash") private String secretHash; /** Status of the secret */ - @JsonProperty("status") private String status; /** UTC time when the secret was updated */ - @JsonProperty("update_time") private String updateTime; public CreateServicePrincipalSecretResponse setCreateTime(String createTime) { @@ -133,4 +141,54 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + CreateServicePrincipalSecretResponsePb toPb() { + CreateServicePrincipalSecretResponsePb pb = new CreateServicePrincipalSecretResponsePb(); + pb.setCreateTime(createTime); + pb.setExpireTime(expireTime); + pb.setId(id); + pb.setSecret(secret); + pb.setSecretHash(secretHash); + pb.setStatus(status); + pb.setUpdateTime(updateTime); + + return pb; + } + + static CreateServicePrincipalSecretResponse fromPb(CreateServicePrincipalSecretResponsePb pb) { + CreateServicePrincipalSecretResponse model = new CreateServicePrincipalSecretResponse(); + model.setCreateTime(pb.getCreateTime()); + model.setExpireTime(pb.getExpireTime()); + model.setId(pb.getId()); + model.setSecret(pb.getSecret()); + model.setSecretHash(pb.getSecretHash()); + model.setStatus(pb.getStatus()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class CreateServicePrincipalSecretResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateServicePrincipalSecretResponse value, 
JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateServicePrincipalSecretResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateServicePrincipalSecretResponseDeserializer + extends JsonDeserializer { + @Override + public CreateServicePrincipalSecretResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateServicePrincipalSecretResponsePb pb = + mapper.readValue(p, CreateServicePrincipalSecretResponsePb.class); + return CreateServicePrincipalSecretResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponsePb.java new file mode 100755 index 000000000..017c95432 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponsePb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateServicePrincipalSecretResponsePb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("expire_time") + private String expireTime; + + @JsonProperty("id") + private String id; + + @JsonProperty("secret") + private String secret; + + @JsonProperty("secret_hash") + private String secretHash; + + @JsonProperty("status") + private String status; + + @JsonProperty("update_time") + private String updateTime; + + public CreateServicePrincipalSecretResponsePb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public CreateServicePrincipalSecretResponsePb setExpireTime(String expireTime) { + this.expireTime = expireTime; + return this; + } + + public String getExpireTime() { + return expireTime; + } + + public CreateServicePrincipalSecretResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CreateServicePrincipalSecretResponsePb setSecret(String secret) { + this.secret = secret; + return this; + } + + public String getSecret() { + return secret; + } + + public CreateServicePrincipalSecretResponsePb setSecretHash(String secretHash) { + this.secretHash = secretHash; + return this; + } + + public String getSecretHash() { + return secretHash; + } + + public CreateServicePrincipalSecretResponsePb setStatus(String status) { + this.status = status; + return this; + } + + public String getStatus() { + return status; + } + + public CreateServicePrincipalSecretResponsePb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateServicePrincipalSecretResponsePb that = (CreateServicePrincipalSecretResponsePb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(expireTime, that.expireTime) + && Objects.equals(id, that.id) + && Objects.equals(secret, that.secret) + && Objects.equals(secretHash, that.secretHash) + && Objects.equals(status, that.status) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash(createTime, expireTime, id, secret, secretHash, status, updateTime); + } + + @Override + public String toString() { + return new ToStringer(CreateServicePrincipalSecretResponsePb.class) + .add("createTime", createTime) + .add("expireTime", expireTime) + .add("id", id) + .add("secret", secret) + .add("secretHash", secretHash) + .add("status", status) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java index 3aa7acba1..ebfc46467 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java @@ -23,7 +23,7 @@ public CreateCustomAppIntegrationOutput create(CreateCustomAppIntegration reques "/api/2.0/accounts/%s/oauth2/custom-app-integrations", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateCustomAppIntegrationOutput.class); @@ -40,7 +40,7 @@ public void 
delete(DeleteCustomAppIntegrationRequest request) { apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteCustomAppIntegrationOutput.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public GetCustomAppIntegrationOutput get(GetCustomAppIntegrationRequest request) apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetCustomAppIntegrationOutput.class); } catch (IOException e) { @@ -71,7 +71,7 @@ public GetCustomAppIntegrationsOutput list(ListCustomAppIntegrationsRequest requ "/api/2.0/accounts/%s/oauth2/custom-app-integrations", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetCustomAppIntegrationsOutput.class); } catch (IOException e) { @@ -87,7 +87,7 @@ public void update(UpdateCustomAppIntegration request) { apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateCustomAppIntegrationOutput.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java index ab0bb9b5b..271fac7e1 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete account federation policy */ @Generated +@JsonSerialize( + using = + DeleteAccountFederationPolicyRequest.DeleteAccountFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + DeleteAccountFederationPolicyRequest.DeleteAccountFederationPolicyRequestDeserializer.class) public class DeleteAccountFederationPolicyRequest { /** The identifier for the federation policy. 
*/ - @JsonIgnore private String policyId; + private String policyId; public DeleteAccountFederationPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -41,4 +56,42 @@ public String toString() { .add("policyId", policyId) .toString(); } + + DeleteAccountFederationPolicyRequestPb toPb() { + DeleteAccountFederationPolicyRequestPb pb = new DeleteAccountFederationPolicyRequestPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static DeleteAccountFederationPolicyRequest fromPb(DeleteAccountFederationPolicyRequestPb pb) { + DeleteAccountFederationPolicyRequest model = new DeleteAccountFederationPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class DeleteAccountFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountFederationPolicyRequestPb pb = + mapper.readValue(p, DeleteAccountFederationPolicyRequestPb.class); + return DeleteAccountFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequestPb.java new file mode 100755 index 000000000..fddbae98d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete account federation policy */ +@Generated +class DeleteAccountFederationPolicyRequestPb { + @JsonIgnore private String policyId; + + public DeleteAccountFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountFederationPolicyRequestPb that = (DeleteAccountFederationPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountFederationPolicyRequestPb.class) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java index 9739ec0e8..1c86413b1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DeleteCustomAppIntegrationOutput.DeleteCustomAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = DeleteCustomAppIntegrationOutput.DeleteCustomAppIntegrationOutputDeserializer.class) public class DeleteCustomAppIntegrationOutput { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCustomAppIntegrationOutput.class).toString(); } + + DeleteCustomAppIntegrationOutputPb toPb() { + DeleteCustomAppIntegrationOutputPb pb = new DeleteCustomAppIntegrationOutputPb(); + + return pb; + } + + static DeleteCustomAppIntegrationOutput fromPb(DeleteCustomAppIntegrationOutputPb pb) { + DeleteCustomAppIntegrationOutput model = new DeleteCustomAppIntegrationOutput(); + + return model; + } + + public static class DeleteCustomAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCustomAppIntegrationOutput 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCustomAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCustomAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public DeleteCustomAppIntegrationOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCustomAppIntegrationOutputPb pb = + mapper.readValue(p, DeleteCustomAppIntegrationOutputPb.class); + return DeleteCustomAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutputPb.java new file mode 100755 index 000000000..38afc0d8c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutputPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteCustomAppIntegrationOutputPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCustomAppIntegrationOutputPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java index 39e0cecb4..b3f15d3cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete Custom OAuth App Integration */ @Generated +@JsonSerialize( + using = DeleteCustomAppIntegrationRequest.DeleteCustomAppIntegrationRequestSerializer.class) +@JsonDeserialize( + using = 
DeleteCustomAppIntegrationRequest.DeleteCustomAppIntegrationRequestDeserializer.class) public class DeleteCustomAppIntegrationRequest { /** */ - @JsonIgnore private String integrationId; + private String integrationId; public DeleteCustomAppIntegrationRequest setIntegrationId(String integrationId) { this.integrationId = integrationId; @@ -41,4 +54,42 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + DeleteCustomAppIntegrationRequestPb toPb() { + DeleteCustomAppIntegrationRequestPb pb = new DeleteCustomAppIntegrationRequestPb(); + pb.setIntegrationId(integrationId); + + return pb; + } + + static DeleteCustomAppIntegrationRequest fromPb(DeleteCustomAppIntegrationRequestPb pb) { + DeleteCustomAppIntegrationRequest model = new DeleteCustomAppIntegrationRequest(); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class DeleteCustomAppIntegrationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCustomAppIntegrationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCustomAppIntegrationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCustomAppIntegrationRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCustomAppIntegrationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCustomAppIntegrationRequestPb pb = + mapper.readValue(p, DeleteCustomAppIntegrationRequestPb.class); + return DeleteCustomAppIntegrationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequestPb.java new file mode 100755 index 000000000..a7f03e946 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete Custom OAuth App Integration */ +@Generated +class DeleteCustomAppIntegrationRequestPb { + @JsonIgnore private String integrationId; + + public DeleteCustomAppIntegrationRequestPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCustomAppIntegrationRequestPb that = (DeleteCustomAppIntegrationRequestPb) o; + return Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId); + } + + @Override + public String toString() { + return new ToStringer(DeleteCustomAppIntegrationRequestPb.class) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java index cbc352c7d..662c127ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java @@ -4,9 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DeletePublishedAppIntegrationOutput.DeletePublishedAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = + DeletePublishedAppIntegrationOutput.DeletePublishedAppIntegrationOutputDeserializer.class) public class DeletePublishedAppIntegrationOutput { @Override @@ -25,4 +40,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePublishedAppIntegrationOutput.class).toString(); } + + DeletePublishedAppIntegrationOutputPb toPb() { + DeletePublishedAppIntegrationOutputPb pb = new DeletePublishedAppIntegrationOutputPb(); + + return pb; + } + + static DeletePublishedAppIntegrationOutput fromPb(DeletePublishedAppIntegrationOutputPb pb) { + DeletePublishedAppIntegrationOutput model = new DeletePublishedAppIntegrationOutput(); + + return model; + } + + public static class DeletePublishedAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public 
void serialize( + DeletePublishedAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePublishedAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePublishedAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public DeletePublishedAppIntegrationOutput deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePublishedAppIntegrationOutputPb pb = + mapper.readValue(p, DeletePublishedAppIntegrationOutputPb.class); + return DeletePublishedAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutputPb.java new file mode 100755 index 000000000..e1fea0d1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutputPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeletePublishedAppIntegrationOutputPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeletePublishedAppIntegrationOutputPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java index 9c4bb5563..a81ffb041 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete Published OAuth App Integration */ @Generated +@JsonSerialize( + using = + DeletePublishedAppIntegrationRequest.DeletePublishedAppIntegrationRequestSerializer.class) 
+@JsonDeserialize( + using = + DeletePublishedAppIntegrationRequest.DeletePublishedAppIntegrationRequestDeserializer.class) public class DeletePublishedAppIntegrationRequest { /** */ - @JsonIgnore private String integrationId; + private String integrationId; public DeletePublishedAppIntegrationRequest setIntegrationId(String integrationId) { this.integrationId = integrationId; @@ -41,4 +56,42 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + DeletePublishedAppIntegrationRequestPb toPb() { + DeletePublishedAppIntegrationRequestPb pb = new DeletePublishedAppIntegrationRequestPb(); + pb.setIntegrationId(integrationId); + + return pb; + } + + static DeletePublishedAppIntegrationRequest fromPb(DeletePublishedAppIntegrationRequestPb pb) { + DeletePublishedAppIntegrationRequest model = new DeletePublishedAppIntegrationRequest(); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class DeletePublishedAppIntegrationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePublishedAppIntegrationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePublishedAppIntegrationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePublishedAppIntegrationRequestDeserializer + extends JsonDeserializer { + @Override + public DeletePublishedAppIntegrationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePublishedAppIntegrationRequestPb pb = + mapper.readValue(p, DeletePublishedAppIntegrationRequestPb.class); + return DeletePublishedAppIntegrationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequestPb.java new file mode 100755 index 000000000..826191484 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete Published OAuth App Integration */ +@Generated +class DeletePublishedAppIntegrationRequestPb { + @JsonIgnore private String integrationId; + + public DeletePublishedAppIntegrationRequestPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePublishedAppIntegrationRequestPb that = (DeletePublishedAppIntegrationRequestPb) o; + return Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId); + } + + @Override + public String toString() { + return new ToStringer(DeletePublishedAppIntegrationRequestPb.class) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java index 6c5276a49..0fbc7c85b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) 
+ throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponsePb.java new file mode 100755 index 000000000..a1a404f15 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java index 63b5dedae..91f679c82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java @@ -4,17 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete service principal federation policy */ @Generated +@JsonSerialize( + using = + DeleteServicePrincipalFederationPolicyRequest + .DeleteServicePrincipalFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + DeleteServicePrincipalFederationPolicyRequest + .DeleteServicePrincipalFederationPolicyRequestDeserializer.class) public class DeleteServicePrincipalFederationPolicyRequest { /** The identifier for the federation policy. */ - @JsonIgnore private String policyId; + private String policyId; /** The service principal id for the federation policy. 
*/ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public DeleteServicePrincipalFederationPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -57,4 +74,49 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + DeleteServicePrincipalFederationPolicyRequestPb toPb() { + DeleteServicePrincipalFederationPolicyRequestPb pb = + new DeleteServicePrincipalFederationPolicyRequestPb(); + pb.setPolicyId(policyId); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static DeleteServicePrincipalFederationPolicyRequest fromPb( + DeleteServicePrincipalFederationPolicyRequestPb pb) { + DeleteServicePrincipalFederationPolicyRequest model = + new DeleteServicePrincipalFederationPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class DeleteServicePrincipalFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteServicePrincipalFederationPolicyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteServicePrincipalFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteServicePrincipalFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteServicePrincipalFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteServicePrincipalFederationPolicyRequestPb pb = + mapper.readValue(p, DeleteServicePrincipalFederationPolicyRequestPb.class); + return DeleteServicePrincipalFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequestPb.java new file mode 100755 index 000000000..36b52fe72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete service principal federation policy */ +@Generated +class DeleteServicePrincipalFederationPolicyRequestPb { + @JsonIgnore private String policyId; + + @JsonIgnore private Long servicePrincipalId; + + public DeleteServicePrincipalFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public DeleteServicePrincipalFederationPolicyRequestPb setServicePrincipalId( + Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServicePrincipalFederationPolicyRequestPb that = + (DeleteServicePrincipalFederationPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId) + && 
Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteServicePrincipalFederationPolicyRequestPb.class) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java index 8968c06d7..d99681307 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java @@ -4,17 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete service principal secret */ @Generated +@JsonSerialize( + using = DeleteServicePrincipalSecretRequest.DeleteServicePrincipalSecretRequestSerializer.class) +@JsonDeserialize( + using = + DeleteServicePrincipalSecretRequest.DeleteServicePrincipalSecretRequestDeserializer.class) public class DeleteServicePrincipalSecretRequest { /** The secret ID. 
*/ - @JsonIgnore private String secretId; + private String secretId; /** The service principal ID. */ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public DeleteServicePrincipalSecretRequest setSecretId(String secretId) { this.secretId = secretId; @@ -55,4 +69,44 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + DeleteServicePrincipalSecretRequestPb toPb() { + DeleteServicePrincipalSecretRequestPb pb = new DeleteServicePrincipalSecretRequestPb(); + pb.setSecretId(secretId); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static DeleteServicePrincipalSecretRequest fromPb(DeleteServicePrincipalSecretRequestPb pb) { + DeleteServicePrincipalSecretRequest model = new DeleteServicePrincipalSecretRequest(); + model.setSecretId(pb.getSecretId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class DeleteServicePrincipalSecretRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteServicePrincipalSecretRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteServicePrincipalSecretRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteServicePrincipalSecretRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteServicePrincipalSecretRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteServicePrincipalSecretRequestPb pb = + mapper.readValue(p, DeleteServicePrincipalSecretRequestPb.class); + return DeleteServicePrincipalSecretRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequestPb.java new file mode 100755 index 000000000..269670ce3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete service principal secret */ +@Generated +class DeleteServicePrincipalSecretRequestPb { + @JsonIgnore private String secretId; + + @JsonIgnore private Long servicePrincipalId; + + public DeleteServicePrincipalSecretRequestPb setSecretId(String secretId) { + this.secretId = secretId; + return this; + } + + public String getSecretId() { + return secretId; + } + + public DeleteServicePrincipalSecretRequestPb setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServicePrincipalSecretRequestPb that = (DeleteServicePrincipalSecretRequestPb) o; + return Objects.equals(secretId, that.secretId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return 
Objects.hash(secretId, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteServicePrincipalSecretRequestPb.class) + .add("secretId", secretId) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java index 18db0601f..f80d86783 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FederationPolicy.FederationPolicySerializer.class) +@JsonDeserialize(using = FederationPolicy.FederationPolicyDeserializer.class) public class FederationPolicy { /** Creation time of the federation policy. */ - @JsonProperty("create_time") private String createTime; /** Description of the federation policy. */ - @JsonProperty("description") private String description; /** @@ -26,30 +35,24 @@ public class FederationPolicy { * to be specified in create or update requests. If specified in a request, must match the value * in the request URL. 
*/ - @JsonProperty("name") private String name; /** Specifies the policy to use for validating OIDC claims in your federated tokens. */ - @JsonProperty("oidc_policy") private OidcFederationPolicy oidcPolicy; /** The ID of the federation policy. */ - @JsonProperty("policy_id") private String policyId; /** * The service principal ID that this federation policy applies to. Only set for service principal * federation policies. */ - @JsonProperty("service_principal_id") private Long servicePrincipalId; /** Unique, immutable id of the federation policy. */ - @JsonProperty("uid") private String uid; /** Last update time of the federation policy. */ - @JsonProperty("update_time") private String updateTime; public FederationPolicy setCreateTime(String createTime) { @@ -158,4 +161,52 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + FederationPolicyPb toPb() { + FederationPolicyPb pb = new FederationPolicyPb(); + pb.setCreateTime(createTime); + pb.setDescription(description); + pb.setName(name); + pb.setOidcPolicy(oidcPolicy); + pb.setPolicyId(policyId); + pb.setServicePrincipalId(servicePrincipalId); + pb.setUid(uid); + pb.setUpdateTime(updateTime); + + return pb; + } + + static FederationPolicy fromPb(FederationPolicyPb pb) { + FederationPolicy model = new FederationPolicy(); + model.setCreateTime(pb.getCreateTime()); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setOidcPolicy(pb.getOidcPolicy()); + model.setPolicyId(pb.getPolicyId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + model.setUid(pb.getUid()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class FederationPolicySerializer extends JsonSerializer { + @Override + public void serialize(FederationPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FederationPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
FederationPolicyDeserializer extends JsonDeserializer { + @Override + public FederationPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FederationPolicyPb pb = mapper.readValue(p, FederationPolicyPb.class); + return FederationPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicyPb.java new file mode 100755 index 000000000..2dafa6adb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicyPb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FederationPolicyPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("oidc_policy") + private OidcFederationPolicy oidcPolicy; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("service_principal_id") + private Long servicePrincipalId; + + @JsonProperty("uid") + private String uid; + + @JsonProperty("update_time") + private String updateTime; + + public FederationPolicyPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public FederationPolicyPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } 
+ + public FederationPolicyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FederationPolicyPb setOidcPolicy(OidcFederationPolicy oidcPolicy) { + this.oidcPolicy = oidcPolicy; + return this; + } + + public OidcFederationPolicy getOidcPolicy() { + return oidcPolicy; + } + + public FederationPolicyPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public FederationPolicyPb setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + public FederationPolicyPb setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + public FederationPolicyPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FederationPolicyPb that = (FederationPolicyPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(oidcPolicy, that.oidcPolicy) + && Objects.equals(policyId, that.policyId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId) + && Objects.equals(uid, that.uid) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, description, name, oidcPolicy, policyId, servicePrincipalId, uid, updateTime); + } + + @Override + public String toString() { + return new ToStringer(FederationPolicyPb.class) + .add("createTime", createTime) + .add("description", description) + .add("name", name) + .add("oidcPolicy", 
oidcPolicy) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) + .add("uid", uid) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java index 2637cf8f6..068515ed0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get account federation policy */ @Generated +@JsonSerialize( + using = GetAccountFederationPolicyRequest.GetAccountFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = GetAccountFederationPolicyRequest.GetAccountFederationPolicyRequestDeserializer.class) public class GetAccountFederationPolicyRequest { /** The identifier for the federation policy. 
*/ - @JsonIgnore private String policyId; + private String policyId; public GetAccountFederationPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -41,4 +54,42 @@ public String toString() { .add("policyId", policyId) .toString(); } + + GetAccountFederationPolicyRequestPb toPb() { + GetAccountFederationPolicyRequestPb pb = new GetAccountFederationPolicyRequestPb(); + pb.setPolicyId(policyId); + + return pb; + } + + static GetAccountFederationPolicyRequest fromPb(GetAccountFederationPolicyRequestPb pb) { + GetAccountFederationPolicyRequest model = new GetAccountFederationPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + + return model; + } + + public static class GetAccountFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountFederationPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountFederationPolicyRequestPb pb = + mapper.readValue(p, GetAccountFederationPolicyRequestPb.class); + return GetAccountFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequestPb.java new file mode 100755 index 000000000..d8536c286 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get account federation policy */ +@Generated +class GetAccountFederationPolicyRequestPb { + @JsonIgnore private String policyId; + + public GetAccountFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountFederationPolicyRequestPb that = (GetAccountFederationPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(GetAccountFederationPolicyRequestPb.class) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java index b085c10d1..b683fd175 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java @@ -4,59 +4,60 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetCustomAppIntegrationOutput.GetCustomAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = GetCustomAppIntegrationOutput.GetCustomAppIntegrationOutputDeserializer.class) public class GetCustomAppIntegrationOutput { /** The client id of the custom OAuth app */ - @JsonProperty("client_id") private String clientId; /** * This field indicates whether an OAuth client secret is required to authenticate this client. 
*/ - @JsonProperty("confidential") private Boolean confidential; /** */ - @JsonProperty("create_time") private String createTime; /** */ - @JsonProperty("created_by") private Long createdBy; /** */ - @JsonProperty("creator_username") private String creatorUsername; /** ID of this custom app */ - @JsonProperty("integration_id") private String integrationId; /** The display name of the custom OAuth app */ - @JsonProperty("name") private String name; /** List of OAuth redirect urls */ - @JsonProperty("redirect_urls") private Collection redirectUrls; /** */ - @JsonProperty("scopes") private Collection scopes; /** Token access policy */ - @JsonProperty("token_access_policy") private TokenAccessPolicy tokenAccessPolicy; /** * Scopes that will need to be consented by end user to mint the access token. If the user does * not authorize the access token will not be minted. Must be a subset of scopes. */ - @JsonProperty("user_authorized_scopes") private Collection userAuthorizedScopes; public GetCustomAppIntegrationOutput setClientId(String clientId) { @@ -209,4 +210,62 @@ public String toString() { .add("userAuthorizedScopes", userAuthorizedScopes) .toString(); } + + GetCustomAppIntegrationOutputPb toPb() { + GetCustomAppIntegrationOutputPb pb = new GetCustomAppIntegrationOutputPb(); + pb.setClientId(clientId); + pb.setConfidential(confidential); + pb.setCreateTime(createTime); + pb.setCreatedBy(createdBy); + pb.setCreatorUsername(creatorUsername); + pb.setIntegrationId(integrationId); + pb.setName(name); + pb.setRedirectUrls(redirectUrls); + pb.setScopes(scopes); + pb.setTokenAccessPolicy(tokenAccessPolicy); + pb.setUserAuthorizedScopes(userAuthorizedScopes); + + return pb; + } + + static GetCustomAppIntegrationOutput fromPb(GetCustomAppIntegrationOutputPb pb) { + GetCustomAppIntegrationOutput model = new GetCustomAppIntegrationOutput(); + model.setClientId(pb.getClientId()); + model.setConfidential(pb.getConfidential()); + model.setCreateTime(pb.getCreateTime()); + 
model.setCreatedBy(pb.getCreatedBy()); + model.setCreatorUsername(pb.getCreatorUsername()); + model.setIntegrationId(pb.getIntegrationId()); + model.setName(pb.getName()); + model.setRedirectUrls(pb.getRedirectUrls()); + model.setScopes(pb.getScopes()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + model.setUserAuthorizedScopes(pb.getUserAuthorizedScopes()); + + return model; + } + + public static class GetCustomAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCustomAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCustomAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCustomAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public GetCustomAppIntegrationOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCustomAppIntegrationOutputPb pb = + mapper.readValue(p, GetCustomAppIntegrationOutputPb.class); + return GetCustomAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutputPb.java new file mode 100755 index 000000000..c89843dce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutputPb.java @@ -0,0 +1,196 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetCustomAppIntegrationOutputPb { + @JsonProperty("client_id") + private String clientId; + + @JsonProperty("confidential") + private Boolean confidential; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("created_by") + private Long createdBy; + + @JsonProperty("creator_username") + private String creatorUsername; + + @JsonProperty("integration_id") + private String integrationId; + + @JsonProperty("name") + private String name; + + @JsonProperty("redirect_urls") + private Collection redirectUrls; + + @JsonProperty("scopes") + private Collection scopes; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + @JsonProperty("user_authorized_scopes") + private Collection userAuthorizedScopes; + + public GetCustomAppIntegrationOutputPb setClientId(String clientId) { + this.clientId = clientId; + return this; + } + + public String getClientId() { + return clientId; + } + + public GetCustomAppIntegrationOutputPb setConfidential(Boolean confidential) { + this.confidential = confidential; + return this; + } + + public Boolean getConfidential() { + return confidential; + } + + public GetCustomAppIntegrationOutputPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public GetCustomAppIntegrationOutputPb setCreatedBy(Long createdBy) { + this.createdBy = createdBy; + return this; + } + + public Long getCreatedBy() { + return createdBy; + } + + public GetCustomAppIntegrationOutputPb setCreatorUsername(String creatorUsername) { + this.creatorUsername = creatorUsername; + return this; + } + + public String getCreatorUsername() { + return 
creatorUsername; + } + + public GetCustomAppIntegrationOutputPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + public GetCustomAppIntegrationOutputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetCustomAppIntegrationOutputPb setRedirectUrls(Collection redirectUrls) { + this.redirectUrls = redirectUrls; + return this; + } + + public Collection getRedirectUrls() { + return redirectUrls; + } + + public GetCustomAppIntegrationOutputPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + public GetCustomAppIntegrationOutputPb setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + public GetCustomAppIntegrationOutputPb setUserAuthorizedScopes( + Collection userAuthorizedScopes) { + this.userAuthorizedScopes = userAuthorizedScopes; + return this; + } + + public Collection getUserAuthorizedScopes() { + return userAuthorizedScopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCustomAppIntegrationOutputPb that = (GetCustomAppIntegrationOutputPb) o; + return Objects.equals(clientId, that.clientId) + && Objects.equals(confidential, that.confidential) + && Objects.equals(createTime, that.createTime) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(creatorUsername, that.creatorUsername) + && Objects.equals(integrationId, that.integrationId) + && Objects.equals(name, that.name) + && Objects.equals(redirectUrls, that.redirectUrls) + && Objects.equals(scopes, that.scopes) + && Objects.equals(tokenAccessPolicy, 
that.tokenAccessPolicy) + && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes); + } + + @Override + public int hashCode() { + return Objects.hash( + clientId, + confidential, + createTime, + createdBy, + creatorUsername, + integrationId, + name, + redirectUrls, + scopes, + tokenAccessPolicy, + userAuthorizedScopes); + } + + @Override + public String toString() { + return new ToStringer(GetCustomAppIntegrationOutputPb.class) + .add("clientId", clientId) + .add("confidential", confidential) + .add("createTime", createTime) + .add("createdBy", createdBy) + .add("creatorUsername", creatorUsername) + .add("integrationId", integrationId) + .add("name", name) + .add("redirectUrls", redirectUrls) + .add("scopes", scopes) + .add("tokenAccessPolicy", tokenAccessPolicy) + .add("userAuthorizedScopes", userAuthorizedScopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequest.java index 6b0c6115a..58c30ecbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get OAuth Custom App Integration */ @Generated +@JsonSerialize( + using = GetCustomAppIntegrationRequest.GetCustomAppIntegrationRequestSerializer.class) +@JsonDeserialize( + using = GetCustomAppIntegrationRequest.GetCustomAppIntegrationRequestDeserializer.class) public class GetCustomAppIntegrationRequest { /** The OAuth app integration ID. */ - @JsonIgnore private String integrationId; + private String integrationId; public GetCustomAppIntegrationRequest setIntegrationId(String integrationId) { this.integrationId = integrationId; @@ -41,4 +54,42 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + GetCustomAppIntegrationRequestPb toPb() { + GetCustomAppIntegrationRequestPb pb = new GetCustomAppIntegrationRequestPb(); + pb.setIntegrationId(integrationId); + + return pb; + } + + static GetCustomAppIntegrationRequest fromPb(GetCustomAppIntegrationRequestPb pb) { + GetCustomAppIntegrationRequest model = new GetCustomAppIntegrationRequest(); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class GetCustomAppIntegrationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCustomAppIntegrationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCustomAppIntegrationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCustomAppIntegrationRequestDeserializer + extends JsonDeserializer { + @Override + public GetCustomAppIntegrationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCustomAppIntegrationRequestPb pb = + mapper.readValue(p, GetCustomAppIntegrationRequestPb.class); + return GetCustomAppIntegrationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequestPb.java new file mode 100755 index 000000000..a55c99705 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get OAuth Custom App Integration */ +@Generated +class GetCustomAppIntegrationRequestPb { + @JsonIgnore private String integrationId; + + public GetCustomAppIntegrationRequestPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCustomAppIntegrationRequestPb that = (GetCustomAppIntegrationRequestPb) o; + return Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId); + } + + @Override + public String toString() { + return new ToStringer(GetCustomAppIntegrationRequestPb.class) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutput.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutput.java index f2e313bf2..2a0aeef4f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutput.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetCustomAppIntegrationsOutput.GetCustomAppIntegrationsOutputSerializer.class) +@JsonDeserialize( + using = GetCustomAppIntegrationsOutput.GetCustomAppIntegrationsOutputDeserializer.class) public class GetCustomAppIntegrationsOutput { /** List of Custom OAuth App Integrations defined for the account. 
*/ - @JsonProperty("apps") private Collection apps; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public GetCustomAppIntegrationsOutput setApps(Collection apps) { @@ -56,4 +67,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetCustomAppIntegrationsOutputPb toPb() { + GetCustomAppIntegrationsOutputPb pb = new GetCustomAppIntegrationsOutputPb(); + pb.setApps(apps); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetCustomAppIntegrationsOutput fromPb(GetCustomAppIntegrationsOutputPb pb) { + GetCustomAppIntegrationsOutput model = new GetCustomAppIntegrationsOutput(); + model.setApps(pb.getApps()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetCustomAppIntegrationsOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCustomAppIntegrationsOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCustomAppIntegrationsOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCustomAppIntegrationsOutputDeserializer + extends JsonDeserializer { + @Override + public GetCustomAppIntegrationsOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCustomAppIntegrationsOutputPb pb = + mapper.readValue(p, GetCustomAppIntegrationsOutputPb.class); + return GetCustomAppIntegrationsOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutputPb.java new file mode 100755 index 000000000..f93bcbaaf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationsOutputPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetCustomAppIntegrationsOutputPb { + @JsonProperty("apps") + private Collection apps; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetCustomAppIntegrationsOutputPb setApps(Collection apps) { + this.apps = apps; + return this; + } + + public Collection getApps() { + return apps; + } + + public GetCustomAppIntegrationsOutputPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCustomAppIntegrationsOutputPb that = (GetCustomAppIntegrationsOutputPb) o; + return Objects.equals(apps, that.apps) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(apps, nextPageToken); + } + + @Override + public String toString() { + return new 
ToStringer(GetCustomAppIntegrationsOutputPb.class) + .add("apps", apps) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutput.java index e9349c9b2..cf7200a71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutput.java @@ -4,33 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = GetPublishedAppIntegrationOutput.GetPublishedAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = GetPublishedAppIntegrationOutput.GetPublishedAppIntegrationOutputDeserializer.class) public class GetPublishedAppIntegrationOutput { /** App-id of the published app integration */ - @JsonProperty("app_id") private String appId; /** */ - @JsonProperty("create_time") private String createTime; /** */ - @JsonProperty("created_by") private Long createdBy; /** Unique integration id for the published OAuth app */ - @JsonProperty("integration_id") private String integrationId; /** Display name of 
the published OAuth app */ - @JsonProperty("name") private String name; /** Token access policy */ - @JsonProperty("token_access_policy") private TokenAccessPolicy tokenAccessPolicy; public GetPublishedAppIntegrationOutput setAppId(String appId) { @@ -117,4 +124,52 @@ public String toString() { .add("tokenAccessPolicy", tokenAccessPolicy) .toString(); } + + GetPublishedAppIntegrationOutputPb toPb() { + GetPublishedAppIntegrationOutputPb pb = new GetPublishedAppIntegrationOutputPb(); + pb.setAppId(appId); + pb.setCreateTime(createTime); + pb.setCreatedBy(createdBy); + pb.setIntegrationId(integrationId); + pb.setName(name); + pb.setTokenAccessPolicy(tokenAccessPolicy); + + return pb; + } + + static GetPublishedAppIntegrationOutput fromPb(GetPublishedAppIntegrationOutputPb pb) { + GetPublishedAppIntegrationOutput model = new GetPublishedAppIntegrationOutput(); + model.setAppId(pb.getAppId()); + model.setCreateTime(pb.getCreateTime()); + model.setCreatedBy(pb.getCreatedBy()); + model.setIntegrationId(pb.getIntegrationId()); + model.setName(pb.getName()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + + return model; + } + + public static class GetPublishedAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public GetPublishedAppIntegrationOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedAppIntegrationOutputPb pb = + mapper.readValue(p, GetPublishedAppIntegrationOutputPb.class); + return GetPublishedAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutputPb.java new file mode 100755 index 000000000..2f704184e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationOutputPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetPublishedAppIntegrationOutputPb { + @JsonProperty("app_id") + private String appId; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("created_by") + private Long createdBy; + + @JsonProperty("integration_id") + private String integrationId; + + @JsonProperty("name") + private String name; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + public GetPublishedAppIntegrationOutputPb setAppId(String appId) { + this.appId = appId; + return this; + } + + public String getAppId() { + return appId; + } + + public GetPublishedAppIntegrationOutputPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public GetPublishedAppIntegrationOutputPb setCreatedBy(Long createdBy) { + this.createdBy = createdBy; + return this; + } + + public Long getCreatedBy() { + return createdBy; + } + + public GetPublishedAppIntegrationOutputPb setIntegrationId(String 
integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + public GetPublishedAppIntegrationOutputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetPublishedAppIntegrationOutputPb setTokenAccessPolicy( + TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedAppIntegrationOutputPb that = (GetPublishedAppIntegrationOutputPb) o; + return Objects.equals(appId, that.appId) + && Objects.equals(createTime, that.createTime) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(integrationId, that.integrationId) + && Objects.equals(name, that.name) + && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(appId, createTime, createdBy, integrationId, name, tokenAccessPolicy); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedAppIntegrationOutputPb.class) + .add("appId", appId) + .add("createTime", createTime) + .add("createdBy", createdBy) + .add("integrationId", integrationId) + .add("name", name) + .add("tokenAccessPolicy", tokenAccessPolicy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequest.java index 8d9b3f718..a5b8af67b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get OAuth Published App Integration */ @Generated +@JsonSerialize( + using = GetPublishedAppIntegrationRequest.GetPublishedAppIntegrationRequestSerializer.class) +@JsonDeserialize( + using = GetPublishedAppIntegrationRequest.GetPublishedAppIntegrationRequestDeserializer.class) public class GetPublishedAppIntegrationRequest { /** */ - @JsonIgnore private String integrationId; + private String integrationId; public GetPublishedAppIntegrationRequest setIntegrationId(String integrationId) { this.integrationId = integrationId; @@ -41,4 +54,42 @@ public String toString() { .add("integrationId", integrationId) .toString(); } + + GetPublishedAppIntegrationRequestPb toPb() { + GetPublishedAppIntegrationRequestPb pb = new GetPublishedAppIntegrationRequestPb(); + pb.setIntegrationId(integrationId); + + return pb; + } + + static GetPublishedAppIntegrationRequest fromPb(GetPublishedAppIntegrationRequestPb pb) { + GetPublishedAppIntegrationRequest model = new GetPublishedAppIntegrationRequest(); + model.setIntegrationId(pb.getIntegrationId()); + + return model; + } + + public static class GetPublishedAppIntegrationRequestSerializer + extends 
JsonSerializer { + @Override + public void serialize( + GetPublishedAppIntegrationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedAppIntegrationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedAppIntegrationRequestDeserializer + extends JsonDeserializer { + @Override + public GetPublishedAppIntegrationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedAppIntegrationRequestPb pb = + mapper.readValue(p, GetPublishedAppIntegrationRequestPb.class); + return GetPublishedAppIntegrationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequestPb.java new file mode 100755 index 000000000..b9efb4cbe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get OAuth Published App Integration */ +@Generated +class GetPublishedAppIntegrationRequestPb { + @JsonIgnore private String integrationId; + + public GetPublishedAppIntegrationRequestPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedAppIntegrationRequestPb that = (GetPublishedAppIntegrationRequestPb) o; + return Objects.equals(integrationId, that.integrationId); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedAppIntegrationRequestPb.class) + .add("integrationId", integrationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutput.java index 8b3da1890..ef8bea573 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutput.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetPublishedAppIntegrationsOutput.GetPublishedAppIntegrationsOutputSerializer.class) +@JsonDeserialize( + using = GetPublishedAppIntegrationsOutput.GetPublishedAppIntegrationsOutputDeserializer.class) public class GetPublishedAppIntegrationsOutput { /** List of Published OAuth App Integrations defined for the account. */ - @JsonProperty("apps") private Collection apps; /** */ - @JsonProperty("next_page_token") private String nextPageToken; public GetPublishedAppIntegrationsOutput setApps( @@ -57,4 +68,44 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetPublishedAppIntegrationsOutputPb toPb() { + GetPublishedAppIntegrationsOutputPb pb = new GetPublishedAppIntegrationsOutputPb(); + pb.setApps(apps); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetPublishedAppIntegrationsOutput fromPb(GetPublishedAppIntegrationsOutputPb pb) { + GetPublishedAppIntegrationsOutput model = new GetPublishedAppIntegrationsOutput(); + model.setApps(pb.getApps()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetPublishedAppIntegrationsOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedAppIntegrationsOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedAppIntegrationsOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedAppIntegrationsOutputDeserializer + extends JsonDeserializer { + @Override + public 
GetPublishedAppIntegrationsOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedAppIntegrationsOutputPb pb = + mapper.readValue(p, GetPublishedAppIntegrationsOutputPb.class); + return GetPublishedAppIntegrationsOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutputPb.java new file mode 100755 index 000000000..c2eb525e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppIntegrationsOutputPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPublishedAppIntegrationsOutputPb { + @JsonProperty("apps") + private Collection apps; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetPublishedAppIntegrationsOutputPb setApps( + Collection apps) { + this.apps = apps; + return this; + } + + public Collection getApps() { + return apps; + } + + public GetPublishedAppIntegrationsOutputPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedAppIntegrationsOutputPb that = (GetPublishedAppIntegrationsOutputPb) o; + return Objects.equals(apps, 
that.apps) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(apps, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedAppIntegrationsOutputPb.class) + .add("apps", apps) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutput.java index 10d177500..5d979bee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutput.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetPublishedAppsOutput.GetPublishedAppsOutputSerializer.class) +@JsonDeserialize(using = GetPublishedAppsOutput.GetPublishedAppsOutputDeserializer.class) public class GetPublishedAppsOutput { /** List of Published OAuth Apps. */ - @JsonProperty("apps") private Collection apps; /** * A token that can be used to get the next page of results. If not present, there are no more * results to show. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public GetPublishedAppsOutput setApps(Collection apps) { @@ -59,4 +68,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + GetPublishedAppsOutputPb toPb() { + GetPublishedAppsOutputPb pb = new GetPublishedAppsOutputPb(); + pb.setApps(apps); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static GetPublishedAppsOutput fromPb(GetPublishedAppsOutputPb pb) { + GetPublishedAppsOutput model = new GetPublishedAppsOutput(); + model.setApps(pb.getApps()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class GetPublishedAppsOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPublishedAppsOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPublishedAppsOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPublishedAppsOutputDeserializer + extends JsonDeserializer { + @Override + public GetPublishedAppsOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPublishedAppsOutputPb pb = mapper.readValue(p, GetPublishedAppsOutputPb.class); + return GetPublishedAppsOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutputPb.java new file mode 100755 index 000000000..b2a7999ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetPublishedAppsOutputPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPublishedAppsOutputPb { + @JsonProperty("apps") + private Collection apps; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetPublishedAppsOutputPb setApps(Collection apps) { + this.apps = apps; + return this; + } + + public Collection getApps() { + return apps; + } + + public GetPublishedAppsOutputPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedAppsOutputPb that = (GetPublishedAppsOutputPb) o; + return Objects.equals(apps, that.apps) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(apps, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedAppsOutputPb.class) + .add("apps", apps) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java index a9e986b21..e22dbb123 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java @@ -4,17 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get service principal federation policy */ @Generated +@JsonSerialize( + using = + GetServicePrincipalFederationPolicyRequest + .GetServicePrincipalFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + GetServicePrincipalFederationPolicyRequest + .GetServicePrincipalFederationPolicyRequestDeserializer.class) public class GetServicePrincipalFederationPolicyRequest { /** The identifier for the federation policy. */ - @JsonIgnore private String policyId; + private String policyId; /** The service principal id for the federation policy. 
*/ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public GetServicePrincipalFederationPolicyRequest setPolicyId(String policyId) { this.policyId = policyId; @@ -56,4 +73,49 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + GetServicePrincipalFederationPolicyRequestPb toPb() { + GetServicePrincipalFederationPolicyRequestPb pb = + new GetServicePrincipalFederationPolicyRequestPb(); + pb.setPolicyId(policyId); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static GetServicePrincipalFederationPolicyRequest fromPb( + GetServicePrincipalFederationPolicyRequestPb pb) { + GetServicePrincipalFederationPolicyRequest model = + new GetServicePrincipalFederationPolicyRequest(); + model.setPolicyId(pb.getPolicyId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class GetServicePrincipalFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServicePrincipalFederationPolicyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetServicePrincipalFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServicePrincipalFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetServicePrincipalFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServicePrincipalFederationPolicyRequestPb pb = + mapper.readValue(p, GetServicePrincipalFederationPolicyRequestPb.class); + return GetServicePrincipalFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequestPb.java new file mode 100755 index 000000000..8457f7789 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get service principal federation policy */ +@Generated +class GetServicePrincipalFederationPolicyRequestPb { + @JsonIgnore private String policyId; + + @JsonIgnore private Long servicePrincipalId; + + public GetServicePrincipalFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public GetServicePrincipalFederationPolicyRequestPb setServicePrincipalId( + Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServicePrincipalFederationPolicyRequestPb that = + (GetServicePrincipalFederationPolicyRequestPb) o; + return Objects.equals(policyId, that.policyId) + && Objects.equals(servicePrincipalId, 
that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(GetServicePrincipalFederationPolicyRequestPb.class) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequest.java index cda3d1b8a..52255e4af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequest.java @@ -3,22 +3,32 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List account federation policies */ @Generated +@JsonSerialize( + using = + ListAccountFederationPoliciesRequest.ListAccountFederationPoliciesRequestSerializer.class) +@JsonDeserialize( + using = + ListAccountFederationPoliciesRequest.ListAccountFederationPoliciesRequestDeserializer.class) public class 
ListAccountFederationPoliciesRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAccountFederationPoliciesRequest setPageSize(Long pageSize) { @@ -59,4 +69,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAccountFederationPoliciesRequestPb toPb() { + ListAccountFederationPoliciesRequestPb pb = new ListAccountFederationPoliciesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAccountFederationPoliciesRequest fromPb(ListAccountFederationPoliciesRequestPb pb) { + ListAccountFederationPoliciesRequest model = new ListAccountFederationPoliciesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAccountFederationPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAccountFederationPoliciesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAccountFederationPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAccountFederationPoliciesRequestDeserializer + extends JsonDeserializer { + @Override + public ListAccountFederationPoliciesRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAccountFederationPoliciesRequestPb pb = + mapper.readValue(p, ListAccountFederationPoliciesRequestPb.class); + return ListAccountFederationPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequestPb.java new file mode 100755 index 000000000..101504111 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListAccountFederationPoliciesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List account federation policies */ +@Generated +class ListAccountFederationPoliciesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAccountFederationPoliciesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAccountFederationPoliciesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAccountFederationPoliciesRequestPb that = (ListAccountFederationPoliciesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int 
hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAccountFederationPoliciesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequest.java index b85059720..aa70de8eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequest.java @@ -3,27 +3,33 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get custom oauth app integrations */ @Generated +@JsonSerialize( + using = ListCustomAppIntegrationsRequest.ListCustomAppIntegrationsRequestSerializer.class) +@JsonDeserialize( + using = ListCustomAppIntegrationsRequest.ListCustomAppIntegrationsRequestDeserializer.class) public class ListCustomAppIntegrationsRequest { /** */ - @JsonIgnore - @QueryParam("include_creator_username") private Boolean includeCreatorUsername; /** */ - 
@JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListCustomAppIntegrationsRequest setIncludeCreatorUsername( @@ -77,4 +83,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListCustomAppIntegrationsRequestPb toPb() { + ListCustomAppIntegrationsRequestPb pb = new ListCustomAppIntegrationsRequestPb(); + pb.setIncludeCreatorUsername(includeCreatorUsername); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListCustomAppIntegrationsRequest fromPb(ListCustomAppIntegrationsRequestPb pb) { + ListCustomAppIntegrationsRequest model = new ListCustomAppIntegrationsRequest(); + model.setIncludeCreatorUsername(pb.getIncludeCreatorUsername()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListCustomAppIntegrationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCustomAppIntegrationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCustomAppIntegrationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCustomAppIntegrationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListCustomAppIntegrationsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCustomAppIntegrationsRequestPb pb = + mapper.readValue(p, ListCustomAppIntegrationsRequestPb.class); + return ListCustomAppIntegrationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequestPb.java new file mode 100755 index 000000000..a66c1140a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListCustomAppIntegrationsRequestPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get custom oauth app integrations */ +@Generated +class ListCustomAppIntegrationsRequestPb { + @JsonIgnore + @QueryParam("include_creator_username") + private Boolean includeCreatorUsername; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListCustomAppIntegrationsRequestPb setIncludeCreatorUsername( + Boolean includeCreatorUsername) { + this.includeCreatorUsername = includeCreatorUsername; + return this; + } + + public Boolean getIncludeCreatorUsername() { + return includeCreatorUsername; + } + + public ListCustomAppIntegrationsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListCustomAppIntegrationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCustomAppIntegrationsRequestPb that = (ListCustomAppIntegrationsRequestPb) o; + return Objects.equals(includeCreatorUsername, that.includeCreatorUsername) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(includeCreatorUsername, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListCustomAppIntegrationsRequestPb.class) + .add("includeCreatorUsername", includeCreatorUsername) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponse.java index a4c77a09b..701b744e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + 
using = ListFederationPoliciesResponse.ListFederationPoliciesResponseSerializer.class) +@JsonDeserialize( + using = ListFederationPoliciesResponse.ListFederationPoliciesResponseDeserializer.class) public class ListFederationPoliciesResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("policies") private Collection policies; public ListFederationPoliciesResponse setNextPageToken(String nextPageToken) { @@ -57,4 +68,44 @@ public String toString() { .add("policies", policies) .toString(); } + + ListFederationPoliciesResponsePb toPb() { + ListFederationPoliciesResponsePb pb = new ListFederationPoliciesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPolicies(policies); + + return pb; + } + + static ListFederationPoliciesResponse fromPb(ListFederationPoliciesResponsePb pb) { + ListFederationPoliciesResponse model = new ListFederationPoliciesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPolicies(pb.getPolicies()); + + return model; + } + + public static class ListFederationPoliciesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFederationPoliciesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFederationPoliciesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFederationPoliciesResponseDeserializer + extends JsonDeserializer { + @Override + public ListFederationPoliciesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFederationPoliciesResponsePb pb = + mapper.readValue(p, ListFederationPoliciesResponsePb.class); + return ListFederationPoliciesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponsePb.java new file mode 100755 index 000000000..446c432c6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListFederationPoliciesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListFederationPoliciesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("policies") + private Collection policies; + + public ListFederationPoliciesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListFederationPoliciesResponsePb setPolicies(Collection policies) { + this.policies = policies; + return this; + } + + public Collection getPolicies() { + return policies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFederationPoliciesResponsePb that = (ListFederationPoliciesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(policies, that.policies); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, policies); + } + + @Override + public 
String toString() { + return new ToStringer(ListFederationPoliciesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("policies", policies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequest.java index 1a8576228..8cd1bd503 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequest.java @@ -3,22 +3,29 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get all the published OAuth apps */ @Generated +@JsonSerialize(using = ListOAuthPublishedAppsRequest.ListOAuthPublishedAppsRequestSerializer.class) +@JsonDeserialize( + using = ListOAuthPublishedAppsRequest.ListOAuthPublishedAppsRequestDeserializer.class) public class ListOAuthPublishedAppsRequest { /** The max number of OAuth published apps to return in one page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** A token that can be used to get the next page of results. 
*/ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListOAuthPublishedAppsRequest setPageSize(Long pageSize) { @@ -59,4 +66,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListOAuthPublishedAppsRequestPb toPb() { + ListOAuthPublishedAppsRequestPb pb = new ListOAuthPublishedAppsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListOAuthPublishedAppsRequest fromPb(ListOAuthPublishedAppsRequestPb pb) { + ListOAuthPublishedAppsRequest model = new ListOAuthPublishedAppsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListOAuthPublishedAppsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListOAuthPublishedAppsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListOAuthPublishedAppsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListOAuthPublishedAppsRequestDeserializer + extends JsonDeserializer { + @Override + public ListOAuthPublishedAppsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListOAuthPublishedAppsRequestPb pb = + mapper.readValue(p, ListOAuthPublishedAppsRequestPb.class); + return ListOAuthPublishedAppsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequestPb.java new file mode 100755 index 000000000..14040676c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListOAuthPublishedAppsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get all the published OAuth apps */ +@Generated +class ListOAuthPublishedAppsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListOAuthPublishedAppsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListOAuthPublishedAppsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListOAuthPublishedAppsRequestPb that = (ListOAuthPublishedAppsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public 
String toString() { + return new ToStringer(ListOAuthPublishedAppsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequest.java index cded44d9e..9e864eb15 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequest.java @@ -3,22 +3,31 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get published oauth app integrations */ @Generated +@JsonSerialize( + using = ListPublishedAppIntegrationsRequest.ListPublishedAppIntegrationsRequestSerializer.class) +@JsonDeserialize( + using = + ListPublishedAppIntegrationsRequest.ListPublishedAppIntegrationsRequestDeserializer.class) public class ListPublishedAppIntegrationsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public 
ListPublishedAppIntegrationsRequest setPageSize(Long pageSize) { @@ -59,4 +68,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListPublishedAppIntegrationsRequestPb toPb() { + ListPublishedAppIntegrationsRequestPb pb = new ListPublishedAppIntegrationsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListPublishedAppIntegrationsRequest fromPb(ListPublishedAppIntegrationsRequestPb pb) { + ListPublishedAppIntegrationsRequest model = new ListPublishedAppIntegrationsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListPublishedAppIntegrationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPublishedAppIntegrationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPublishedAppIntegrationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPublishedAppIntegrationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListPublishedAppIntegrationsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPublishedAppIntegrationsRequestPb pb = + mapper.readValue(p, ListPublishedAppIntegrationsRequestPb.class); + return ListPublishedAppIntegrationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequestPb.java new file mode 100755 index 000000000..3649b9c9a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListPublishedAppIntegrationsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get published oauth app integrations */ +@Generated +class ListPublishedAppIntegrationsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListPublishedAppIntegrationsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListPublishedAppIntegrationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPublishedAppIntegrationsRequestPb that = (ListPublishedAppIntegrationsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + 
return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPublishedAppIntegrationsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequest.java index 52ebe4dbb..cbb007edc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequest.java @@ -3,26 +3,38 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List service principal federation policies */ @Generated +@JsonSerialize( + using = + ListServicePrincipalFederationPoliciesRequest + .ListServicePrincipalFederationPoliciesRequestSerializer.class) +@JsonDeserialize( + using = + ListServicePrincipalFederationPoliciesRequest + .ListServicePrincipalFederationPoliciesRequestDeserializer.class) public class 
ListServicePrincipalFederationPoliciesRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The service principal id for the federation policy. */ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public ListServicePrincipalFederationPoliciesRequest setPageSize(Long pageSize) { this.pageSize = pageSize; @@ -76,4 +88,51 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + ListServicePrincipalFederationPoliciesRequestPb toPb() { + ListServicePrincipalFederationPoliciesRequestPb pb = + new ListServicePrincipalFederationPoliciesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static ListServicePrincipalFederationPoliciesRequest fromPb( + ListServicePrincipalFederationPoliciesRequestPb pb) { + ListServicePrincipalFederationPoliciesRequest model = + new ListServicePrincipalFederationPoliciesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class ListServicePrincipalFederationPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListServicePrincipalFederationPoliciesRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListServicePrincipalFederationPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListServicePrincipalFederationPoliciesRequestDeserializer + extends JsonDeserializer { + @Override + public ListServicePrincipalFederationPoliciesRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListServicePrincipalFederationPoliciesRequestPb pb = + mapper.readValue(p, ListServicePrincipalFederationPoliciesRequestPb.class); + return ListServicePrincipalFederationPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequestPb.java new file mode 100755 index 000000000..c44c6a852 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalFederationPoliciesRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List service principal federation policies */ +@Generated +class ListServicePrincipalFederationPoliciesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private Long servicePrincipalId; + + public ListServicePrincipalFederationPoliciesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListServicePrincipalFederationPoliciesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListServicePrincipalFederationPoliciesRequestPb setServicePrincipalId( + Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + 
return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalFederationPoliciesRequestPb that = + (ListServicePrincipalFederationPoliciesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalFederationPoliciesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java index 816fb09bb..17982ba9a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List service principal secrets */ @Generated +@JsonSerialize( + using = ListServicePrincipalSecretsRequest.ListServicePrincipalSecretsRequestSerializer.class) +@JsonDeserialize( + using = ListServicePrincipalSecretsRequest.ListServicePrincipalSecretsRequestDeserializer.class) public class ListServicePrincipalSecretsRequest { /** * An opaque page token which was the `next_page_token` in the response of the previous request to @@ -20,12 +32,10 @@ public class ListServicePrincipalSecretsRequest { * `next_page_token`. Note that the number of entries returned must not be used to determine when * the listing is complete. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The service principal ID. */ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; public ListServicePrincipalSecretsRequest setPageToken(String pageToken) { this.pageToken = pageToken; @@ -66,4 +76,44 @@ public String toString() { .add("servicePrincipalId", servicePrincipalId) .toString(); } + + ListServicePrincipalSecretsRequestPb toPb() { + ListServicePrincipalSecretsRequestPb pb = new ListServicePrincipalSecretsRequestPb(); + pb.setPageToken(pageToken); + pb.setServicePrincipalId(servicePrincipalId); + + return pb; + } + + static ListServicePrincipalSecretsRequest fromPb(ListServicePrincipalSecretsRequestPb pb) { + ListServicePrincipalSecretsRequest model = new ListServicePrincipalSecretsRequest(); + model.setPageToken(pb.getPageToken()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + + return model; + } + + public static class ListServicePrincipalSecretsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListServicePrincipalSecretsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
ListServicePrincipalSecretsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListServicePrincipalSecretsRequestDeserializer + extends JsonDeserializer { + @Override + public ListServicePrincipalSecretsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListServicePrincipalSecretsRequestPb pb = + mapper.readValue(p, ListServicePrincipalSecretsRequestPb.class); + return ListServicePrincipalSecretsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequestPb.java new file mode 100755 index 000000000..b30f1a6fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List service principal secrets */ +@Generated +class ListServicePrincipalSecretsRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private Long servicePrincipalId; + + public ListServicePrincipalSecretsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListServicePrincipalSecretsRequestPb setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalSecretsRequestPb that = (ListServicePrincipalSecretsRequestPb) o; + return Objects.equals(pageToken, that.pageToken) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken, servicePrincipalId); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalSecretsRequestPb.class) + .add("pageToken", pageToken) + .add("servicePrincipalId", servicePrincipalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java index dd971e938..35d49212e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java @@ -4,18 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListServicePrincipalSecretsResponse.ListServicePrincipalSecretsResponseSerializer.class) +@JsonDeserialize( + using = + ListServicePrincipalSecretsResponse.ListServicePrincipalSecretsResponseDeserializer.class) public class ListServicePrincipalSecretsResponse { /** A token, which can be sent as `page_token` to retrieve the next page. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** List of the secrets */ - @JsonProperty("secrets") private Collection secrets; public ListServicePrincipalSecretsResponse setNextPageToken(String nextPageToken) { @@ -57,4 +69,44 @@ public String toString() { .add("secrets", secrets) .toString(); } + + ListServicePrincipalSecretsResponsePb toPb() { + ListServicePrincipalSecretsResponsePb pb = new ListServicePrincipalSecretsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setSecrets(secrets); + + return pb; + } + + static ListServicePrincipalSecretsResponse fromPb(ListServicePrincipalSecretsResponsePb pb) { + ListServicePrincipalSecretsResponse model = new ListServicePrincipalSecretsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setSecrets(pb.getSecrets()); + + return model; + } + + public static class ListServicePrincipalSecretsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListServicePrincipalSecretsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListServicePrincipalSecretsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListServicePrincipalSecretsResponseDeserializer + extends JsonDeserializer { + @Override + public ListServicePrincipalSecretsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListServicePrincipalSecretsResponsePb pb = + mapper.readValue(p, ListServicePrincipalSecretsResponsePb.class); + return ListServicePrincipalSecretsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponsePb.java new file mode 100755 index 000000000..682388ee9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListServicePrincipalSecretsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("secrets") + private Collection secrets; + + public ListServicePrincipalSecretsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListServicePrincipalSecretsResponsePb setSecrets(Collection secrets) { + this.secrets = secrets; + return this; + } + + public Collection getSecrets() { + return secrets; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalSecretsResponsePb that = (ListServicePrincipalSecretsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(secrets, that.secrets); + } + + @Override + public int hashCode() { + return 
Objects.hash(nextPageToken, secrets); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalSecretsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("secrets", secrets) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java index 0e53cf20a..a6523092f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java @@ -23,7 +23,7 @@ public GetPublishedAppsOutput list(ListOAuthPublishedAppsRequest request) { "/api/2.0/accounts/%s/oauth2/published-apps", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPublishedAppsOutput.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java index c6c8453cf..1ca23d948 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Specifies the policy to use for validating OIDC claims in your federated tokens. */ @Generated +@JsonSerialize(using = OidcFederationPolicy.OidcFederationPolicySerializer.class) +@JsonDeserialize(using = OidcFederationPolicy.OidcFederationPolicyDeserializer.class) public class OidcFederationPolicy { /** * The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience @@ -18,11 +29,9 @@ public class OidcFederationPolicy { * token is considered a match. If audiences is unspecified, defaults to your Databricks account * id. */ - @JsonProperty("audiences") private Collection audiences; /** The required token issuer, as specified in the 'iss' claim of federated tokens. */ - @JsonProperty("issuer") private String issuer; /** @@ -32,7 +41,6 @@ public class OidcFederationPolicy { * endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for * discovering public keys. */ - @JsonProperty("jwks_json") private String jwksJson; /** @@ -42,7 +50,6 @@ public class OidcFederationPolicy { * endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for * discovering public keys. */ - @JsonProperty("jwks_uri") private String jwksUri; /** @@ -50,13 +57,11 @@ public class OidcFederationPolicy { * specified for service principal federation policies. Must not be specified for account * federation policies. */ - @JsonProperty("subject") private String subject; /** * The claim that contains the subject of the token. If unspecified, the default value is 'sub'. 
*/ - @JsonProperty("subject_claim") private String subjectClaim; public OidcFederationPolicy setAudiences(Collection audiences) { @@ -142,4 +147,50 @@ public String toString() { .add("subjectClaim", subjectClaim) .toString(); } + + OidcFederationPolicyPb toPb() { + OidcFederationPolicyPb pb = new OidcFederationPolicyPb(); + pb.setAudiences(audiences); + pb.setIssuer(issuer); + pb.setJwksJson(jwksJson); + pb.setJwksUri(jwksUri); + pb.setSubject(subject); + pb.setSubjectClaim(subjectClaim); + + return pb; + } + + static OidcFederationPolicy fromPb(OidcFederationPolicyPb pb) { + OidcFederationPolicy model = new OidcFederationPolicy(); + model.setAudiences(pb.getAudiences()); + model.setIssuer(pb.getIssuer()); + model.setJwksJson(pb.getJwksJson()); + model.setJwksUri(pb.getJwksUri()); + model.setSubject(pb.getSubject()); + model.setSubjectClaim(pb.getSubjectClaim()); + + return model; + } + + public static class OidcFederationPolicySerializer extends JsonSerializer { + @Override + public void serialize( + OidcFederationPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OidcFederationPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OidcFederationPolicyDeserializer + extends JsonDeserializer { + @Override + public OidcFederationPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OidcFederationPolicyPb pb = mapper.readValue(p, OidcFederationPolicyPb.class); + return OidcFederationPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicyPb.java new file mode 100755 index 000000000..bc4e8324c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicyPb.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Specifies the policy to use for validating OIDC claims in your federated tokens. */ +@Generated +class OidcFederationPolicyPb { + @JsonProperty("audiences") + private Collection audiences; + + @JsonProperty("issuer") + private String issuer; + + @JsonProperty("jwks_json") + private String jwksJson; + + @JsonProperty("jwks_uri") + private String jwksUri; + + @JsonProperty("subject") + private String subject; + + @JsonProperty("subject_claim") + private String subjectClaim; + + public OidcFederationPolicyPb setAudiences(Collection audiences) { + this.audiences = audiences; + return this; + } + + public Collection getAudiences() { + return audiences; + } + + public OidcFederationPolicyPb setIssuer(String issuer) { + this.issuer = issuer; + return this; + } + + public String getIssuer() { + return issuer; + } + + public OidcFederationPolicyPb setJwksJson(String jwksJson) { + this.jwksJson = jwksJson; + return this; + } + + public String getJwksJson() { + return jwksJson; + } + + public OidcFederationPolicyPb setJwksUri(String jwksUri) { + this.jwksUri = jwksUri; + return 
this; + } + + public String getJwksUri() { + return jwksUri; + } + + public OidcFederationPolicyPb setSubject(String subject) { + this.subject = subject; + return this; + } + + public String getSubject() { + return subject; + } + + public OidcFederationPolicyPb setSubjectClaim(String subjectClaim) { + this.subjectClaim = subjectClaim; + return this; + } + + public String getSubjectClaim() { + return subjectClaim; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OidcFederationPolicyPb that = (OidcFederationPolicyPb) o; + return Objects.equals(audiences, that.audiences) + && Objects.equals(issuer, that.issuer) + && Objects.equals(jwksJson, that.jwksJson) + && Objects.equals(jwksUri, that.jwksUri) + && Objects.equals(subject, that.subject) + && Objects.equals(subjectClaim, that.subjectClaim); + } + + @Override + public int hashCode() { + return Objects.hash(audiences, issuer, jwksJson, jwksUri, subject, subjectClaim); + } + + @Override + public String toString() { + return new ToStringer(OidcFederationPolicyPb.class) + .add("audiences", audiences) + .add("issuer", issuer) + .add("jwksJson", jwksJson) + .add("jwksUri", jwksUri) + .add("subject", subject) + .add("subjectClaim", subjectClaim) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java index e2592779a..c339a52f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java @@ -24,7 +24,7 @@ public CreatePublishedAppIntegrationOutput create(CreatePublishedAppIntegration apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); 
- ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreatePublishedAppIntegrationOutput.class); @@ -41,7 +41,7 @@ public void delete(DeletePublishedAppIntegrationRequest request) { apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeletePublishedAppIntegrationOutput.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public GetPublishedAppIntegrationOutput get(GetPublishedAppIntegrationRequest re apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPublishedAppIntegrationOutput.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public GetPublishedAppIntegrationsOutput list(ListPublishedAppIntegrationsReques apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPublishedAppIntegrationsOutput.class); } catch (IOException e) { @@ -89,7 +89,7 @@ public void update(UpdatePublishedAppIntegration request) { apiClient.configuredAccountID(), request.getIntegrationId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdatePublishedAppIntegrationOutput.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutput.java index 0777f1bf8..7dcea8056 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutput.java @@ -4,41 +4,45 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PublishedAppOutput.PublishedAppOutputSerializer.class) +@JsonDeserialize(using = PublishedAppOutput.PublishedAppOutputDeserializer.class) public class PublishedAppOutput { /** Unique ID of the published OAuth app. */ - @JsonProperty("app_id") private String appId; /** Client ID of the published OAuth app. It is the client_id in the OAuth flow */ - @JsonProperty("client_id") private String clientId; /** Description of the published OAuth app. */ - @JsonProperty("description") private String description; /** * Whether the published OAuth app is a confidential client. It is always false for published * OAuth apps. */ - @JsonProperty("is_confidential_client") private Boolean isConfidentialClient; /** The display name of the published OAuth app. 
*/ - @JsonProperty("name") private String name; /** Redirect URLs of the published OAuth app. */ - @JsonProperty("redirect_urls") private Collection redirectUrls; /** Required scopes for the published OAuth app. */ - @JsonProperty("scopes") private Collection scopes; public PublishedAppOutput setAppId(String appId) { @@ -136,4 +140,50 @@ public String toString() { .add("scopes", scopes) .toString(); } + + PublishedAppOutputPb toPb() { + PublishedAppOutputPb pb = new PublishedAppOutputPb(); + pb.setAppId(appId); + pb.setClientId(clientId); + pb.setDescription(description); + pb.setIsConfidentialClient(isConfidentialClient); + pb.setName(name); + pb.setRedirectUrls(redirectUrls); + pb.setScopes(scopes); + + return pb; + } + + static PublishedAppOutput fromPb(PublishedAppOutputPb pb) { + PublishedAppOutput model = new PublishedAppOutput(); + model.setAppId(pb.getAppId()); + model.setClientId(pb.getClientId()); + model.setDescription(pb.getDescription()); + model.setIsConfidentialClient(pb.getIsConfidentialClient()); + model.setName(pb.getName()); + model.setRedirectUrls(pb.getRedirectUrls()); + model.setScopes(pb.getScopes()); + + return model; + } + + public static class PublishedAppOutputSerializer extends JsonSerializer { + @Override + public void serialize(PublishedAppOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PublishedAppOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PublishedAppOutputDeserializer extends JsonDeserializer { + @Override + public PublishedAppOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PublishedAppOutputPb pb = mapper.readValue(p, PublishedAppOutputPb.class); + return PublishedAppOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutputPb.java new file mode 100755 index 000000000..5a48c3711 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppOutputPb.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PublishedAppOutputPb { + @JsonProperty("app_id") + private String appId; + + @JsonProperty("client_id") + private String clientId; + + @JsonProperty("description") + private String description; + + @JsonProperty("is_confidential_client") + private Boolean isConfidentialClient; + + @JsonProperty("name") + private String name; + + @JsonProperty("redirect_urls") + private Collection redirectUrls; + + @JsonProperty("scopes") + private Collection scopes; + + public PublishedAppOutputPb setAppId(String appId) { + this.appId = appId; + return this; + } + + public String getAppId() { + return appId; + } + + public PublishedAppOutputPb setClientId(String clientId) { + this.clientId = clientId; + return this; + } + + public String getClientId() { + return clientId; + } + + public PublishedAppOutputPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PublishedAppOutputPb setIsConfidentialClient(Boolean isConfidentialClient) { + this.isConfidentialClient = 
isConfidentialClient; + return this; + } + + public Boolean getIsConfidentialClient() { + return isConfidentialClient; + } + + public PublishedAppOutputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PublishedAppOutputPb setRedirectUrls(Collection redirectUrls) { + this.redirectUrls = redirectUrls; + return this; + } + + public Collection getRedirectUrls() { + return redirectUrls; + } + + public PublishedAppOutputPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PublishedAppOutputPb that = (PublishedAppOutputPb) o; + return Objects.equals(appId, that.appId) + && Objects.equals(clientId, that.clientId) + && Objects.equals(description, that.description) + && Objects.equals(isConfidentialClient, that.isConfidentialClient) + && Objects.equals(name, that.name) + && Objects.equals(redirectUrls, that.redirectUrls) + && Objects.equals(scopes, that.scopes); + } + + @Override + public int hashCode() { + return Objects.hash( + appId, clientId, description, isConfidentialClient, name, redirectUrls, scopes); + } + + @Override + public String toString() { + return new ToStringer(PublishedAppOutputPb.class) + .add("appId", appId) + .add("clientId", clientId) + .add("description", description) + .add("isConfidentialClient", isConfidentialClient) + .add("name", name) + .add("redirectUrls", redirectUrls) + .add("scopes", scopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java index 503275d5e..70dea6610 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java @@ -4,35 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SecretInfo.SecretInfoSerializer.class) +@JsonDeserialize(using = SecretInfo.SecretInfoDeserializer.class) public class SecretInfo { /** UTC time when the secret was created */ - @JsonProperty("create_time") private String createTime; /** * UTC time when the secret will expire. If the field is not present, the secret does not expire. 
*/ - @JsonProperty("expire_time") private String expireTime; /** ID of the secret */ - @JsonProperty("id") private String id; /** Secret Hash */ - @JsonProperty("secret_hash") private String secretHash; /** Status of the secret */ - @JsonProperty("status") private String status; /** UTC time when the secret was updated */ - @JsonProperty("update_time") private String updateTime; public SecretInfo setCreateTime(String createTime) { @@ -118,4 +123,47 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + SecretInfoPb toPb() { + SecretInfoPb pb = new SecretInfoPb(); + pb.setCreateTime(createTime); + pb.setExpireTime(expireTime); + pb.setId(id); + pb.setSecretHash(secretHash); + pb.setStatus(status); + pb.setUpdateTime(updateTime); + + return pb; + } + + static SecretInfo fromPb(SecretInfoPb pb) { + SecretInfo model = new SecretInfo(); + model.setCreateTime(pb.getCreateTime()); + model.setExpireTime(pb.getExpireTime()); + model.setId(pb.getId()); + model.setSecretHash(pb.getSecretHash()); + model.setStatus(pb.getStatus()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class SecretInfoSerializer extends JsonSerializer { + @Override + public void serialize(SecretInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SecretInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SecretInfoDeserializer extends JsonDeserializer { + @Override + public SecretInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SecretInfoPb pb = mapper.readValue(p, SecretInfoPb.class); + return SecretInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfoPb.java new file mode 100755 index 000000000..28a483f4e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfoPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SecretInfoPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("expire_time") + private String expireTime; + + @JsonProperty("id") + private String id; + + @JsonProperty("secret_hash") + private String secretHash; + + @JsonProperty("status") + private String status; + + @JsonProperty("update_time") + private String updateTime; + + public SecretInfoPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public SecretInfoPb setExpireTime(String expireTime) { + this.expireTime = expireTime; + return this; + } + + public String getExpireTime() { + return expireTime; + } + + public SecretInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public SecretInfoPb setSecretHash(String secretHash) { + this.secretHash = secretHash; + return this; + } + + public String getSecretHash() { + return secretHash; + } + + public SecretInfoPb setStatus(String status) { + this.status = status; + return this; + } + + public String getStatus() { + return status; + } + + public 
SecretInfoPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SecretInfoPb that = (SecretInfoPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(expireTime, that.expireTime) + && Objects.equals(id, that.id) + && Objects.equals(secretHash, that.secretHash) + && Objects.equals(status, that.status) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash(createTime, expireTime, id, secretHash, status, updateTime); + } + + @Override + public String toString() { + return new ToStringer(SecretInfoPb.class) + .add("createTime", createTime) + .add("expireTime", expireTime) + .add("id", id) + .add("secretHash", secretHash) + .add("status", status) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java index 322518bc9..3e50677dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java @@ -24,7 +24,7 @@ public FederationPolicy create(CreateServicePrincipalFederationPolicyRequest req apiClient.configuredAccountID(), request.getServicePrincipalId()); try { Request req = new Request("POST", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, 
FederationPolicy.class); @@ -43,7 +43,7 @@ public void delete(DeleteServicePrincipalFederationPolicyRequest request) { request.getPolicyId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public FederationPolicy get(GetServicePrincipalFederationPolicyRequest request) request.getPolicyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FederationPolicy.class); } catch (IOException e) { @@ -78,7 +78,7 @@ public ListFederationPoliciesResponse list( apiClient.configuredAccountID(), request.getServicePrincipalId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFederationPoliciesResponse.class); } catch (IOException e) { @@ -96,7 +96,7 @@ public FederationPolicy update(UpdateServicePrincipalFederationPolicyRequest req request.getPolicyId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FederationPolicy.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java index 3579430b5..d287c7a79 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java @@ -24,7 +24,7 @@ public CreateServicePrincipalSecretResponse create(CreateServicePrincipalSecretR apiClient.configuredAccountID(), request.getServicePrincipalId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateServicePrincipalSecretResponse.class); @@ -43,7 +43,7 @@ public void delete(DeleteServicePrincipalSecretRequest request) { request.getSecretId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -58,7 +58,7 @@ public ListServicePrincipalSecretsResponse list(ListServicePrincipalSecretsReque apiClient.configuredAccountID(), request.getServicePrincipalId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListServicePrincipalSecretsResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java index ac3b6fd36..7b716ae05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenAccessPolicy.TokenAccessPolicySerializer.class) +@JsonDeserialize(using = TokenAccessPolicy.TokenAccessPolicyDeserializer.class) public class TokenAccessPolicy { /** access token time to live in minutes */ - @JsonProperty("access_token_ttl_in_minutes") private Long accessTokenTtlInMinutes; /** refresh token time to live in minutes */ - @JsonProperty("refresh_token_ttl_in_minutes") private Long refreshTokenTtlInMinutes; public TokenAccessPolicy setAccessTokenTtlInMinutes(Long accessTokenTtlInMinutes) { @@ -56,4 +65,40 @@ public String toString() { .add("refreshTokenTtlInMinutes", refreshTokenTtlInMinutes) .toString(); } + + TokenAccessPolicyPb toPb() { + TokenAccessPolicyPb pb = new TokenAccessPolicyPb(); + pb.setAccessTokenTtlInMinutes(accessTokenTtlInMinutes); + pb.setRefreshTokenTtlInMinutes(refreshTokenTtlInMinutes); + + return pb; + } + + static TokenAccessPolicy fromPb(TokenAccessPolicyPb pb) { + TokenAccessPolicy model = new TokenAccessPolicy(); + model.setAccessTokenTtlInMinutes(pb.getAccessTokenTtlInMinutes()); + model.setRefreshTokenTtlInMinutes(pb.getRefreshTokenTtlInMinutes()); + + return model; + } + + public static class TokenAccessPolicySerializer extends JsonSerializer { + @Override + public void serialize(TokenAccessPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenAccessPolicyPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenAccessPolicyDeserializer extends JsonDeserializer { + @Override + public TokenAccessPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenAccessPolicyPb pb = mapper.readValue(p, TokenAccessPolicyPb.class); + return TokenAccessPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicyPb.java new file mode 100755 index 000000000..cd2b1da15 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicyPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenAccessPolicyPb { + @JsonProperty("access_token_ttl_in_minutes") + private Long accessTokenTtlInMinutes; + + @JsonProperty("refresh_token_ttl_in_minutes") + private Long refreshTokenTtlInMinutes; + + public TokenAccessPolicyPb setAccessTokenTtlInMinutes(Long accessTokenTtlInMinutes) { + this.accessTokenTtlInMinutes = accessTokenTtlInMinutes; + return this; + } + + public Long getAccessTokenTtlInMinutes() { + return accessTokenTtlInMinutes; + } + + public TokenAccessPolicyPb setRefreshTokenTtlInMinutes(Long refreshTokenTtlInMinutes) { + this.refreshTokenTtlInMinutes = refreshTokenTtlInMinutes; + return this; + } + + public Long getRefreshTokenTtlInMinutes() { + return refreshTokenTtlInMinutes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + 
if (o == null || getClass() != o.getClass()) return false; + TokenAccessPolicyPb that = (TokenAccessPolicyPb) o; + return Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes) + && Objects.equals(refreshTokenTtlInMinutes, that.refreshTokenTtlInMinutes); + } + + @Override + public int hashCode() { + return Objects.hash(accessTokenTtlInMinutes, refreshTokenTtlInMinutes); + } + + @Override + public String toString() { + return new ToStringer(TokenAccessPolicyPb.class) + .add("accessTokenTtlInMinutes", accessTokenTtlInMinutes) + .add("refreshTokenTtlInMinutes", refreshTokenTtlInMinutes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java index c30aa6595..d593a3e77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java @@ -3,21 +3,33 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** Update account federation policy */ @Generated +@JsonSerialize( + using = + UpdateAccountFederationPolicyRequest.UpdateAccountFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + UpdateAccountFederationPolicyRequest.UpdateAccountFederationPolicyRequestDeserializer.class) public class UpdateAccountFederationPolicyRequest { /** */ - @JsonProperty("policy") private FederationPolicy policy; /** The identifier for the federation policy. */ - @JsonIgnore private String policyId; + private String policyId; /** * The field mask specifies which fields of the policy to update. To specify multiple fields in @@ -26,8 +38,6 @@ public class UpdateAccountFederationPolicyRequest { * policy provided in the update request will overwrite the corresponding fields in the existing * policy. Example value: 'description,oidc_policy.audiences'. */ - @JsonIgnore - @QueryParam("update_mask") private String updateMask; public UpdateAccountFederationPolicyRequest setPolicy(FederationPolicy policy) { @@ -80,4 +90,46 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateAccountFederationPolicyRequestPb toPb() { + UpdateAccountFederationPolicyRequestPb pb = new UpdateAccountFederationPolicyRequestPb(); + pb.setPolicy(policy); + pb.setPolicyId(policyId); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateAccountFederationPolicyRequest fromPb(UpdateAccountFederationPolicyRequestPb pb) { + UpdateAccountFederationPolicyRequest model = new UpdateAccountFederationPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setPolicyId(pb.getPolicyId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateAccountFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAccountFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAccountFederationPolicyRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAccountFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateAccountFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAccountFederationPolicyRequestPb pb = + mapper.readValue(p, UpdateAccountFederationPolicyRequestPb.class); + return UpdateAccountFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequestPb.java new file mode 100755 index 000000000..8e7986aa5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update account federation policy */ +@Generated +class UpdateAccountFederationPolicyRequestPb { + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore private String policyId; + + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateAccountFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public UpdateAccountFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public UpdateAccountFederationPolicyRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAccountFederationPolicyRequestPb that = (UpdateAccountFederationPolicyRequestPb) o; + return Objects.equals(policy, that.policy) + && Objects.equals(policyId, that.policyId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(policy, policyId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateAccountFederationPolicyRequestPb.class) + .add("policy", policy) + .add("policyId", policyId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java index e310550a4..a6f49b1b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java @@ -4,36 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCustomAppIntegration.UpdateCustomAppIntegrationSerializer.class) +@JsonDeserialize(using = UpdateCustomAppIntegration.UpdateCustomAppIntegrationDeserializer.class) public class UpdateCustomAppIntegration { /** */ - @JsonIgnore private String integrationId; + private String integrationId; /** List of OAuth redirect urls to be updated in the custom OAuth app integration */ - @JsonProperty("redirect_urls") private Collection redirectUrls; /** * List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect * URIs this will fully replace the existing values instead of appending */ - @JsonProperty("scopes") private Collection scopes; /** Token access policy to be updated in the custom OAuth app integration */ - @JsonProperty("token_access_policy") private 
TokenAccessPolicy tokenAccessPolicy; /** * Scopes that will need to be consented by end user to mint the access token. If the user does * not authorize the access token will not be minted. Must be a subset of scopes. */ - @JsonProperty("user_authorized_scopes") private Collection userAuthorizedScopes; public UpdateCustomAppIntegration setIntegrationId(String integrationId) { @@ -110,4 +116,49 @@ public String toString() { .add("userAuthorizedScopes", userAuthorizedScopes) .toString(); } + + UpdateCustomAppIntegrationPb toPb() { + UpdateCustomAppIntegrationPb pb = new UpdateCustomAppIntegrationPb(); + pb.setIntegrationId(integrationId); + pb.setRedirectUrls(redirectUrls); + pb.setScopes(scopes); + pb.setTokenAccessPolicy(tokenAccessPolicy); + pb.setUserAuthorizedScopes(userAuthorizedScopes); + + return pb; + } + + static UpdateCustomAppIntegration fromPb(UpdateCustomAppIntegrationPb pb) { + UpdateCustomAppIntegration model = new UpdateCustomAppIntegration(); + model.setIntegrationId(pb.getIntegrationId()); + model.setRedirectUrls(pb.getRedirectUrls()); + model.setScopes(pb.getScopes()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + model.setUserAuthorizedScopes(pb.getUserAuthorizedScopes()); + + return model; + } + + public static class UpdateCustomAppIntegrationSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCustomAppIntegration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCustomAppIntegrationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCustomAppIntegrationDeserializer + extends JsonDeserializer { + @Override + public UpdateCustomAppIntegration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCustomAppIntegrationPb pb = mapper.readValue(p, UpdateCustomAppIntegrationPb.class); + return UpdateCustomAppIntegration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java index 45f262320..e0d49809e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateCustomAppIntegrationOutput.UpdateCustomAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = UpdateCustomAppIntegrationOutput.UpdateCustomAppIntegrationOutputDeserializer.class) public class UpdateCustomAppIntegrationOutput { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateCustomAppIntegrationOutput.class).toString(); } + + UpdateCustomAppIntegrationOutputPb toPb() { + UpdateCustomAppIntegrationOutputPb pb = new UpdateCustomAppIntegrationOutputPb(); + + return pb; + } + + static 
UpdateCustomAppIntegrationOutput fromPb(UpdateCustomAppIntegrationOutputPb pb) { + UpdateCustomAppIntegrationOutput model = new UpdateCustomAppIntegrationOutput(); + + return model; + } + + public static class UpdateCustomAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCustomAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCustomAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCustomAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public UpdateCustomAppIntegrationOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCustomAppIntegrationOutputPb pb = + mapper.readValue(p, UpdateCustomAppIntegrationOutputPb.class); + return UpdateCustomAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutputPb.java new file mode 100755 index 000000000..a5edd276f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutputPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateCustomAppIntegrationOutputPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateCustomAppIntegrationOutputPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationPb.java new file mode 100755 index 000000000..27da6c920 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationPb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateCustomAppIntegrationPb { + @JsonIgnore private String integrationId; + + @JsonProperty("redirect_urls") + private Collection redirectUrls; + + @JsonProperty("scopes") + private Collection scopes; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + @JsonProperty("user_authorized_scopes") + private Collection userAuthorizedScopes; + + public UpdateCustomAppIntegrationPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + public UpdateCustomAppIntegrationPb setRedirectUrls(Collection redirectUrls) { + this.redirectUrls = redirectUrls; + return this; + } + + public Collection getRedirectUrls() { + return redirectUrls; + } + + public UpdateCustomAppIntegrationPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + public UpdateCustomAppIntegrationPb setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + public UpdateCustomAppIntegrationPb setUserAuthorizedScopes( + Collection userAuthorizedScopes) { + this.userAuthorizedScopes = userAuthorizedScopes; + return this; + } + + public Collection getUserAuthorizedScopes() { + return userAuthorizedScopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCustomAppIntegrationPb that = 
(UpdateCustomAppIntegrationPb) o; + return Objects.equals(integrationId, that.integrationId) + && Objects.equals(redirectUrls, that.redirectUrls) + && Objects.equals(scopes, that.scopes) + && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy) + && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes); + } + + @Override + public int hashCode() { + return Objects.hash( + integrationId, redirectUrls, scopes, tokenAccessPolicy, userAuthorizedScopes); + } + + @Override + public String toString() { + return new ToStringer(UpdateCustomAppIntegrationPb.class) + .add("integrationId", integrationId) + .add("redirectUrls", redirectUrls) + .add("scopes", scopes) + .add("tokenAccessPolicy", tokenAccessPolicy) + .add("userAuthorizedScopes", userAuthorizedScopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java index 0ca00b1c1..1342b86f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdatePublishedAppIntegration.UpdatePublishedAppIntegrationSerializer.class) +@JsonDeserialize( + using = UpdatePublishedAppIntegration.UpdatePublishedAppIntegrationDeserializer.class) public class UpdatePublishedAppIntegration { /** */ - @JsonIgnore private String integrationId; + private String integrationId; /** Token access policy to be updated in the published OAuth app integration */ - @JsonProperty("token_access_policy") private TokenAccessPolicy tokenAccessPolicy; public UpdatePublishedAppIntegration setIntegrationId(String integrationId) { @@ -56,4 +66,44 @@ public String toString() { .add("tokenAccessPolicy", tokenAccessPolicy) .toString(); } + + UpdatePublishedAppIntegrationPb toPb() { + UpdatePublishedAppIntegrationPb pb = new UpdatePublishedAppIntegrationPb(); + pb.setIntegrationId(integrationId); + pb.setTokenAccessPolicy(tokenAccessPolicy); + + return pb; + } + + static UpdatePublishedAppIntegration fromPb(UpdatePublishedAppIntegrationPb pb) { + UpdatePublishedAppIntegration model = new UpdatePublishedAppIntegration(); + model.setIntegrationId(pb.getIntegrationId()); + model.setTokenAccessPolicy(pb.getTokenAccessPolicy()); + + return model; + } + + public static class UpdatePublishedAppIntegrationSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePublishedAppIntegration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePublishedAppIntegrationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePublishedAppIntegrationDeserializer + extends JsonDeserializer { + @Override + public UpdatePublishedAppIntegration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePublishedAppIntegrationPb pb = + mapper.readValue(p, UpdatePublishedAppIntegrationPb.class); + return UpdatePublishedAppIntegration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java index efa8717d1..84dd89583 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java @@ -4,9 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdatePublishedAppIntegrationOutput.UpdatePublishedAppIntegrationOutputSerializer.class) +@JsonDeserialize( + using = + UpdatePublishedAppIntegrationOutput.UpdatePublishedAppIntegrationOutputDeserializer.class) public class UpdatePublishedAppIntegrationOutput { @Override @@ -25,4 +40,40 @@ public int hashCode() { public String toString() { return new ToStringer(UpdatePublishedAppIntegrationOutput.class).toString(); } + + UpdatePublishedAppIntegrationOutputPb toPb() { + UpdatePublishedAppIntegrationOutputPb pb = new UpdatePublishedAppIntegrationOutputPb(); + + 
return pb; + } + + static UpdatePublishedAppIntegrationOutput fromPb(UpdatePublishedAppIntegrationOutputPb pb) { + UpdatePublishedAppIntegrationOutput model = new UpdatePublishedAppIntegrationOutput(); + + return model; + } + + public static class UpdatePublishedAppIntegrationOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePublishedAppIntegrationOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePublishedAppIntegrationOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePublishedAppIntegrationOutputDeserializer + extends JsonDeserializer { + @Override + public UpdatePublishedAppIntegrationOutput deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePublishedAppIntegrationOutputPb pb = + mapper.readValue(p, UpdatePublishedAppIntegrationOutputPb.class); + return UpdatePublishedAppIntegrationOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutputPb.java new file mode 100755 index 000000000..a667a1631 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutputPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdatePublishedAppIntegrationOutputPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdatePublishedAppIntegrationOutputPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationPb.java new file mode 100755 index 000000000..cbfad6b0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdatePublishedAppIntegrationPb { + @JsonIgnore private String integrationId; + + @JsonProperty("token_access_policy") + private TokenAccessPolicy tokenAccessPolicy; + + public UpdatePublishedAppIntegrationPb setIntegrationId(String integrationId) { + this.integrationId = integrationId; + return this; + } + + public String getIntegrationId() { + return integrationId; + } + + public UpdatePublishedAppIntegrationPb setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) { + this.tokenAccessPolicy = tokenAccessPolicy; + return this; + } + + public TokenAccessPolicy getTokenAccessPolicy() { + return tokenAccessPolicy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePublishedAppIntegrationPb that = (UpdatePublishedAppIntegrationPb) o; + return Objects.equals(integrationId, that.integrationId) + && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(integrationId, tokenAccessPolicy); + } + + @Override + public String toString() { + return new ToStringer(UpdatePublishedAppIntegrationPb.class) + .add("integrationId", integrationId) + .add("tokenAccessPolicy", tokenAccessPolicy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java index 4f03417eb..8bd98a6ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java @@ -3,24 +3,38 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update service principal federation policy */ @Generated +@JsonSerialize( + using = + UpdateServicePrincipalFederationPolicyRequest + .UpdateServicePrincipalFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = + UpdateServicePrincipalFederationPolicyRequest + .UpdateServicePrincipalFederationPolicyRequestDeserializer.class) public class UpdateServicePrincipalFederationPolicyRequest { /** */ - @JsonProperty("policy") private FederationPolicy policy; /** The identifier for the federation policy. */ - @JsonIgnore private String policyId; + private String policyId; /** The service principal id for the federation policy. */ - @JsonIgnore private Long servicePrincipalId; + private Long servicePrincipalId; /** * The field mask specifies which fields of the policy to update. 
To specify multiple fields in @@ -29,8 +43,6 @@ public class UpdateServicePrincipalFederationPolicyRequest { * policy provided in the update request will overwrite the corresponding fields in the existing * policy. Example value: 'description,oidc_policy.audiences'. */ - @JsonIgnore - @QueryParam("update_mask") private String updateMask; public UpdateServicePrincipalFederationPolicyRequest setPolicy(FederationPolicy policy) { @@ -96,4 +108,53 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateServicePrincipalFederationPolicyRequestPb toPb() { + UpdateServicePrincipalFederationPolicyRequestPb pb = + new UpdateServicePrincipalFederationPolicyRequestPb(); + pb.setPolicy(policy); + pb.setPolicyId(policyId); + pb.setServicePrincipalId(servicePrincipalId); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateServicePrincipalFederationPolicyRequest fromPb( + UpdateServicePrincipalFederationPolicyRequestPb pb) { + UpdateServicePrincipalFederationPolicyRequest model = + new UpdateServicePrincipalFederationPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setPolicyId(pb.getPolicyId()); + model.setServicePrincipalId(pb.getServicePrincipalId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateServicePrincipalFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateServicePrincipalFederationPolicyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateServicePrincipalFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateServicePrincipalFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateServicePrincipalFederationPolicyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateServicePrincipalFederationPolicyRequestPb pb = + mapper.readValue(p, UpdateServicePrincipalFederationPolicyRequestPb.class); + return UpdateServicePrincipalFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequestPb.java new file mode 100755 index 000000000..6347c1a47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequestPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.oauth2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update service principal federation policy */ +@Generated +class UpdateServicePrincipalFederationPolicyRequestPb { + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore private String policyId; + + @JsonIgnore private Long servicePrincipalId; + + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateServicePrincipalFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public UpdateServicePrincipalFederationPolicyRequestPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public UpdateServicePrincipalFederationPolicyRequestPb setServicePrincipalId( + Long servicePrincipalId) { + 
this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + + public UpdateServicePrincipalFederationPolicyRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateServicePrincipalFederationPolicyRequestPb that = + (UpdateServicePrincipalFederationPolicyRequestPb) o; + return Objects.equals(policy, that.policy) + && Objects.equals(policyId, that.policyId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(policy, policyId, servicePrincipalId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateServicePrincipalFederationPolicyRequestPb.class) + .add("policy", policy) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Converters.java new file mode 100755 index 000000000..c7a70261d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.pipelines; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration 
durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index b81984eca..67b85ee23 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePipeline.CreatePipelineSerializer.class) +@JsonDeserialize(using = CreatePipeline.CreatePipelineDeserializer.class) public class CreatePipeline { /** If false, deployment will fail if name conflicts with that of another pipeline. */ - @JsonProperty("allow_duplicate_names") private Boolean allowDuplicateNames; /** Budget policy of this pipeline. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** @@ -25,82 +34,63 @@ public class CreatePipeline { * `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity * Catalog. */ - @JsonProperty("catalog") private String catalog; /** DLT Release Channel that specifies which version to use. */ - @JsonProperty("channel") private String channel; /** Cluster settings for this pipeline deployment. */ - @JsonProperty("clusters") private Collection clusters; /** String-String configuration for this pipeline execution. 
*/ - @JsonProperty("configuration") private Map configuration; /** Whether the pipeline is continuous or triggered. This replaces `trigger`. */ - @JsonProperty("continuous") private Boolean continuous; /** Deployment type of this pipeline. */ - @JsonProperty("deployment") private PipelineDeployment deployment; /** Whether the pipeline is in Development mode. Defaults to false. */ - @JsonProperty("development") private Boolean development; /** */ - @JsonProperty("dry_run") private Boolean dryRun; /** Pipeline product edition. */ - @JsonProperty("edition") private String edition; /** Event log configuration for this pipeline */ - @JsonProperty("event_log") private EventLogSpec eventLog; /** Filters on which Pipeline packages to include in the deployed graph. */ - @JsonProperty("filters") private Filters filters; /** The definition of a gateway pipeline to support change data capture. */ - @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; /** Unique identifier for this pipeline. */ - @JsonProperty("id") private String id; /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the * 'libraries', 'schema', 'target', or 'catalog' settings. */ - @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; /** Libraries or code needed by this deployment. */ - @JsonProperty("libraries") private Collection libraries; /** Friendly identifier for this pipeline. */ - @JsonProperty("name") private String name; /** List of notification settings for this pipeline. */ - @JsonProperty("notifications") private Collection notifications; /** Whether Photon is enabled for this pipeline. */ - @JsonProperty("photon") private Boolean photon; /** Restart window of this pipeline. 
*/ - @JsonProperty("restart_window") private RestartWindow restartWindow; /** @@ -108,7 +98,6 @@ public class CreatePipeline { * the Databricks user interface and it is added to sys.path when executing Python sources during * pipeline execution. */ - @JsonProperty("root_path") private String rootPath; /** @@ -119,19 +108,15 @@ public class CreatePipeline { *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an * error is thrown. */ - @JsonProperty("run_as") private RunAs runAs; /** The default schema (database) where tables are read from or published to. */ - @JsonProperty("schema") private String schema; /** Whether serverless compute is enabled for this pipeline. */ - @JsonProperty("serverless") private Boolean serverless; /** DBFS root directory for storing checkpoints and tables. */ - @JsonProperty("storage") private String storage; /** @@ -139,7 +124,6 @@ public class CreatePipeline { * and are therefore subject to the same limitations. A maximum of 25 tags can be added to the * pipeline. */ - @JsonProperty("tags") private Map tags; /** @@ -147,11 +131,9 @@ public class CreatePipeline { * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is * deprecated for pipeline creation in favor of the `schema` field. */ - @JsonProperty("target") private String target; /** Which pipeline trigger to use. Deprecated: Use `continuous` instead. 
*/ - @JsonProperty("trigger") private PipelineTrigger trigger; public CreatePipeline setAllowDuplicateNames(Boolean allowDuplicateNames) { @@ -519,4 +501,94 @@ public String toString() { .add("trigger", trigger) .toString(); } + + CreatePipelinePb toPb() { + CreatePipelinePb pb = new CreatePipelinePb(); + pb.setAllowDuplicateNames(allowDuplicateNames); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setCatalog(catalog); + pb.setChannel(channel); + pb.setClusters(clusters); + pb.setConfiguration(configuration); + pb.setContinuous(continuous); + pb.setDeployment(deployment); + pb.setDevelopment(development); + pb.setDryRun(dryRun); + pb.setEdition(edition); + pb.setEventLog(eventLog); + pb.setFilters(filters); + pb.setGatewayDefinition(gatewayDefinition); + pb.setId(id); + pb.setIngestionDefinition(ingestionDefinition); + pb.setLibraries(libraries); + pb.setName(name); + pb.setNotifications(notifications); + pb.setPhoton(photon); + pb.setRestartWindow(restartWindow); + pb.setRootPath(rootPath); + pb.setRunAs(runAs); + pb.setSchema(schema); + pb.setServerless(serverless); + pb.setStorage(storage); + pb.setTags(tags); + pb.setTarget(target); + pb.setTrigger(trigger); + + return pb; + } + + static CreatePipeline fromPb(CreatePipelinePb pb) { + CreatePipeline model = new CreatePipeline(); + model.setAllowDuplicateNames(pb.getAllowDuplicateNames()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setCatalog(pb.getCatalog()); + model.setChannel(pb.getChannel()); + model.setClusters(pb.getClusters()); + model.setConfiguration(pb.getConfiguration()); + model.setContinuous(pb.getContinuous()); + model.setDeployment(pb.getDeployment()); + model.setDevelopment(pb.getDevelopment()); + model.setDryRun(pb.getDryRun()); + model.setEdition(pb.getEdition()); + model.setEventLog(pb.getEventLog()); + model.setFilters(pb.getFilters()); + model.setGatewayDefinition(pb.getGatewayDefinition()); + model.setId(pb.getId()); + model.setIngestionDefinition(pb.getIngestionDefinition()); 
+ model.setLibraries(pb.getLibraries()); + model.setName(pb.getName()); + model.setNotifications(pb.getNotifications()); + model.setPhoton(pb.getPhoton()); + model.setRestartWindow(pb.getRestartWindow()); + model.setRootPath(pb.getRootPath()); + model.setRunAs(pb.getRunAs()); + model.setSchema(pb.getSchema()); + model.setServerless(pb.getServerless()); + model.setStorage(pb.getStorage()); + model.setTags(pb.getTags()); + model.setTarget(pb.getTarget()); + model.setTrigger(pb.getTrigger()); + + return model; + } + + public static class CreatePipelineSerializer extends JsonSerializer { + @Override + public void serialize(CreatePipeline value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePipelinePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePipelineDeserializer extends JsonDeserializer { + @Override + public CreatePipeline deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePipelinePb pb = mapper.readValue(p, CreatePipelinePb.class); + return CreatePipeline.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelinePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelinePb.java new file mode 100755 index 000000000..0c05ea1df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelinePb.java @@ -0,0 +1,467 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreatePipelinePb { + @JsonProperty("allow_duplicate_names") + private Boolean allowDuplicateNames; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("channel") + private String channel; + + @JsonProperty("clusters") + private Collection clusters; + + @JsonProperty("configuration") + private Map configuration; + + @JsonProperty("continuous") + private Boolean continuous; + + @JsonProperty("deployment") + private PipelineDeployment deployment; + + @JsonProperty("development") + private Boolean development; + + @JsonProperty("dry_run") + private Boolean dryRun; + + @JsonProperty("edition") + private String edition; + + @JsonProperty("event_log") + private EventLogSpec eventLog; + + @JsonProperty("filters") + private Filters filters; + + @JsonProperty("gateway_definition") + private IngestionGatewayPipelineDefinition gatewayDefinition; + + @JsonProperty("id") + private String id; + + @JsonProperty("ingestion_definition") + private IngestionPipelineDefinition ingestionDefinition; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("name") + private String name; + + @JsonProperty("notifications") + private Collection notifications; + + @JsonProperty("photon") + private Boolean photon; + + @JsonProperty("restart_window") + private RestartWindow restartWindow; + + @JsonProperty("root_path") + private String rootPath; + + @JsonProperty("run_as") + private RunAs runAs; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("serverless") + private Boolean serverless; + + @JsonProperty("storage") + private String storage; + + 
@JsonProperty("tags") + private Map tags; + + @JsonProperty("target") + private String target; + + @JsonProperty("trigger") + private PipelineTrigger trigger; + + public CreatePipelinePb setAllowDuplicateNames(Boolean allowDuplicateNames) { + this.allowDuplicateNames = allowDuplicateNames; + return this; + } + + public Boolean getAllowDuplicateNames() { + return allowDuplicateNames; + } + + public CreatePipelinePb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreatePipelinePb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public CreatePipelinePb setChannel(String channel) { + this.channel = channel; + return this; + } + + public String getChannel() { + return channel; + } + + public CreatePipelinePb setClusters(Collection clusters) { + this.clusters = clusters; + return this; + } + + public Collection getClusters() { + return clusters; + } + + public CreatePipelinePb setConfiguration(Map configuration) { + this.configuration = configuration; + return this; + } + + public Map getConfiguration() { + return configuration; + } + + public CreatePipelinePb setContinuous(Boolean continuous) { + this.continuous = continuous; + return this; + } + + public Boolean getContinuous() { + return continuous; + } + + public CreatePipelinePb setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + + public CreatePipelinePb setDevelopment(Boolean development) { + this.development = development; + return this; + } + + public Boolean getDevelopment() { + return development; + } + + public CreatePipelinePb setDryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + public Boolean getDryRun() { + return dryRun; + } + + public 
CreatePipelinePb setEdition(String edition) { + this.edition = edition; + return this; + } + + public String getEdition() { + return edition; + } + + public CreatePipelinePb setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + + public CreatePipelinePb setFilters(Filters filters) { + this.filters = filters; + return this; + } + + public Filters getFilters() { + return filters; + } + + public CreatePipelinePb setGatewayDefinition( + IngestionGatewayPipelineDefinition gatewayDefinition) { + this.gatewayDefinition = gatewayDefinition; + return this; + } + + public IngestionGatewayPipelineDefinition getGatewayDefinition() { + return gatewayDefinition; + } + + public CreatePipelinePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CreatePipelinePb setIngestionDefinition(IngestionPipelineDefinition ingestionDefinition) { + this.ingestionDefinition = ingestionDefinition; + return this; + } + + public IngestionPipelineDefinition getIngestionDefinition() { + return ingestionDefinition; + } + + public CreatePipelinePb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public CreatePipelinePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreatePipelinePb setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public CreatePipelinePb setPhoton(Boolean photon) { + this.photon = photon; + return this; + } + + public Boolean getPhoton() { + return photon; + } + + public CreatePipelinePb setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow 
getRestartWindow() { + return restartWindow; + } + + public CreatePipelinePb setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + + public CreatePipelinePb setRunAs(RunAs runAs) { + this.runAs = runAs; + return this; + } + + public RunAs getRunAs() { + return runAs; + } + + public CreatePipelinePb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public CreatePipelinePb setServerless(Boolean serverless) { + this.serverless = serverless; + return this; + } + + public Boolean getServerless() { + return serverless; + } + + public CreatePipelinePb setStorage(String storage) { + this.storage = storage; + return this; + } + + public String getStorage() { + return storage; + } + + public CreatePipelinePb setTags(Map tags) { + this.tags = tags; + return this; + } + + public Map getTags() { + return tags; + } + + public CreatePipelinePb setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + public CreatePipelinePb setTrigger(PipelineTrigger trigger) { + this.trigger = trigger; + return this; + } + + public PipelineTrigger getTrigger() { + return trigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePipelinePb that = (CreatePipelinePb) o; + return Objects.equals(allowDuplicateNames, that.allowDuplicateNames) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(catalog, that.catalog) + && Objects.equals(channel, that.channel) + && Objects.equals(clusters, that.clusters) + && Objects.equals(configuration, that.configuration) + && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) + && Objects.equals(development, that.development) + && Objects.equals(dryRun, that.dryRun) + && 
Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) + && Objects.equals(filters, that.filters) + && Objects.equals(gatewayDefinition, that.gatewayDefinition) + && Objects.equals(id, that.id) + && Objects.equals(ingestionDefinition, that.ingestionDefinition) + && Objects.equals(libraries, that.libraries) + && Objects.equals(name, that.name) + && Objects.equals(notifications, that.notifications) + && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) + && Objects.equals(runAs, that.runAs) + && Objects.equals(schema, that.schema) + && Objects.equals(serverless, that.serverless) + && Objects.equals(storage, that.storage) + && Objects.equals(tags, that.tags) + && Objects.equals(target, that.target) + && Objects.equals(trigger, that.trigger); + } + + @Override + public int hashCode() { + return Objects.hash( + allowDuplicateNames, + budgetPolicyId, + catalog, + channel, + clusters, + configuration, + continuous, + deployment, + development, + dryRun, + edition, + eventLog, + filters, + gatewayDefinition, + id, + ingestionDefinition, + libraries, + name, + notifications, + photon, + restartWindow, + rootPath, + runAs, + schema, + serverless, + storage, + tags, + target, + trigger); + } + + @Override + public String toString() { + return new ToStringer(CreatePipelinePb.class) + .add("allowDuplicateNames", allowDuplicateNames) + .add("budgetPolicyId", budgetPolicyId) + .add("catalog", catalog) + .add("channel", channel) + .add("clusters", clusters) + .add("configuration", configuration) + .add("continuous", continuous) + .add("deployment", deployment) + .add("development", development) + .add("dryRun", dryRun) + .add("edition", edition) + .add("eventLog", eventLog) + .add("filters", filters) + .add("gatewayDefinition", gatewayDefinition) + .add("id", id) + .add("ingestionDefinition", ingestionDefinition) + .add("libraries", libraries) + .add("name", name) + 
.add("notifications", notifications) + .add("photon", photon) + .add("restartWindow", restartWindow) + .add("rootPath", rootPath) + .add("runAs", runAs) + .add("schema", schema) + .add("serverless", serverless) + .add("storage", storage) + .add("tags", tags) + .add("target", target) + .add("trigger", trigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java index edfeb2f33..fb820482e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePipelineResponse.CreatePipelineResponseSerializer.class) +@JsonDeserialize(using = CreatePipelineResponse.CreatePipelineResponseDeserializer.class) public class CreatePipelineResponse { /** Only returned when dry_run is true. */ - @JsonProperty("effective_settings") private PipelineSpec effectiveSettings; /** The unique identifier for the newly created pipeline. Only returned when dry_run is false. 
*/ - @JsonProperty("pipeline_id") private String pipelineId; public CreatePipelineResponse setEffectiveSettings(PipelineSpec effectiveSettings) { @@ -56,4 +65,43 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + CreatePipelineResponsePb toPb() { + CreatePipelineResponsePb pb = new CreatePipelineResponsePb(); + pb.setEffectiveSettings(effectiveSettings); + pb.setPipelineId(pipelineId); + + return pb; + } + + static CreatePipelineResponse fromPb(CreatePipelineResponsePb pb) { + CreatePipelineResponse model = new CreatePipelineResponse(); + model.setEffectiveSettings(pb.getEffectiveSettings()); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class CreatePipelineResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePipelineResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePipelineResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePipelineResponseDeserializer + extends JsonDeserializer { + @Override + public CreatePipelineResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePipelineResponsePb pb = mapper.readValue(p, CreatePipelineResponsePb.class); + return CreatePipelineResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponsePb.java new file mode 100755 index 000000000..0a375a986 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreatePipelineResponsePb { + @JsonProperty("effective_settings") + private PipelineSpec effectiveSettings; + + @JsonProperty("pipeline_id") + private String pipelineId; + + public CreatePipelineResponsePb setEffectiveSettings(PipelineSpec effectiveSettings) { + this.effectiveSettings = effectiveSettings; + return this; + } + + public PipelineSpec getEffectiveSettings() { + return effectiveSettings; + } + + public CreatePipelineResponsePb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePipelineResponsePb that = (CreatePipelineResponsePb) o; + return Objects.equals(effectiveSettings, that.effectiveSettings) + && Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(effectiveSettings, pipelineId); + } + + @Override + public String toString() { + return new ToStringer(CreatePipelineResponsePb.class) + .add("effectiveSettings", effectiveSettings) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java index bc550fc4f..182a7bf88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CronTrigger.CronTriggerSerializer.class) +@JsonDeserialize(using = CronTrigger.CronTriggerDeserializer.class) public class CronTrigger { /** */ - @JsonProperty("quartz_cron_schedule") private String quartzCronSchedule; /** */ - @JsonProperty("timezone_id") private String timezoneId; public CronTrigger setQuartzCronSchedule(String quartzCronSchedule) { @@ -56,4 +65,39 @@ public String toString() { .add("timezoneId", timezoneId) .toString(); } + + CronTriggerPb toPb() { + CronTriggerPb pb = new CronTriggerPb(); + pb.setQuartzCronSchedule(quartzCronSchedule); + pb.setTimezoneId(timezoneId); + + return pb; + } + + static CronTrigger fromPb(CronTriggerPb pb) { + CronTrigger model = new CronTrigger(); + model.setQuartzCronSchedule(pb.getQuartzCronSchedule()); + model.setTimezoneId(pb.getTimezoneId()); + + return model; + } + + public static class CronTriggerSerializer extends JsonSerializer { + @Override + public void serialize(CronTrigger value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CronTriggerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CronTriggerDeserializer extends JsonDeserializer { + @Override + public CronTrigger deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CronTriggerPb pb = mapper.readValue(p, CronTriggerPb.class); + return CronTrigger.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTriggerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTriggerPb.java new file mode 100755 index 000000000..fa33e990e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTriggerPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CronTriggerPb { + @JsonProperty("quartz_cron_schedule") + private String quartzCronSchedule; + + @JsonProperty("timezone_id") + private String timezoneId; + + public CronTriggerPb setQuartzCronSchedule(String quartzCronSchedule) { + this.quartzCronSchedule = quartzCronSchedule; + return this; + } + + public String getQuartzCronSchedule() { + return quartzCronSchedule; + } + + public CronTriggerPb setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronTriggerPb that = (CronTriggerPb) o; + return Objects.equals(quartzCronSchedule, that.quartzCronSchedule) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(quartzCronSchedule, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(CronTriggerPb.class) + .add("quartzCronSchedule", quartzCronSchedule) + 
.add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java index 8c09e8223..2a753a4e8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DataPlaneId.DataPlaneIdSerializer.class) +@JsonDeserialize(using = DataPlaneId.DataPlaneIdDeserializer.class) public class DataPlaneId { /** The instance name of the data plane emitting an event. */ - @JsonProperty("instance") private String instance; /** A sequence number, unique and increasing within the data plane instance. 
*/ - @JsonProperty("seq_no") private Long seqNo; public DataPlaneId setInstance(String instance) { @@ -55,4 +64,39 @@ public String toString() { .add("seqNo", seqNo) .toString(); } + + DataPlaneIdPb toPb() { + DataPlaneIdPb pb = new DataPlaneIdPb(); + pb.setInstance(instance); + pb.setSeqNo(seqNo); + + return pb; + } + + static DataPlaneId fromPb(DataPlaneIdPb pb) { + DataPlaneId model = new DataPlaneId(); + model.setInstance(pb.getInstance()); + model.setSeqNo(pb.getSeqNo()); + + return model; + } + + public static class DataPlaneIdSerializer extends JsonSerializer { + @Override + public void serialize(DataPlaneId value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataPlaneIdPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataPlaneIdDeserializer extends JsonDeserializer { + @Override + public DataPlaneId deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataPlaneIdPb pb = mapper.readValue(p, DataPlaneIdPb.class); + return DataPlaneId.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneIdPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneIdPb.java new file mode 100755 index 000000000..b4464a6e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneIdPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DataPlaneIdPb { + @JsonProperty("instance") + private String instance; + + @JsonProperty("seq_no") + private Long seqNo; + + public DataPlaneIdPb setInstance(String instance) { + this.instance = instance; + return this; + } + + public String getInstance() { + return instance; + } + + public DataPlaneIdPb setSeqNo(Long seqNo) { + this.seqNo = seqNo; + return this; + } + + public Long getSeqNo() { + return seqNo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataPlaneIdPb that = (DataPlaneIdPb) o; + return Objects.equals(instance, that.instance) && Objects.equals(seqNo, that.seqNo); + } + + @Override + public int hashCode() { + return Objects.hash(instance, seqNo); + } + + @Override + public String toString() { + return new ToStringer(DataPlaneIdPb.class) + .add("instance", instance) + .add("seqNo", seqNo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java index 53f84666e..d42cf93f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a pipeline */ @Generated +@JsonSerialize(using = DeletePipelineRequest.DeletePipelineRequestSerializer.class) +@JsonDeserialize(using = DeletePipelineRequest.DeletePipelineRequestDeserializer.class) public class DeletePipelineRequest { /** */ - @JsonIgnore private String pipelineId; + private String pipelineId; public DeletePipelineRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePipelineRequest.class).add("pipelineId", pipelineId).toString(); } + + DeletePipelineRequestPb toPb() { + DeletePipelineRequestPb pb = new DeletePipelineRequestPb(); + pb.setPipelineId(pipelineId); + + return pb; + } + + static DeletePipelineRequest fromPb(DeletePipelineRequestPb pb) { + DeletePipelineRequest model = new DeletePipelineRequest(); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class DeletePipelineRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePipelineRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePipelineRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePipelineRequestDeserializer + extends JsonDeserializer { + @Override + public DeletePipelineRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePipelineRequestPb pb = mapper.readValue(p, DeletePipelineRequestPb.class); + return DeletePipelineRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequestPb.java new file mode 100755 index 000000000..22427a600 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a pipeline */ +@Generated +class DeletePipelineRequestPb { + @JsonIgnore private String pipelineId; + + public DeletePipelineRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePipelineRequestPb that = (DeletePipelineRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(DeletePipelineRequestPb.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java index 103293d0f..30053012d 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeletePipelineResponse.DeletePipelineResponseSerializer.class) +@JsonDeserialize(using = DeletePipelineResponse.DeletePipelineResponseDeserializer.class) public class DeletePipelineResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePipelineResponse.class).toString(); } + + DeletePipelineResponsePb toPb() { + DeletePipelineResponsePb pb = new DeletePipelineResponsePb(); + + return pb; + } + + static DeletePipelineResponse fromPb(DeletePipelineResponsePb pb) { + DeletePipelineResponse model = new DeletePipelineResponse(); + + return model; + } + + public static class DeletePipelineResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePipelineResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePipelineResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePipelineResponseDeserializer + extends JsonDeserializer { + @Override + public DeletePipelineResponse 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePipelineResponsePb pb = mapper.readValue(p, DeletePipelineResponsePb.class); + return DeletePipelineResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponsePb.java new file mode 100755 index 000000000..b0188908a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeletePipelineResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeletePipelineResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 776b17166..209450c7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = EditPipeline.EditPipelineSerializer.class) +@JsonDeserialize(using = EditPipeline.EditPipelineDeserializer.class) public class EditPipeline { /** * If false, deployment will fail if name has changed and conflicts the name of another pipeline. */ - @JsonProperty("allow_duplicate_names") private Boolean allowDuplicateNames; /** Budget policy of this pipeline. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** @@ -28,88 +36,69 @@ public class EditPipeline { * `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity * Catalog. */ - @JsonProperty("catalog") private String catalog; /** DLT Release Channel that specifies which version to use. */ - @JsonProperty("channel") private String channel; /** Cluster settings for this pipeline deployment. */ - @JsonProperty("clusters") private Collection clusters; /** String-String configuration for this pipeline execution. */ - @JsonProperty("configuration") private Map configuration; /** Whether the pipeline is continuous or triggered. This replaces `trigger`. */ - @JsonProperty("continuous") private Boolean continuous; /** Deployment type of this pipeline. */ - @JsonProperty("deployment") private PipelineDeployment deployment; /** Whether the pipeline is in Development mode. 
Defaults to false. */ - @JsonProperty("development") private Boolean development; /** Pipeline product edition. */ - @JsonProperty("edition") private String edition; /** Event log configuration for this pipeline */ - @JsonProperty("event_log") private EventLogSpec eventLog; /** * If present, the last-modified time of the pipeline settings before the edit. If the settings * were modified after that time, then the request will fail with a conflict. */ - @JsonProperty("expected_last_modified") private Long expectedLastModified; /** Filters on which Pipeline packages to include in the deployed graph. */ - @JsonProperty("filters") private Filters filters; /** The definition of a gateway pipeline to support change data capture. */ - @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; /** Unique identifier for this pipeline. */ - @JsonProperty("id") private String id; /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the * 'libraries', 'schema', 'target', or 'catalog' settings. */ - @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; /** Libraries or code needed by this deployment. */ - @JsonProperty("libraries") private Collection libraries; /** Friendly identifier for this pipeline. */ - @JsonProperty("name") private String name; /** List of notification settings for this pipeline. */ - @JsonProperty("notifications") private Collection notifications; /** Whether Photon is enabled for this pipeline. */ - @JsonProperty("photon") private Boolean photon; /** Unique identifier for this pipeline. */ - @JsonIgnore private String pipelineId; + private String pipelineId; /** Restart window of this pipeline. */ - @JsonProperty("restart_window") private RestartWindow restartWindow; /** @@ -117,7 +106,6 @@ public class EditPipeline { * the Databricks user interface and it is added to sys.path when executing Python sources during * pipeline execution. 
*/ - @JsonProperty("root_path") private String rootPath; /** @@ -128,19 +116,15 @@ public class EditPipeline { *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an * error is thrown. */ - @JsonProperty("run_as") private RunAs runAs; /** The default schema (database) where tables are read from or published to. */ - @JsonProperty("schema") private String schema; /** Whether serverless compute is enabled for this pipeline. */ - @JsonProperty("serverless") private Boolean serverless; /** DBFS root directory for storing checkpoints and tables. */ - @JsonProperty("storage") private String storage; /** @@ -148,7 +132,6 @@ public class EditPipeline { * and are therefore subject to the same limitations. A maximum of 25 tags can be added to the * pipeline. */ - @JsonProperty("tags") private Map tags; /** @@ -156,11 +139,9 @@ public class EditPipeline { * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is * deprecated for pipeline creation in favor of the `schema` field. */ - @JsonProperty("target") private String target; /** Which pipeline trigger to use. Deprecated: Use `continuous` instead. 
*/ - @JsonProperty("trigger") private PipelineTrigger trigger; public EditPipeline setAllowDuplicateNames(Boolean allowDuplicateNames) { @@ -540,4 +521,95 @@ public String toString() { .add("trigger", trigger) .toString(); } + + EditPipelinePb toPb() { + EditPipelinePb pb = new EditPipelinePb(); + pb.setAllowDuplicateNames(allowDuplicateNames); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setCatalog(catalog); + pb.setChannel(channel); + pb.setClusters(clusters); + pb.setConfiguration(configuration); + pb.setContinuous(continuous); + pb.setDeployment(deployment); + pb.setDevelopment(development); + pb.setEdition(edition); + pb.setEventLog(eventLog); + pb.setExpectedLastModified(expectedLastModified); + pb.setFilters(filters); + pb.setGatewayDefinition(gatewayDefinition); + pb.setId(id); + pb.setIngestionDefinition(ingestionDefinition); + pb.setLibraries(libraries); + pb.setName(name); + pb.setNotifications(notifications); + pb.setPhoton(photon); + pb.setPipelineId(pipelineId); + pb.setRestartWindow(restartWindow); + pb.setRootPath(rootPath); + pb.setRunAs(runAs); + pb.setSchema(schema); + pb.setServerless(serverless); + pb.setStorage(storage); + pb.setTags(tags); + pb.setTarget(target); + pb.setTrigger(trigger); + + return pb; + } + + static EditPipeline fromPb(EditPipelinePb pb) { + EditPipeline model = new EditPipeline(); + model.setAllowDuplicateNames(pb.getAllowDuplicateNames()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setCatalog(pb.getCatalog()); + model.setChannel(pb.getChannel()); + model.setClusters(pb.getClusters()); + model.setConfiguration(pb.getConfiguration()); + model.setContinuous(pb.getContinuous()); + model.setDeployment(pb.getDeployment()); + model.setDevelopment(pb.getDevelopment()); + model.setEdition(pb.getEdition()); + model.setEventLog(pb.getEventLog()); + model.setExpectedLastModified(pb.getExpectedLastModified()); + model.setFilters(pb.getFilters()); + model.setGatewayDefinition(pb.getGatewayDefinition()); + 
model.setId(pb.getId()); + model.setIngestionDefinition(pb.getIngestionDefinition()); + model.setLibraries(pb.getLibraries()); + model.setName(pb.getName()); + model.setNotifications(pb.getNotifications()); + model.setPhoton(pb.getPhoton()); + model.setPipelineId(pb.getPipelineId()); + model.setRestartWindow(pb.getRestartWindow()); + model.setRootPath(pb.getRootPath()); + model.setRunAs(pb.getRunAs()); + model.setSchema(pb.getSchema()); + model.setServerless(pb.getServerless()); + model.setStorage(pb.getStorage()); + model.setTags(pb.getTags()); + model.setTarget(pb.getTarget()); + model.setTrigger(pb.getTrigger()); + + return model; + } + + public static class EditPipelineSerializer extends JsonSerializer { + @Override + public void serialize(EditPipeline value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditPipelinePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditPipelineDeserializer extends JsonDeserializer { + @Override + public EditPipeline deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditPipelinePb pb = mapper.readValue(p, EditPipelinePb.class); + return EditPipeline.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelinePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelinePb.java new file mode 100755 index 000000000..74b1232ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelinePb.java @@ -0,0 +1,481 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class EditPipelinePb { + @JsonProperty("allow_duplicate_names") + private Boolean allowDuplicateNames; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("channel") + private String channel; + + @JsonProperty("clusters") + private Collection clusters; + + @JsonProperty("configuration") + private Map configuration; + + @JsonProperty("continuous") + private Boolean continuous; + + @JsonProperty("deployment") + private PipelineDeployment deployment; + + @JsonProperty("development") + private Boolean development; + + @JsonProperty("edition") + private String edition; + + @JsonProperty("event_log") + private EventLogSpec eventLog; + + @JsonProperty("expected_last_modified") + private Long expectedLastModified; + + @JsonProperty("filters") + private Filters filters; + + @JsonProperty("gateway_definition") + private IngestionGatewayPipelineDefinition gatewayDefinition; + + @JsonProperty("id") + private String id; + + @JsonProperty("ingestion_definition") + private IngestionPipelineDefinition ingestionDefinition; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("name") + private String name; + + @JsonProperty("notifications") + private Collection notifications; + + @JsonProperty("photon") + private Boolean photon; + + @JsonIgnore private String pipelineId; + + @JsonProperty("restart_window") + private RestartWindow restartWindow; + + @JsonProperty("root_path") + private String rootPath; + + @JsonProperty("run_as") + private RunAs runAs; + + @JsonProperty("schema") + private String schema; + + 
@JsonProperty("serverless") + private Boolean serverless; + + @JsonProperty("storage") + private String storage; + + @JsonProperty("tags") + private Map tags; + + @JsonProperty("target") + private String target; + + @JsonProperty("trigger") + private PipelineTrigger trigger; + + public EditPipelinePb setAllowDuplicateNames(Boolean allowDuplicateNames) { + this.allowDuplicateNames = allowDuplicateNames; + return this; + } + + public Boolean getAllowDuplicateNames() { + return allowDuplicateNames; + } + + public EditPipelinePb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public EditPipelinePb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public EditPipelinePb setChannel(String channel) { + this.channel = channel; + return this; + } + + public String getChannel() { + return channel; + } + + public EditPipelinePb setClusters(Collection clusters) { + this.clusters = clusters; + return this; + } + + public Collection getClusters() { + return clusters; + } + + public EditPipelinePb setConfiguration(Map configuration) { + this.configuration = configuration; + return this; + } + + public Map getConfiguration() { + return configuration; + } + + public EditPipelinePb setContinuous(Boolean continuous) { + this.continuous = continuous; + return this; + } + + public Boolean getContinuous() { + return continuous; + } + + public EditPipelinePb setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + + public EditPipelinePb setDevelopment(Boolean development) { + this.development = development; + return this; + } + + public Boolean getDevelopment() { + return development; + } + + public EditPipelinePb setEdition(String edition) { + this.edition = edition; 
+ return this; + } + + public String getEdition() { + return edition; + } + + public EditPipelinePb setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + + public EditPipelinePb setExpectedLastModified(Long expectedLastModified) { + this.expectedLastModified = expectedLastModified; + return this; + } + + public Long getExpectedLastModified() { + return expectedLastModified; + } + + public EditPipelinePb setFilters(Filters filters) { + this.filters = filters; + return this; + } + + public Filters getFilters() { + return filters; + } + + public EditPipelinePb setGatewayDefinition(IngestionGatewayPipelineDefinition gatewayDefinition) { + this.gatewayDefinition = gatewayDefinition; + return this; + } + + public IngestionGatewayPipelineDefinition getGatewayDefinition() { + return gatewayDefinition; + } + + public EditPipelinePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public EditPipelinePb setIngestionDefinition(IngestionPipelineDefinition ingestionDefinition) { + this.ingestionDefinition = ingestionDefinition; + return this; + } + + public IngestionPipelineDefinition getIngestionDefinition() { + return ingestionDefinition; + } + + public EditPipelinePb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public EditPipelinePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EditPipelinePb setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public EditPipelinePb setPhoton(Boolean photon) { + this.photon = photon; + return this; + } + + public Boolean getPhoton() { + return photon; + } + + public EditPipelinePb 
setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public EditPipelinePb setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + + public EditPipelinePb setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + + public EditPipelinePb setRunAs(RunAs runAs) { + this.runAs = runAs; + return this; + } + + public RunAs getRunAs() { + return runAs; + } + + public EditPipelinePb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public EditPipelinePb setServerless(Boolean serverless) { + this.serverless = serverless; + return this; + } + + public Boolean getServerless() { + return serverless; + } + + public EditPipelinePb setStorage(String storage) { + this.storage = storage; + return this; + } + + public String getStorage() { + return storage; + } + + public EditPipelinePb setTags(Map tags) { + this.tags = tags; + return this; + } + + public Map getTags() { + return tags; + } + + public EditPipelinePb setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + public EditPipelinePb setTrigger(PipelineTrigger trigger) { + this.trigger = trigger; + return this; + } + + public PipelineTrigger getTrigger() { + return trigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditPipelinePb that = (EditPipelinePb) o; + return Objects.equals(allowDuplicateNames, that.allowDuplicateNames) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(catalog, that.catalog) + && Objects.equals(channel, that.channel) + && 
Objects.equals(clusters, that.clusters) + && Objects.equals(configuration, that.configuration) + && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) + && Objects.equals(development, that.development) + && Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) + && Objects.equals(expectedLastModified, that.expectedLastModified) + && Objects.equals(filters, that.filters) + && Objects.equals(gatewayDefinition, that.gatewayDefinition) + && Objects.equals(id, that.id) + && Objects.equals(ingestionDefinition, that.ingestionDefinition) + && Objects.equals(libraries, that.libraries) + && Objects.equals(name, that.name) + && Objects.equals(notifications, that.notifications) + && Objects.equals(photon, that.photon) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) + && Objects.equals(runAs, that.runAs) + && Objects.equals(schema, that.schema) + && Objects.equals(serverless, that.serverless) + && Objects.equals(storage, that.storage) + && Objects.equals(tags, that.tags) + && Objects.equals(target, that.target) + && Objects.equals(trigger, that.trigger); + } + + @Override + public int hashCode() { + return Objects.hash( + allowDuplicateNames, + budgetPolicyId, + catalog, + channel, + clusters, + configuration, + continuous, + deployment, + development, + edition, + eventLog, + expectedLastModified, + filters, + gatewayDefinition, + id, + ingestionDefinition, + libraries, + name, + notifications, + photon, + pipelineId, + restartWindow, + rootPath, + runAs, + schema, + serverless, + storage, + tags, + target, + trigger); + } + + @Override + public String toString() { + return new ToStringer(EditPipelinePb.class) + .add("allowDuplicateNames", allowDuplicateNames) + .add("budgetPolicyId", budgetPolicyId) + .add("catalog", catalog) + .add("channel", channel) + .add("clusters", clusters) + .add("configuration", 
configuration) + .add("continuous", continuous) + .add("deployment", deployment) + .add("development", development) + .add("edition", edition) + .add("eventLog", eventLog) + .add("expectedLastModified", expectedLastModified) + .add("filters", filters) + .add("gatewayDefinition", gatewayDefinition) + .add("id", id) + .add("ingestionDefinition", ingestionDefinition) + .add("libraries", libraries) + .add("name", name) + .add("notifications", notifications) + .add("photon", photon) + .add("pipelineId", pipelineId) + .add("restartWindow", restartWindow) + .add("rootPath", rootPath) + .add("runAs", runAs) + .add("schema", schema) + .add("serverless", serverless) + .add("storage", storage) + .add("tags", tags) + .add("target", target) + .add("trigger", trigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java index 2bb8b38a5..45cacc16c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
EditPipelineResponse.EditPipelineResponseSerializer.class) +@JsonDeserialize(using = EditPipelineResponse.EditPipelineResponseDeserializer.class) public class EditPipelineResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(EditPipelineResponse.class).toString(); } + + EditPipelineResponsePb toPb() { + EditPipelineResponsePb pb = new EditPipelineResponsePb(); + + return pb; + } + + static EditPipelineResponse fromPb(EditPipelineResponsePb pb) { + EditPipelineResponse model = new EditPipelineResponse(); + + return model; + } + + public static class EditPipelineResponseSerializer extends JsonSerializer { + @Override + public void serialize( + EditPipelineResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditPipelineResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditPipelineResponseDeserializer + extends JsonDeserializer { + @Override + public EditPipelineResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditPipelineResponsePb pb = mapper.readValue(p, EditPipelineResponsePb.class); + return EditPipelineResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponsePb.java new file mode 100755 index 000000000..e8eb48388 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditPipelineResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditPipelineResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java index 9989d5b65..e22bf83e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ErrorDetail.ErrorDetailSerializer.class) +@JsonDeserialize(using = ErrorDetail.ErrorDetailDeserializer.class) public class ErrorDetail { /** The exception thrown for this error, with its chain of cause. 
*/ - @JsonProperty("exceptions") private Collection exceptions; /** Whether this error is considered fatal, that is, unrecoverable. */ - @JsonProperty("fatal") private Boolean fatal; public ErrorDetail setExceptions(Collection exceptions) { @@ -56,4 +65,39 @@ public String toString() { .add("fatal", fatal) .toString(); } + + ErrorDetailPb toPb() { + ErrorDetailPb pb = new ErrorDetailPb(); + pb.setExceptions(exceptions); + pb.setFatal(fatal); + + return pb; + } + + static ErrorDetail fromPb(ErrorDetailPb pb) { + ErrorDetail model = new ErrorDetail(); + model.setExceptions(pb.getExceptions()); + model.setFatal(pb.getFatal()); + + return model; + } + + public static class ErrorDetailSerializer extends JsonSerializer { + @Override + public void serialize(ErrorDetail value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ErrorDetailPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ErrorDetailDeserializer extends JsonDeserializer { + @Override + public ErrorDetail deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ErrorDetailPb pb = mapper.readValue(p, ErrorDetailPb.class); + return ErrorDetail.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetailPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetailPb.java new file mode 100755 index 000000000..d1657a6b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetailPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ErrorDetailPb { + @JsonProperty("exceptions") + private Collection exceptions; + + @JsonProperty("fatal") + private Boolean fatal; + + public ErrorDetailPb setExceptions(Collection exceptions) { + this.exceptions = exceptions; + return this; + } + + public Collection getExceptions() { + return exceptions; + } + + public ErrorDetailPb setFatal(Boolean fatal) { + this.fatal = fatal; + return this; + } + + public Boolean getFatal() { + return fatal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ErrorDetailPb that = (ErrorDetailPb) o; + return Objects.equals(exceptions, that.exceptions) && Objects.equals(fatal, that.fatal); + } + + @Override + public int hashCode() { + return Objects.hash(exceptions, fatal); + } + + @Override + public String toString() { + return new ToStringer(ErrorDetailPb.class) + .add("exceptions", exceptions) + .add("fatal", fatal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java index 9bf0d882b..a54e944e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Configurable event log parameters. */ @Generated +@JsonSerialize(using = EventLogSpec.EventLogSpecSerializer.class) +@JsonDeserialize(using = EventLogSpec.EventLogSpecDeserializer.class) public class EventLogSpec { /** The UC catalog the event log is published under. */ - @JsonProperty("catalog") private String catalog; /** The name the event log is published to in UC. */ - @JsonProperty("name") private String name; /** The UC schema the event log is published under. */ - @JsonProperty("schema") private String schema; public EventLogSpec setCatalog(String catalog) { @@ -72,4 +80,41 @@ public String toString() { .add("schema", schema) .toString(); } + + EventLogSpecPb toPb() { + EventLogSpecPb pb = new EventLogSpecPb(); + pb.setCatalog(catalog); + pb.setName(name); + pb.setSchema(schema); + + return pb; + } + + static EventLogSpec fromPb(EventLogSpecPb pb) { + EventLogSpec model = new EventLogSpec(); + model.setCatalog(pb.getCatalog()); + model.setName(pb.getName()); + model.setSchema(pb.getSchema()); + + return model; + } + + public static class EventLogSpecSerializer extends JsonSerializer { + @Override + public void serialize(EventLogSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EventLogSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EventLogSpecDeserializer extends JsonDeserializer { + @Override + public EventLogSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is 
set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EventLogSpecPb pb = mapper.readValue(p, EventLogSpecPb.class); + return EventLogSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpecPb.java new file mode 100755 index 000000000..8316e999d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpecPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configurable event log parameters. */ +@Generated +class EventLogSpecPb { + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema") + private String schema; + + public EventLogSpecPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public EventLogSpecPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EventLogSpecPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EventLogSpecPb that = (EventLogSpecPb) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema); + } + + @Override + public int hashCode() { + return Objects.hash(catalog, name, schema); + } + + @Override + 
public String toString() { + return new ToStringer(EventLogSpecPb.class) + .add("catalog", catalog) + .add("name", name) + .add("schema", schema) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java index 498f4c6e9..9cf137e7b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FileLibrary.FileLibrarySerializer.class) +@JsonDeserialize(using = FileLibrary.FileLibraryDeserializer.class) public class FileLibrary { /** The absolute path of the source code. 
*/ - @JsonProperty("path") private String path; public FileLibrary setPath(String path) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(FileLibrary.class).add("path", path).toString(); } + + FileLibraryPb toPb() { + FileLibraryPb pb = new FileLibraryPb(); + pb.setPath(path); + + return pb; + } + + static FileLibrary fromPb(FileLibraryPb pb) { + FileLibrary model = new FileLibrary(); + model.setPath(pb.getPath()); + + return model; + } + + public static class FileLibrarySerializer extends JsonSerializer { + @Override + public void serialize(FileLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FileLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FileLibraryDeserializer extends JsonDeserializer { + @Override + public FileLibrary deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FileLibraryPb pb = mapper.readValue(p, FileLibraryPb.class); + return FileLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibraryPb.java new file mode 100755 index 000000000..26eacc068 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibraryPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FileLibraryPb { + @JsonProperty("path") + private String path; + + public FileLibraryPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileLibraryPb that = (FileLibraryPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(FileLibraryPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java index c6d0b76bd..1a1573ebb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Filters.FiltersSerializer.class) +@JsonDeserialize(using = Filters.FiltersDeserializer.class) public class Filters { /** Paths to exclude. */ - @JsonProperty("exclude") private Collection exclude; /** Paths to include. */ - @JsonProperty("include") private Collection include; public Filters setExclude(Collection exclude) { @@ -53,4 +62,39 @@ public int hashCode() { public String toString() { return new ToStringer(Filters.class).add("exclude", exclude).add("include", include).toString(); } + + FiltersPb toPb() { + FiltersPb pb = new FiltersPb(); + pb.setExclude(exclude); + pb.setInclude(include); + + return pb; + } + + static Filters fromPb(FiltersPb pb) { + Filters model = new Filters(); + model.setExclude(pb.getExclude()); + model.setInclude(pb.getInclude()); + + return model; + } + + public static class FiltersSerializer extends JsonSerializer { + @Override + public void serialize(Filters value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FiltersPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FiltersDeserializer extends JsonDeserializer { + @Override + public Filters deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FiltersPb pb = mapper.readValue(p, FiltersPb.class); + return Filters.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FiltersPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FiltersPb.java new file mode 100755 index 000000000..b718b044d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FiltersPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class FiltersPb { + @JsonProperty("exclude") + private Collection exclude; + + @JsonProperty("include") + private Collection include; + + public FiltersPb setExclude(Collection exclude) { + this.exclude = exclude; + return this; + } + + public Collection getExclude() { + return exclude; + } + + public FiltersPb setInclude(Collection include) { + this.include = include; + return this; + } + + public Collection getInclude() { + return include; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FiltersPb that = (FiltersPb) o; + return Objects.equals(exclude, that.exclude) && Objects.equals(include, that.include); + } + + @Override + public int hashCode() { + return Objects.hash(exclude, include); + } + + @Override + public String toString() { + return new ToStringer(FiltersPb.class) + .add("exclude", exclude) + .add("include", include) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java index 1af0089dc..0a7b071b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get pipeline permission levels */ @Generated +@JsonSerialize( + using = GetPipelinePermissionLevelsRequest.GetPipelinePermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = GetPipelinePermissionLevelsRequest.GetPipelinePermissionLevelsRequestDeserializer.class) public class GetPipelinePermissionLevelsRequest { /** The pipeline for which to get or manage permissions. */ - @JsonIgnore private String pipelineId; + private String pipelineId; public GetPipelinePermissionLevelsRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -41,4 +54,42 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + GetPipelinePermissionLevelsRequestPb toPb() { + GetPipelinePermissionLevelsRequestPb pb = new GetPipelinePermissionLevelsRequestPb(); + pb.setPipelineId(pipelineId); + + return pb; + } + + static GetPipelinePermissionLevelsRequest fromPb(GetPipelinePermissionLevelsRequestPb pb) { + GetPipelinePermissionLevelsRequest model = new GetPipelinePermissionLevelsRequest(); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class GetPipelinePermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPipelinePermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPipelinePermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPipelinePermissionLevelsRequestDeserializer 
+ extends JsonDeserializer { + @Override + public GetPipelinePermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPipelinePermissionLevelsRequestPb pb = + mapper.readValue(p, GetPipelinePermissionLevelsRequestPb.class); + return GetPipelinePermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequestPb.java new file mode 100755 index 000000000..a14dee602 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get pipeline permission levels */ +@Generated +class GetPipelinePermissionLevelsRequestPb { + @JsonIgnore private String pipelineId; + + public GetPipelinePermissionLevelsRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPipelinePermissionLevelsRequestPb that = (GetPipelinePermissionLevelsRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new 
ToStringer(GetPipelinePermissionLevelsRequestPb.class) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponse.java index ffeb8c66a..24f30022c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponse.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetPipelinePermissionLevelsResponse.GetPipelinePermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetPipelinePermissionLevelsResponse.GetPipelinePermissionLevelsResponseDeserializer.class) public class GetPipelinePermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetPipelinePermissionLevelsResponse setPermissionLevels( @@ -43,4 +56,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + 
GetPipelinePermissionLevelsResponsePb toPb() { + GetPipelinePermissionLevelsResponsePb pb = new GetPipelinePermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetPipelinePermissionLevelsResponse fromPb(GetPipelinePermissionLevelsResponsePb pb) { + GetPipelinePermissionLevelsResponse model = new GetPipelinePermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetPipelinePermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPipelinePermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPipelinePermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPipelinePermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetPipelinePermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPipelinePermissionLevelsResponsePb pb = + mapper.readValue(p, GetPipelinePermissionLevelsResponsePb.class); + return GetPipelinePermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponsePb.java new file mode 100755 index 000000000..f4f706088 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPipelinePermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetPipelinePermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPipelinePermissionLevelsResponsePb that = (GetPipelinePermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetPipelinePermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequest.java index 35f4fc17f..08ebdc2ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get pipeline permissions */ @Generated +@JsonSerialize(using = GetPipelinePermissionsRequest.GetPipelinePermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetPipelinePermissionsRequest.GetPipelinePermissionsRequestDeserializer.class) public class GetPipelinePermissionsRequest { /** The pipeline for which to get or manage permissions. */ - @JsonIgnore private String pipelineId; + private String pipelineId; public GetPipelinePermissionsRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -41,4 +53,42 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + GetPipelinePermissionsRequestPb toPb() { + GetPipelinePermissionsRequestPb pb = new GetPipelinePermissionsRequestPb(); + pb.setPipelineId(pipelineId); + + return pb; + } + + static GetPipelinePermissionsRequest fromPb(GetPipelinePermissionsRequestPb pb) { + GetPipelinePermissionsRequest model = new GetPipelinePermissionsRequest(); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class GetPipelinePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPipelinePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPipelinePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPipelinePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetPipelinePermissionsRequest deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPipelinePermissionsRequestPb pb = + mapper.readValue(p, GetPipelinePermissionsRequestPb.class); + return GetPipelinePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequestPb.java new file mode 100755 index 000000000..8e6308b3f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get pipeline permissions */ +@Generated +class GetPipelinePermissionsRequestPb { + @JsonIgnore private String pipelineId; + + public GetPipelinePermissionsRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPipelinePermissionsRequestPb that = (GetPipelinePermissionsRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(GetPipelinePermissionsRequestPb.class) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequest.java index f8612ef4c..e4ce337a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a pipeline */ @Generated +@JsonSerialize(using = GetPipelineRequest.GetPipelineRequestSerializer.class) +@JsonDeserialize(using = GetPipelineRequest.GetPipelineRequestDeserializer.class) public class GetPipelineRequest { /** */ - @JsonIgnore private String pipelineId; + private String pipelineId; public GetPipelineRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetPipelineRequest.class).add("pipelineId", pipelineId).toString(); } + + GetPipelineRequestPb toPb() { + GetPipelineRequestPb pb = new GetPipelineRequestPb(); + pb.setPipelineId(pipelineId); + + return pb; + } + + static GetPipelineRequest fromPb(GetPipelineRequestPb pb) { + GetPipelineRequest model = new GetPipelineRequest(); + 
model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class GetPipelineRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetPipelineRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPipelineRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPipelineRequestDeserializer extends JsonDeserializer { + @Override + public GetPipelineRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPipelineRequestPb pb = mapper.readValue(p, GetPipelineRequestPb.class); + return GetPipelineRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequestPb.java new file mode 100755 index 000000000..8834c9e09 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a pipeline */ +@Generated +class GetPipelineRequestPb { + @JsonIgnore private String pipelineId; + + public GetPipelineRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPipelineRequestPb that = (GetPipelineRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(GetPipelineRequestPb.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java index 0654879e3..65a8de195 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java @@ -4,58 +4,57 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetPipelineResponse.GetPipelineResponseSerializer.class) +@JsonDeserialize(using = GetPipelineResponse.GetPipelineResponseDeserializer.class) public class GetPipelineResponse { /** An optional message detailing the cause of the pipeline state. */ - @JsonProperty("cause") private String cause; /** The ID of the cluster that the pipeline is running on. */ - @JsonProperty("cluster_id") private String clusterId; /** The username of the pipeline creator. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** Serverless budget policy ID of this pipeline. */ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; /** The health of a pipeline. */ - @JsonProperty("health") private GetPipelineResponseHealth health; /** The last time the pipeline settings were modified or created. */ - @JsonProperty("last_modified") private Long lastModified; /** Status of the latest updates for the pipeline. Ordered with the newest update first. */ - @JsonProperty("latest_updates") private Collection latestUpdates; /** A human friendly identifier for the pipeline, taken from the `spec`. */ - @JsonProperty("name") private String name; /** The ID of the pipeline. */ - @JsonProperty("pipeline_id") private String pipelineId; /** Username of the user that the pipeline will run on behalf of. */ - @JsonProperty("run_as_user_name") private String runAsUserName; /** The pipeline specification. This field is not returned when called by `ListPipelines`. */ - @JsonProperty("spec") private PipelineSpec spec; /** The pipeline state. 
*/ - @JsonProperty("state") private PipelineState state; public GetPipelineResponse setCause(String cause) { @@ -219,4 +218,61 @@ public String toString() { .add("state", state) .toString(); } + + GetPipelineResponsePb toPb() { + GetPipelineResponsePb pb = new GetPipelineResponsePb(); + pb.setCause(cause); + pb.setClusterId(clusterId); + pb.setCreatorUserName(creatorUserName); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + pb.setHealth(health); + pb.setLastModified(lastModified); + pb.setLatestUpdates(latestUpdates); + pb.setName(name); + pb.setPipelineId(pipelineId); + pb.setRunAsUserName(runAsUserName); + pb.setSpec(spec); + pb.setState(state); + + return pb; + } + + static GetPipelineResponse fromPb(GetPipelineResponsePb pb) { + GetPipelineResponse model = new GetPipelineResponse(); + model.setCause(pb.getCause()); + model.setClusterId(pb.getClusterId()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + model.setHealth(pb.getHealth()); + model.setLastModified(pb.getLastModified()); + model.setLatestUpdates(pb.getLatestUpdates()); + model.setName(pb.getName()); + model.setPipelineId(pb.getPipelineId()); + model.setRunAsUserName(pb.getRunAsUserName()); + model.setSpec(pb.getSpec()); + model.setState(pb.getState()); + + return model; + } + + public static class GetPipelineResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetPipelineResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPipelineResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPipelineResponseDeserializer + extends JsonDeserializer { + @Override + public GetPipelineResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPipelineResponsePb pb = mapper.readValue(p, GetPipelineResponsePb.class); + return GetPipelineResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponsePb.java new file mode 100755 index 000000000..ce0d52771 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponsePb.java @@ -0,0 +1,210 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetPipelineResponsePb { + @JsonProperty("cause") + private String cause; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + @JsonProperty("health") + private GetPipelineResponseHealth health; + + @JsonProperty("last_modified") + private Long lastModified; + + @JsonProperty("latest_updates") + private Collection latestUpdates; + + @JsonProperty("name") + private String name; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("run_as_user_name") + private String runAsUserName; + + @JsonProperty("spec") + private PipelineSpec spec; + + @JsonProperty("state") + private PipelineState state; + + public GetPipelineResponsePb setCause(String cause) { + this.cause = cause; + return this; + } + + public String getCause() { + return cause; + } + + public GetPipelineResponsePb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + 
+ public String getClusterId() { + return clusterId; + } + + public GetPipelineResponsePb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public GetPipelineResponsePb setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public GetPipelineResponsePb setHealth(GetPipelineResponseHealth health) { + this.health = health; + return this; + } + + public GetPipelineResponseHealth getHealth() { + return health; + } + + public GetPipelineResponsePb setLastModified(Long lastModified) { + this.lastModified = lastModified; + return this; + } + + public Long getLastModified() { + return lastModified; + } + + public GetPipelineResponsePb setLatestUpdates(Collection latestUpdates) { + this.latestUpdates = latestUpdates; + return this; + } + + public Collection getLatestUpdates() { + return latestUpdates; + } + + public GetPipelineResponsePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetPipelineResponsePb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public GetPipelineResponsePb setRunAsUserName(String runAsUserName) { + this.runAsUserName = runAsUserName; + return this; + } + + public String getRunAsUserName() { + return runAsUserName; + } + + public GetPipelineResponsePb setSpec(PipelineSpec spec) { + this.spec = spec; + return this; + } + + public PipelineSpec getSpec() { + return spec; + } + + public GetPipelineResponsePb setState(PipelineState state) { + this.state = state; + return this; + } + + public PipelineState getState() { + return state; + } + + @Override + public boolean equals(Object 
o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPipelineResponsePb that = (GetPipelineResponsePb) o; + return Objects.equals(cause, that.cause) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(health, that.health) + && Objects.equals(lastModified, that.lastModified) + && Objects.equals(latestUpdates, that.latestUpdates) + && Objects.equals(name, that.name) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(runAsUserName, that.runAsUserName) + && Objects.equals(spec, that.spec) + && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash( + cause, + clusterId, + creatorUserName, + effectiveBudgetPolicyId, + health, + lastModified, + latestUpdates, + name, + pipelineId, + runAsUserName, + spec, + state); + } + + @Override + public String toString() { + return new ToStringer(GetPipelineResponsePb.class) + .add("cause", cause) + .add("clusterId", clusterId) + .add("creatorUserName", creatorUserName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("health", health) + .add("lastModified", lastModified) + .add("latestUpdates", latestUpdates) + .add("name", name) + .add("pipelineId", pipelineId) + .add("runAsUserName", runAsUserName) + .add("spec", spec) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java index 19e9844a9..2e8abaf5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a pipeline update */ @Generated +@JsonSerialize(using = GetUpdateRequest.GetUpdateRequestSerializer.class) +@JsonDeserialize(using = GetUpdateRequest.GetUpdateRequestDeserializer.class) public class GetUpdateRequest { /** The ID of the pipeline. */ - @JsonIgnore private String pipelineId; + private String pipelineId; /** The ID of the update. 
*/ - @JsonIgnore private String updateId; + private String updateId; public GetUpdateRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -54,4 +65,40 @@ public String toString() { .add("updateId", updateId) .toString(); } + + GetUpdateRequestPb toPb() { + GetUpdateRequestPb pb = new GetUpdateRequestPb(); + pb.setPipelineId(pipelineId); + pb.setUpdateId(updateId); + + return pb; + } + + static GetUpdateRequest fromPb(GetUpdateRequestPb pb) { + GetUpdateRequest model = new GetUpdateRequest(); + model.setPipelineId(pb.getPipelineId()); + model.setUpdateId(pb.getUpdateId()); + + return model; + } + + public static class GetUpdateRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetUpdateRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetUpdateRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetUpdateRequestDeserializer extends JsonDeserializer { + @Override + public GetUpdateRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetUpdateRequestPb pb = mapper.readValue(p, GetUpdateRequestPb.class); + return GetUpdateRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequestPb.java new file mode 100755 index 000000000..5e2a34d09 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a pipeline update */ +@Generated +class GetUpdateRequestPb { + @JsonIgnore private String pipelineId; + + @JsonIgnore private String updateId; + + public GetUpdateRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public GetUpdateRequestPb setUpdateId(String updateId) { + this.updateId = updateId; + return this; + } + + public String getUpdateId() { + return updateId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetUpdateRequestPb that = (GetUpdateRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId) && Objects.equals(updateId, that.updateId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId, updateId); + } + + @Override + public String toString() { + return new ToStringer(GetUpdateRequestPb.class) + .add("pipelineId", pipelineId) + .add("updateId", updateId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java index b2fc7b4fb..8ad05860c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetUpdateResponse.GetUpdateResponseSerializer.class) +@JsonDeserialize(using = GetUpdateResponse.GetUpdateResponseDeserializer.class) public class GetUpdateResponse { /** The current update info. */ - @JsonProperty("update") private UpdateInfo update; public GetUpdateResponse setUpdate(UpdateInfo update) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetUpdateResponse.class).add("update", update).toString(); } + + GetUpdateResponsePb toPb() { + GetUpdateResponsePb pb = new GetUpdateResponsePb(); + pb.setUpdate(update); + + return pb; + } + + static GetUpdateResponse fromPb(GetUpdateResponsePb pb) { + GetUpdateResponse model = new GetUpdateResponse(); + model.setUpdate(pb.getUpdate()); + + return model; + } + + public static class GetUpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetUpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetUpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetUpdateResponseDeserializer extends JsonDeserializer { + @Override + public GetUpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetUpdateResponsePb pb = mapper.readValue(p, GetUpdateResponsePb.class); + return GetUpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponsePb.java new file mode 100755 index 000000000..1d2ec67af --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetUpdateResponsePb { + @JsonProperty("update") + private UpdateInfo update; + + public GetUpdateResponsePb setUpdate(UpdateInfo update) { + this.update = update; + return this; + } + + public UpdateInfo getUpdate() { + return update; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetUpdateResponsePb that = (GetUpdateResponsePb) o; + return Objects.equals(update, that.update); + } + + @Override + public int hashCode() { + return Objects.hash(update); + } + + @Override + public String toString() { + return new ToStringer(GetUpdateResponsePb.class).add("update", update).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java index c1ed47fe4..478f7132c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = IngestionConfig.IngestionConfigSerializer.class) +@JsonDeserialize(using = IngestionConfig.IngestionConfigDeserializer.class) public class IngestionConfig { /** Select a specific source report. */ - @JsonProperty("report") private ReportSpec report; /** Select all tables from a specific source schema. */ - @JsonProperty("schema") private SchemaSpec schema; /** Select a specific source table. 
*/ - @JsonProperty("table") private TableSpec table; public IngestionConfig setReport(ReportSpec report) { @@ -71,4 +79,42 @@ public String toString() { .add("table", table) .toString(); } + + IngestionConfigPb toPb() { + IngestionConfigPb pb = new IngestionConfigPb(); + pb.setReport(report); + pb.setSchema(schema); + pb.setTable(table); + + return pb; + } + + static IngestionConfig fromPb(IngestionConfigPb pb) { + IngestionConfig model = new IngestionConfig(); + model.setReport(pb.getReport()); + model.setSchema(pb.getSchema()); + model.setTable(pb.getTable()); + + return model; + } + + public static class IngestionConfigSerializer extends JsonSerializer { + @Override + public void serialize(IngestionConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + IngestionConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class IngestionConfigDeserializer extends JsonDeserializer { + @Override + public IngestionConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + IngestionConfigPb pb = mapper.readValue(p, IngestionConfigPb.class); + return IngestionConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfigPb.java new file mode 100755 index 000000000..b2ffed5c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfigPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class IngestionConfigPb { + @JsonProperty("report") + private ReportSpec report; + + @JsonProperty("schema") + private SchemaSpec schema; + + @JsonProperty("table") + private TableSpec table; + + public IngestionConfigPb setReport(ReportSpec report) { + this.report = report; + return this; + } + + public ReportSpec getReport() { + return report; + } + + public IngestionConfigPb setSchema(SchemaSpec schema) { + this.schema = schema; + return this; + } + + public SchemaSpec getSchema() { + return schema; + } + + public IngestionConfigPb setTable(TableSpec table) { + this.table = table; + return this; + } + + public TableSpec getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestionConfigPb that = (IngestionConfigPb) o; + return Objects.equals(report, that.report) + && Objects.equals(schema, that.schema) + && Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(report, schema, table); + } + + @Override + public String toString() { + return new ToStringer(IngestionConfigPb.class) + .add("report", report) + .add("schema", schema) + .add("table", table) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java index 58142fafd..ff656aa2d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java @@ -4,27 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = IngestionGatewayPipelineDefinition.IngestionGatewayPipelineDefinitionSerializer.class) +@JsonDeserialize( + using = IngestionGatewayPipelineDefinition.IngestionGatewayPipelineDefinitionDeserializer.class) public class IngestionGatewayPipelineDefinition { /** * [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this * gateway pipeline uses to communicate with the source. */ - @JsonProperty("connection_id") private String connectionId; /** * Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the * source. */ - @JsonProperty("connection_name") private String connectionName; /** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. */ - @JsonProperty("gateway_storage_catalog") private String gatewayStorageCatalog; /** @@ -32,11 +42,9 @@ public class IngestionGatewayPipelineDefinition { * destination to use for the data that is extracted by the gateway. Delta Live Tables system will * automatically create the storage location under the catalog and schema. 
*/ - @JsonProperty("gateway_storage_name") private String gatewayStorageName; /** Required, Immutable. The name of the schema for the gateway pipelines's storage location. */ - @JsonProperty("gateway_storage_schema") private String gatewayStorageSchema; public IngestionGatewayPipelineDefinition setConnectionId(String connectionId) { @@ -116,4 +124,50 @@ public String toString() { .add("gatewayStorageSchema", gatewayStorageSchema) .toString(); } + + IngestionGatewayPipelineDefinitionPb toPb() { + IngestionGatewayPipelineDefinitionPb pb = new IngestionGatewayPipelineDefinitionPb(); + pb.setConnectionId(connectionId); + pb.setConnectionName(connectionName); + pb.setGatewayStorageCatalog(gatewayStorageCatalog); + pb.setGatewayStorageName(gatewayStorageName); + pb.setGatewayStorageSchema(gatewayStorageSchema); + + return pb; + } + + static IngestionGatewayPipelineDefinition fromPb(IngestionGatewayPipelineDefinitionPb pb) { + IngestionGatewayPipelineDefinition model = new IngestionGatewayPipelineDefinition(); + model.setConnectionId(pb.getConnectionId()); + model.setConnectionName(pb.getConnectionName()); + model.setGatewayStorageCatalog(pb.getGatewayStorageCatalog()); + model.setGatewayStorageName(pb.getGatewayStorageName()); + model.setGatewayStorageSchema(pb.getGatewayStorageSchema()); + + return model; + } + + public static class IngestionGatewayPipelineDefinitionSerializer + extends JsonSerializer { + @Override + public void serialize( + IngestionGatewayPipelineDefinition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + IngestionGatewayPipelineDefinitionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class IngestionGatewayPipelineDefinitionDeserializer + extends JsonDeserializer { + @Override + public IngestionGatewayPipelineDefinition deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + IngestionGatewayPipelineDefinitionPb pb = + mapper.readValue(p, IngestionGatewayPipelineDefinitionPb.class); + return IngestionGatewayPipelineDefinition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinitionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinitionPb.java new file mode 100755 index 000000000..8e984640d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinitionPb.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class IngestionGatewayPipelineDefinitionPb { + @JsonProperty("connection_id") + private String connectionId; + + @JsonProperty("connection_name") + private String connectionName; + + @JsonProperty("gateway_storage_catalog") + private String gatewayStorageCatalog; + + @JsonProperty("gateway_storage_name") + private String gatewayStorageName; + + @JsonProperty("gateway_storage_schema") + private String gatewayStorageSchema; + + public IngestionGatewayPipelineDefinitionPb setConnectionId(String connectionId) { + this.connectionId = connectionId; + return this; + } + + public String getConnectionId() { + return connectionId; + } + + public IngestionGatewayPipelineDefinitionPb setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + + public IngestionGatewayPipelineDefinitionPb setGatewayStorageCatalog( + String gatewayStorageCatalog) { + this.gatewayStorageCatalog = 
gatewayStorageCatalog; + return this; + } + + public String getGatewayStorageCatalog() { + return gatewayStorageCatalog; + } + + public IngestionGatewayPipelineDefinitionPb setGatewayStorageName(String gatewayStorageName) { + this.gatewayStorageName = gatewayStorageName; + return this; + } + + public String getGatewayStorageName() { + return gatewayStorageName; + } + + public IngestionGatewayPipelineDefinitionPb setGatewayStorageSchema(String gatewayStorageSchema) { + this.gatewayStorageSchema = gatewayStorageSchema; + return this; + } + + public String getGatewayStorageSchema() { + return gatewayStorageSchema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestionGatewayPipelineDefinitionPb that = (IngestionGatewayPipelineDefinitionPb) o; + return Objects.equals(connectionId, that.connectionId) + && Objects.equals(connectionName, that.connectionName) + && Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog) + && Objects.equals(gatewayStorageName, that.gatewayStorageName) + && Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema); + } + + @Override + public int hashCode() { + return Objects.hash( + connectionId, + connectionName, + gatewayStorageCatalog, + gatewayStorageName, + gatewayStorageSchema); + } + + @Override + public String toString() { + return new ToStringer(IngestionGatewayPipelineDefinitionPb.class) + .add("connectionId", connectionId) + .add("connectionName", connectionName) + .add("gatewayStorageCatalog", gatewayStorageCatalog) + .add("gatewayStorageName", gatewayStorageName) + .add("gatewayStorageSchema", gatewayStorageSchema) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 1471fd886..b53710476 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -4,45 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = IngestionPipelineDefinition.IngestionPipelineDefinitionSerializer.class) +@JsonDeserialize(using = IngestionPipelineDefinition.IngestionPipelineDefinitionDeserializer.class) public class IngestionPipelineDefinition { /** * Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with * the source. This is used with connectors for applications like Salesforce, Workday, and so on. */ - @JsonProperty("connection_name") private String connectionName; /** * Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate * with the source database. This is used with connectors to databases like SQL Server. */ - @JsonProperty("ingestion_gateway_id") private String ingestionGatewayId; /** * Required. Settings specifying tables to replicate and the destination for the replicated * tables. */ - @JsonProperty("objects") private Collection objects; /** * The type of the foreign source. 
The source type will be inferred from the source connection or * ingestion gateway. This field is output only and will be ignored if provided. */ - @JsonProperty("source_type") private IngestionSourceType sourceType; /** * Configuration settings to control the ingestion of tables. These settings are applied to all * tables in the pipeline. */ - @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; public IngestionPipelineDefinition setConnectionName(String connectionName) { @@ -118,4 +124,49 @@ public String toString() { .add("tableConfiguration", tableConfiguration) .toString(); } + + IngestionPipelineDefinitionPb toPb() { + IngestionPipelineDefinitionPb pb = new IngestionPipelineDefinitionPb(); + pb.setConnectionName(connectionName); + pb.setIngestionGatewayId(ingestionGatewayId); + pb.setObjects(objects); + pb.setSourceType(sourceType); + pb.setTableConfiguration(tableConfiguration); + + return pb; + } + + static IngestionPipelineDefinition fromPb(IngestionPipelineDefinitionPb pb) { + IngestionPipelineDefinition model = new IngestionPipelineDefinition(); + model.setConnectionName(pb.getConnectionName()); + model.setIngestionGatewayId(pb.getIngestionGatewayId()); + model.setObjects(pb.getObjects()); + model.setSourceType(pb.getSourceType()); + model.setTableConfiguration(pb.getTableConfiguration()); + + return model; + } + + public static class IngestionPipelineDefinitionSerializer + extends JsonSerializer { + @Override + public void serialize( + IngestionPipelineDefinition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + IngestionPipelineDefinitionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class IngestionPipelineDefinitionDeserializer + extends JsonDeserializer { + @Override + public IngestionPipelineDefinition deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + IngestionPipelineDefinitionPb pb = mapper.readValue(p, IngestionPipelineDefinitionPb.class); + return IngestionPipelineDefinition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinitionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinitionPb.java new file mode 100755 index 000000000..2c339b476 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinitionPb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class IngestionPipelineDefinitionPb { + @JsonProperty("connection_name") + private String connectionName; + + @JsonProperty("ingestion_gateway_id") + private String ingestionGatewayId; + + @JsonProperty("objects") + private Collection objects; + + @JsonProperty("source_type") + private IngestionSourceType sourceType; + + @JsonProperty("table_configuration") + private TableSpecificConfig tableConfiguration; + + public IngestionPipelineDefinitionPb setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + + public IngestionPipelineDefinitionPb setIngestionGatewayId(String ingestionGatewayId) { + this.ingestionGatewayId = ingestionGatewayId; + return this; + } + + public String getIngestionGatewayId() { + return ingestionGatewayId; + } + + public IngestionPipelineDefinitionPb setObjects(Collection objects) { + this.objects = objects; + return this; + } + + public Collection 
getObjects() { + return objects; + } + + public IngestionPipelineDefinitionPb setSourceType(IngestionSourceType sourceType) { + this.sourceType = sourceType; + return this; + } + + public IngestionSourceType getSourceType() { + return sourceType; + } + + public IngestionPipelineDefinitionPb setTableConfiguration( + TableSpecificConfig tableConfiguration) { + this.tableConfiguration = tableConfiguration; + return this; + } + + public TableSpecificConfig getTableConfiguration() { + return tableConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestionPipelineDefinitionPb that = (IngestionPipelineDefinitionPb) o; + return Objects.equals(connectionName, that.connectionName) + && Objects.equals(ingestionGatewayId, that.ingestionGatewayId) + && Objects.equals(objects, that.objects) + && Objects.equals(sourceType, that.sourceType) + && Objects.equals(tableConfiguration, that.tableConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash( + connectionName, ingestionGatewayId, objects, sourceType, tableConfiguration); + } + + @Override + public String toString() { + return new ToStringer(IngestionPipelineDefinitionPb.class) + .add("connectionName", connectionName) + .add("ingestionGatewayId", ingestionGatewayId) + .add("objects", objects) + .add("sourceType", sourceType) + .add("tableConfiguration", tableConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java index e0f49c010..18295d359 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java @@ -3,14 +3,24 @@ package 
com.databricks.sdk.service.pipelines; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List pipeline events */ @Generated +@JsonSerialize(using = ListPipelineEventsRequest.ListPipelineEventsRequestSerializer.class) +@JsonDeserialize(using = ListPipelineEventsRequest.ListPipelineEventsRequestDeserializer.class) public class ListPipelineEventsRequest { /** * Criteria to select a subset of results, expressed using a SQL-like syntax. The supported @@ -20,16 +30,12 @@ public class ListPipelineEventsRequest { *

Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp> * '2021-07-22T06:37:33.083Z' */ - @JsonIgnore - @QueryParam("filter") private String filter; /** * Max number of entries to return in a single page. The system may return fewer than max_results * events in a response, even if there are more events available. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** @@ -37,8 +43,6 @@ public class ListPipelineEventsRequest { * The sort order can be ascending or descending. By default, events are returned in descending * order by timestamp. */ - @JsonIgnore - @QueryParam("order_by") private Collection orderBy; /** @@ -46,12 +50,10 @@ public class ListPipelineEventsRequest { * request except max_results. An error is returned if any fields other than max_results are set * when this field is set. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The pipeline to return events for. */ - @JsonIgnore private String pipelineId; + private String pipelineId; public ListPipelineEventsRequest setFilter(String filter) { this.filter = filter; @@ -125,4 +127,49 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + ListPipelineEventsRequestPb toPb() { + ListPipelineEventsRequestPb pb = new ListPipelineEventsRequestPb(); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + pb.setPipelineId(pipelineId); + + return pb; + } + + static ListPipelineEventsRequest fromPb(ListPipelineEventsRequestPb pb) { + ListPipelineEventsRequest model = new ListPipelineEventsRequest(); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class ListPipelineEventsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + 
ListPipelineEventsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPipelineEventsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPipelineEventsRequestDeserializer + extends JsonDeserializer { + @Override + public ListPipelineEventsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPipelineEventsRequestPb pb = mapper.readValue(p, ListPipelineEventsRequestPb.class); + return ListPipelineEventsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequestPb.java new file mode 100755 index 000000000..0810579d1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequestPb.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** List pipeline events */ +@Generated +class ListPipelineEventsRequestPb { + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("order_by") + private Collection orderBy; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private String pipelineId; + + public ListPipelineEventsRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListPipelineEventsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListPipelineEventsRequestPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public ListPipelineEventsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListPipelineEventsRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPipelineEventsRequestPb that = (ListPipelineEventsRequestPb) o; + return Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken) + && 
Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(filter, maxResults, orderBy, pageToken, pipelineId); + } + + @Override + public String toString() { + return new ToStringer(ListPipelineEventsRequestPb.class) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java index b73d5cdd1..4a8875de5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListPipelineEventsResponse.ListPipelineEventsResponseSerializer.class) +@JsonDeserialize(using = ListPipelineEventsResponse.ListPipelineEventsResponseDeserializer.class) public class ListPipelineEventsResponse { /** The list of events matching the request criteria. 
*/ - @JsonProperty("events") private Collection events; /** If present, a token to fetch the next page of events. */ - @JsonProperty("next_page_token") private String nextPageToken; /** If present, a token to fetch the previous page of events. */ - @JsonProperty("prev_page_token") private String prevPageToken; public ListPipelineEventsResponse setEvents(Collection events) { @@ -72,4 +80,45 @@ public String toString() { .add("prevPageToken", prevPageToken) .toString(); } + + ListPipelineEventsResponsePb toPb() { + ListPipelineEventsResponsePb pb = new ListPipelineEventsResponsePb(); + pb.setEvents(events); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + + return pb; + } + + static ListPipelineEventsResponse fromPb(ListPipelineEventsResponsePb pb) { + ListPipelineEventsResponse model = new ListPipelineEventsResponse(); + model.setEvents(pb.getEvents()); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + + return model; + } + + public static class ListPipelineEventsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPipelineEventsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPipelineEventsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPipelineEventsResponseDeserializer + extends JsonDeserializer { + @Override + public ListPipelineEventsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPipelineEventsResponsePb pb = mapper.readValue(p, ListPipelineEventsResponsePb.class); + return ListPipelineEventsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponsePb.java new file mode 100755 index 000000000..2cb8fb177 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListPipelineEventsResponsePb { + @JsonProperty("events") + private Collection events; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + public ListPipelineEventsResponsePb setEvents(Collection events) { + this.events = events; + return this; + } + + public Collection getEvents() { + return events; + } + + public ListPipelineEventsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListPipelineEventsResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPipelineEventsResponsePb that = (ListPipelineEventsResponsePb) o; + 
return Objects.equals(events, that.events) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(prevPageToken, that.prevPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(events, nextPageToken, prevPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPipelineEventsResponsePb.class) + .add("events", events) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java index 07c9fd1b5..331c53bbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java @@ -3,14 +3,24 @@ package com.databricks.sdk.service.pipelines; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** List pipelines */ @Generated +@JsonSerialize(using = ListPipelinesRequest.ListPipelinesRequestSerializer.class) +@JsonDeserialize(using = ListPipelinesRequest.ListPipelinesRequestDeserializer.class) 
public class ListPipelinesRequest { /** * Select a subset of results based on the specified criteria. The supported filters are: @@ -21,8 +31,6 @@ public class ListPipelinesRequest { * *

Composite filters are not supported. This field is optional. */ - @JsonIgnore - @QueryParam("filter") private String filter; /** @@ -31,21 +39,15 @@ public class ListPipelinesRequest { * optional. The default value is 25. The maximum value is 100. An error is returned if the value * of max_results is greater than 100. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** * A list of strings specifying the order of results. Supported order_by fields are id and name. * The default is id asc. This field is optional. */ - @JsonIgnore - @QueryParam("order_by") private Collection orderBy; /** Page token returned by previous call */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListPipelinesRequest setFilter(String filter) { @@ -109,4 +111,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListPipelinesRequestPb toPb() { + ListPipelinesRequestPb pb = new ListPipelinesRequestPb(); + pb.setFilter(filter); + pb.setMaxResults(maxResults); + pb.setOrderBy(orderBy); + pb.setPageToken(pageToken); + + return pb; + } + + static ListPipelinesRequest fromPb(ListPipelinesRequestPb pb) { + ListPipelinesRequest model = new ListPipelinesRequest(); + model.setFilter(pb.getFilter()); + model.setMaxResults(pb.getMaxResults()); + model.setOrderBy(pb.getOrderBy()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListPipelinesRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListPipelinesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPipelinesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPipelinesRequestDeserializer + extends JsonDeserializer { + @Override + public ListPipelinesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPipelinesRequestPb pb = mapper.readValue(p, ListPipelinesRequestPb.class); + return ListPipelinesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequestPb.java new file mode 100755 index 000000000..01a3aecab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequestPb.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** List pipelines */ +@Generated +class ListPipelinesRequestPb { + @JsonIgnore + @QueryParam("filter") + private String filter; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("order_by") + private Collection orderBy; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListPipelinesRequestPb setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public ListPipelinesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListPipelinesRequestPb setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public ListPipelinesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPipelinesRequestPb that = (ListPipelinesRequestPb) o; + return Objects.equals(filter, that.filter) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filter, maxResults, orderBy, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPipelinesRequestPb.class) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponse.java index 8fe88295f..a8e284fc1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ListPipelinesResponse.ListPipelinesResponseSerializer.class) +@JsonDeserialize(using = ListPipelinesResponse.ListPipelinesResponseDeserializer.class) public class ListPipelinesResponse { /** If present, a token to fetch the next page of events. */ - @JsonProperty("next_page_token") private String nextPageToken; /** The list of events matching the request criteria. */ - @JsonProperty("statuses") private Collection statuses; public ListPipelinesResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,43 @@ public String toString() { .add("statuses", statuses) .toString(); } + + ListPipelinesResponsePb toPb() { + ListPipelinesResponsePb pb = new ListPipelinesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setStatuses(statuses); + + return pb; + } + + static ListPipelinesResponse fromPb(ListPipelinesResponsePb pb) { + ListPipelinesResponse model = new ListPipelinesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setStatuses(pb.getStatuses()); + + return model; + } + + public static class ListPipelinesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPipelinesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPipelinesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPipelinesResponseDeserializer + extends JsonDeserializer { + @Override + public ListPipelinesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPipelinesResponsePb pb = mapper.readValue(p, ListPipelinesResponsePb.class); + return ListPipelinesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponsePb.java new file mode 100755 index 000000000..d8b244612 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListPipelinesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("statuses") + private Collection statuses; + + public ListPipelinesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListPipelinesResponsePb setStatuses(Collection statuses) { + this.statuses = statuses; + return this; + } + + public Collection getStatuses() { + return statuses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPipelinesResponsePb that = (ListPipelinesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(statuses, that.statuses); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, statuses); + } + + @Override + public String toString() { + return new ToStringer(ListPipelinesResponsePb.class) + 
.add("nextPageToken", nextPageToken) + .add("statuses", statuses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequest.java index 5808e3cc8..72f02f8d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequest.java @@ -3,30 +3,34 @@ package com.databricks.sdk.service.pipelines; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List pipeline updates */ @Generated +@JsonSerialize(using = ListUpdatesRequest.ListUpdatesRequestSerializer.class) +@JsonDeserialize(using = ListUpdatesRequest.ListUpdatesRequestDeserializer.class) public class ListUpdatesRequest { /** Max number of entries to return in a single page. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Page token returned by previous call */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** The pipeline to return updates for. 
*/ - @JsonIgnore private String pipelineId; + private String pipelineId; /** If present, returns updates until and including this update_id. */ - @JsonIgnore - @QueryParam("until_update_id") private String untilUpdateId; public ListUpdatesRequest setMaxResults(Long maxResults) { @@ -90,4 +94,44 @@ public String toString() { .add("untilUpdateId", untilUpdateId) .toString(); } + + ListUpdatesRequestPb toPb() { + ListUpdatesRequestPb pb = new ListUpdatesRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setPipelineId(pipelineId); + pb.setUntilUpdateId(untilUpdateId); + + return pb; + } + + static ListUpdatesRequest fromPb(ListUpdatesRequestPb pb) { + ListUpdatesRequest model = new ListUpdatesRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setPipelineId(pb.getPipelineId()); + model.setUntilUpdateId(pb.getUntilUpdateId()); + + return model; + } + + public static class ListUpdatesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListUpdatesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListUpdatesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListUpdatesRequestDeserializer extends JsonDeserializer { + @Override + public ListUpdatesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListUpdatesRequestPb pb = mapper.readValue(p, ListUpdatesRequestPb.class); + return ListUpdatesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequestPb.java new file mode 100755 index 000000000..0fe37a876 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesRequestPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List pipeline updates */ +@Generated +class ListUpdatesRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private String pipelineId; + + @JsonIgnore + @QueryParam("until_update_id") + private String untilUpdateId; + + public ListUpdatesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListUpdatesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListUpdatesRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public ListUpdatesRequestPb setUntilUpdateId(String untilUpdateId) { + this.untilUpdateId = untilUpdateId; + return this; + } + + public String getUntilUpdateId() { + return untilUpdateId; + } + + @Override 
+ public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUpdatesRequestPb that = (ListUpdatesRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(untilUpdateId, that.untilUpdateId); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, pipelineId, untilUpdateId); + } + + @Override + public String toString() { + return new ToStringer(ListUpdatesRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("pipelineId", pipelineId) + .add("untilUpdateId", untilUpdateId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponse.java index 191a24100..ec3915246 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponse.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated 
+@JsonSerialize(using = ListUpdatesResponse.ListUpdatesResponseSerializer.class) +@JsonDeserialize(using = ListUpdatesResponse.ListUpdatesResponseDeserializer.class) public class ListUpdatesResponse { /** * If present, then there are more results, and this a token to be used in a subsequent request to * fetch the next page. */ - @JsonProperty("next_page_token") private String nextPageToken; /** If present, then this token can be used in a subsequent request to fetch the previous page. */ - @JsonProperty("prev_page_token") private String prevPageToken; /** */ - @JsonProperty("updates") private Collection updates; public ListUpdatesResponse setNextPageToken(String nextPageToken) { @@ -75,4 +83,43 @@ public String toString() { .add("updates", updates) .toString(); } + + ListUpdatesResponsePb toPb() { + ListUpdatesResponsePb pb = new ListUpdatesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPrevPageToken(prevPageToken); + pb.setUpdates(updates); + + return pb; + } + + static ListUpdatesResponse fromPb(ListUpdatesResponsePb pb) { + ListUpdatesResponse model = new ListUpdatesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrevPageToken(pb.getPrevPageToken()); + model.setUpdates(pb.getUpdates()); + + return model; + } + + public static class ListUpdatesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListUpdatesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListUpdatesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListUpdatesResponseDeserializer + extends JsonDeserializer { + @Override + public ListUpdatesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListUpdatesResponsePb pb = mapper.readValue(p, ListUpdatesResponsePb.class); + return ListUpdatesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponsePb.java new file mode 100755 index 000000000..c9ec1d9ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListUpdatesResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListUpdatesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("prev_page_token") + private String prevPageToken; + + @JsonProperty("updates") + private Collection updates; + + public ListUpdatesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListUpdatesResponsePb setPrevPageToken(String prevPageToken) { + this.prevPageToken = prevPageToken; + return this; + } + + public String getPrevPageToken() { + return prevPageToken; + } + + public ListUpdatesResponsePb setUpdates(Collection updates) { + this.updates = updates; + return this; + } + + public Collection getUpdates() { + return updates; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUpdatesResponsePb that = (ListUpdatesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && 
Objects.equals(prevPageToken, that.prevPageToken) + && Objects.equals(updates, that.updates); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, prevPageToken, updates); + } + + @Override + public String toString() { + return new ToStringer(ListUpdatesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("prevPageToken", prevPageToken) + .add("updates", updates) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTrigger.java index ba8ac321e..fc3b904f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTrigger.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTrigger.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ManualTrigger.ManualTriggerSerializer.class) +@JsonDeserialize(using = ManualTrigger.ManualTriggerDeserializer.class) public class ManualTrigger { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(ManualTrigger.class).toString(); } + + ManualTriggerPb toPb() { + ManualTriggerPb pb = new ManualTriggerPb(); + + return pb; + } + + static ManualTrigger fromPb(ManualTriggerPb pb) { + 
ManualTrigger model = new ManualTrigger(); + + return model; + } + + public static class ManualTriggerSerializer extends JsonSerializer { + @Override + public void serialize(ManualTrigger value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ManualTriggerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ManualTriggerDeserializer extends JsonDeserializer { + @Override + public ManualTrigger deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ManualTriggerPb pb = mapper.readValue(p, ManualTriggerPb.class); + return ManualTrigger.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTriggerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTriggerPb.java new file mode 100755 index 000000000..e1ce40a72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ManualTriggerPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ManualTriggerPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ManualTriggerPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java index 42bba0df5..21f835548 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NotebookLibrary.NotebookLibrarySerializer.class) +@JsonDeserialize(using = NotebookLibrary.NotebookLibraryDeserializer.class) public class NotebookLibrary { /** The absolute path of the source code. 
*/ - @JsonProperty("path") private String path; public NotebookLibrary setPath(String path) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(NotebookLibrary.class).add("path", path).toString(); } + + NotebookLibraryPb toPb() { + NotebookLibraryPb pb = new NotebookLibraryPb(); + pb.setPath(path); + + return pb; + } + + static NotebookLibrary fromPb(NotebookLibraryPb pb) { + NotebookLibrary model = new NotebookLibrary(); + model.setPath(pb.getPath()); + + return model; + } + + public static class NotebookLibrarySerializer extends JsonSerializer { + @Override + public void serialize(NotebookLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotebookLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotebookLibraryDeserializer extends JsonDeserializer { + @Override + public NotebookLibrary deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotebookLibraryPb pb = mapper.readValue(p, NotebookLibraryPb.class); + return NotebookLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibraryPb.java new file mode 100755 index 000000000..3f6ef19d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibraryPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NotebookLibraryPb { + @JsonProperty("path") + private String path; + + public NotebookLibraryPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotebookLibraryPb that = (NotebookLibraryPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(NotebookLibraryPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java index 430fa4b46..4a652b8a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Notifications.NotificationsSerializer.class) +@JsonDeserialize(using = Notifications.NotificationsDeserializer.class) public class Notifications { /** * A list of alerts that trigger the sending of notifications to the configured destinations. The @@ -18,11 +29,9 @@ public class Notifications { * time a pipeline update fails. * `on-update-fatal-failure`: A pipeline update fails with a * non-retryable (fatal) error. * `on-flow-failure`: A single data flow fails. */ - @JsonProperty("alerts") private Collection alerts; /** A list of email addresses notified when a configured alert is triggered. */ - @JsonProperty("email_recipients") private Collection emailRecipients; public Notifications setAlerts(Collection alerts) { @@ -64,4 +73,39 @@ public String toString() { .add("emailRecipients", emailRecipients) .toString(); } + + NotificationsPb toPb() { + NotificationsPb pb = new NotificationsPb(); + pb.setAlerts(alerts); + pb.setEmailRecipients(emailRecipients); + + return pb; + } + + static Notifications fromPb(NotificationsPb pb) { + Notifications model = new Notifications(); + model.setAlerts(pb.getAlerts()); + model.setEmailRecipients(pb.getEmailRecipients()); + + return model; + } + + public static class NotificationsSerializer extends JsonSerializer { + @Override + public void serialize(Notifications value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotificationsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotificationsDeserializer extends JsonDeserializer { + @Override + public Notifications deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotificationsPb pb = mapper.readValue(p, NotificationsPb.class); + return Notifications.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotificationsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotificationsPb.java new file mode 100755 index 000000000..fcb5c91d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotificationsPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class NotificationsPb { + @JsonProperty("alerts") + private Collection alerts; + + @JsonProperty("email_recipients") + private Collection emailRecipients; + + public NotificationsPb setAlerts(Collection alerts) { + this.alerts = alerts; + return this; + } + + public Collection getAlerts() { + return alerts; + } + + public NotificationsPb setEmailRecipients(Collection emailRecipients) { + this.emailRecipients = emailRecipients; + return this; + } + + public Collection getEmailRecipients() { + return emailRecipients; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotificationsPb that = (NotificationsPb) o; + return Objects.equals(alerts, that.alerts) + && Objects.equals(emailRecipients, that.emailRecipients); + } + + @Override + public int hashCode() { + return Objects.hash(alerts, emailRecipients); + } + + @Override + public String toString() { + return new ToStringer(NotificationsPb.class) + .add("alerts", alerts) + .add("emailRecipients", emailRecipients) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java index 9e36878d7..993318946 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java @@ -4,80 +4,74 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Origin.OriginSerializer.class) +@JsonDeserialize(using = Origin.OriginDeserializer.class) public class Origin { /** The id of a batch. Unique within a flow. */ - @JsonProperty("batch_id") private Long batchId; /** The cloud provider, e.g., AWS or Azure. */ - @JsonProperty("cloud") private String cloud; /** The id of the cluster where an execution happens. Unique within a region. */ - @JsonProperty("cluster_id") private String clusterId; /** The name of a dataset. Unique within a pipeline. */ - @JsonProperty("dataset_name") private String datasetName; /** * The id of the flow. Globally unique. Incremental queries will generally reuse the same id while * complete queries will have a new id per update. */ - @JsonProperty("flow_id") private String flowId; /** The name of the flow. Not unique. 
*/ - @JsonProperty("flow_name") private String flowName; /** The optional host name where the event was triggered */ - @JsonProperty("host") private String host; /** The id of a maintenance run. Globally unique. */ - @JsonProperty("maintenance_id") private String maintenanceId; /** Materialization name. */ - @JsonProperty("materialization_name") private String materializationName; /** The org id of the user. Unique within a cloud. */ - @JsonProperty("org_id") private Long orgId; /** The id of the pipeline. Globally unique. */ - @JsonProperty("pipeline_id") private String pipelineId; /** The name of the pipeline. Not unique. */ - @JsonProperty("pipeline_name") private String pipelineName; /** The cloud region. */ - @JsonProperty("region") private String region; /** The id of the request that caused an update. */ - @JsonProperty("request_id") private String requestId; /** The id of a (delta) table. Globally unique. */ - @JsonProperty("table_id") private String tableId; /** The Unity Catalog id of the MV or ST being updated. */ - @JsonProperty("uc_resource_id") private String ucResourceId; /** The id of an execution. Globally unique. 
*/ - @JsonProperty("update_id") private String updateId; public Origin setBatchId(Long batchId) { @@ -301,4 +295,69 @@ public String toString() { .add("updateId", updateId) .toString(); } + + OriginPb toPb() { + OriginPb pb = new OriginPb(); + pb.setBatchId(batchId); + pb.setCloud(cloud); + pb.setClusterId(clusterId); + pb.setDatasetName(datasetName); + pb.setFlowId(flowId); + pb.setFlowName(flowName); + pb.setHost(host); + pb.setMaintenanceId(maintenanceId); + pb.setMaterializationName(materializationName); + pb.setOrgId(orgId); + pb.setPipelineId(pipelineId); + pb.setPipelineName(pipelineName); + pb.setRegion(region); + pb.setRequestId(requestId); + pb.setTableId(tableId); + pb.setUcResourceId(ucResourceId); + pb.setUpdateId(updateId); + + return pb; + } + + static Origin fromPb(OriginPb pb) { + Origin model = new Origin(); + model.setBatchId(pb.getBatchId()); + model.setCloud(pb.getCloud()); + model.setClusterId(pb.getClusterId()); + model.setDatasetName(pb.getDatasetName()); + model.setFlowId(pb.getFlowId()); + model.setFlowName(pb.getFlowName()); + model.setHost(pb.getHost()); + model.setMaintenanceId(pb.getMaintenanceId()); + model.setMaterializationName(pb.getMaterializationName()); + model.setOrgId(pb.getOrgId()); + model.setPipelineId(pb.getPipelineId()); + model.setPipelineName(pb.getPipelineName()); + model.setRegion(pb.getRegion()); + model.setRequestId(pb.getRequestId()); + model.setTableId(pb.getTableId()); + model.setUcResourceId(pb.getUcResourceId()); + model.setUpdateId(pb.getUpdateId()); + + return model; + } + + public static class OriginSerializer extends JsonSerializer { + @Override + public void serialize(Origin value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OriginPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OriginDeserializer extends JsonDeserializer { + @Override + public Origin deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OriginPb pb = mapper.readValue(p, OriginPb.class); + return Origin.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OriginPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OriginPb.java new file mode 100755 index 000000000..682e1b4ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OriginPb.java @@ -0,0 +1,284 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class OriginPb { + @JsonProperty("batch_id") + private Long batchId; + + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("dataset_name") + private String datasetName; + + @JsonProperty("flow_id") + private String flowId; + + @JsonProperty("flow_name") + private String flowName; + + @JsonProperty("host") + private String host; + + @JsonProperty("maintenance_id") + private String maintenanceId; + + @JsonProperty("materialization_name") + private String materializationName; + + @JsonProperty("org_id") + private Long orgId; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("pipeline_name") + private String pipelineName; + + @JsonProperty("region") + private String region; + + @JsonProperty("request_id") + private String requestId; + + @JsonProperty("table_id") + private String tableId; + + @JsonProperty("uc_resource_id") + private String ucResourceId; + + @JsonProperty("update_id") + private String updateId; + + public OriginPb setBatchId(Long batchId) { + this.batchId = batchId; + 
return this; + } + + public Long getBatchId() { + return batchId; + } + + public OriginPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public OriginPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public OriginPb setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + public OriginPb setFlowId(String flowId) { + this.flowId = flowId; + return this; + } + + public String getFlowId() { + return flowId; + } + + public OriginPb setFlowName(String flowName) { + this.flowName = flowName; + return this; + } + + public String getFlowName() { + return flowName; + } + + public OriginPb setHost(String host) { + this.host = host; + return this; + } + + public String getHost() { + return host; + } + + public OriginPb setMaintenanceId(String maintenanceId) { + this.maintenanceId = maintenanceId; + return this; + } + + public String getMaintenanceId() { + return maintenanceId; + } + + public OriginPb setMaterializationName(String materializationName) { + this.materializationName = materializationName; + return this; + } + + public String getMaterializationName() { + return materializationName; + } + + public OriginPb setOrgId(Long orgId) { + this.orgId = orgId; + return this; + } + + public Long getOrgId() { + return orgId; + } + + public OriginPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public OriginPb setPipelineName(String pipelineName) { + this.pipelineName = pipelineName; + return this; + } + + public String getPipelineName() { + return pipelineName; + } + + public OriginPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + 
+ public OriginPb setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + public OriginPb setTableId(String tableId) { + this.tableId = tableId; + return this; + } + + public String getTableId() { + return tableId; + } + + public OriginPb setUcResourceId(String ucResourceId) { + this.ucResourceId = ucResourceId; + return this; + } + + public String getUcResourceId() { + return ucResourceId; + } + + public OriginPb setUpdateId(String updateId) { + this.updateId = updateId; + return this; + } + + public String getUpdateId() { + return updateId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OriginPb that = (OriginPb) o; + return Objects.equals(batchId, that.batchId) + && Objects.equals(cloud, that.cloud) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(flowId, that.flowId) + && Objects.equals(flowName, that.flowName) + && Objects.equals(host, that.host) + && Objects.equals(maintenanceId, that.maintenanceId) + && Objects.equals(materializationName, that.materializationName) + && Objects.equals(orgId, that.orgId) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(pipelineName, that.pipelineName) + && Objects.equals(region, that.region) + && Objects.equals(requestId, that.requestId) + && Objects.equals(tableId, that.tableId) + && Objects.equals(ucResourceId, that.ucResourceId) + && Objects.equals(updateId, that.updateId); + } + + @Override + public int hashCode() { + return Objects.hash( + batchId, + cloud, + clusterId, + datasetName, + flowId, + flowName, + host, + maintenanceId, + materializationName, + orgId, + pipelineId, + pipelineName, + region, + requestId, + tableId, + ucResourceId, + updateId); + } + + @Override + public String toString() { + return new ToStringer(OriginPb.class) + 
.add("batchId", batchId) + .add("cloud", cloud) + .add("clusterId", clusterId) + .add("datasetName", datasetName) + .add("flowId", flowId) + .add("flowName", flowName) + .add("host", host) + .add("maintenanceId", maintenanceId) + .add("materializationName", materializationName) + .add("orgId", orgId) + .add("pipelineId", pipelineId) + .add("pipelineName", pipelineName) + .add("region", region) + .add("requestId", requestId) + .add("tableId", tableId) + .add("ucResourceId", ucResourceId) + .add("updateId", updateId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java index 2a764890d..ea1f967d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PathPattern.PathPatternSerializer.class) +@JsonDeserialize(using = PathPattern.PathPatternDeserializer.class) public class PathPattern { /** The source code to include for pipelines */ - @JsonProperty("include") private String include; public PathPattern setInclude(String 
include) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(PathPattern.class).add("include", include).toString(); } + + PathPatternPb toPb() { + PathPatternPb pb = new PathPatternPb(); + pb.setInclude(include); + + return pb; + } + + static PathPattern fromPb(PathPatternPb pb) { + PathPattern model = new PathPattern(); + model.setInclude(pb.getInclude()); + + return model; + } + + public static class PathPatternSerializer extends JsonSerializer { + @Override + public void serialize(PathPattern value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PathPatternPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PathPatternDeserializer extends JsonDeserializer { + @Override + public PathPattern deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PathPatternPb pb = mapper.readValue(p, PathPatternPb.class); + return PathPattern.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPatternPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPatternPb.java new file mode 100755 index 000000000..5ddc5540c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPatternPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PathPatternPb { + @JsonProperty("include") + private String include; + + public PathPatternPb setInclude(String include) { + this.include = include; + return this; + } + + public String getInclude() { + return include; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PathPatternPb that = (PathPatternPb) o; + return Objects.equals(include, that.include); + } + + @Override + public int hashCode() { + return Objects.hash(include); + } + + @Override + public String toString() { + return new ToStringer(PathPatternPb.class).add("include", include).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java index 27b567277..f00d5bd76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineAccessControlRequest.PipelineAccessControlRequestSerializer.class) +@JsonDeserialize( + using = PipelineAccessControlRequest.PipelineAccessControlRequestDeserializer.class) public class PipelineAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public PipelineAccessControlRequest setGroupName(String groupName) { @@ -86,4 +94,47 @@ public String toString() { .add("userName", userName) .toString(); } + + PipelineAccessControlRequestPb toPb() { + PipelineAccessControlRequestPb pb = new PipelineAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static PipelineAccessControlRequest fromPb(PipelineAccessControlRequestPb pb) { + PipelineAccessControlRequest model = new PipelineAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class PipelineAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + PipelineAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class PipelineAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public PipelineAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineAccessControlRequestPb pb = mapper.readValue(p, PipelineAccessControlRequestPb.class); + return PipelineAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequestPb.java new file mode 100755 index 000000000..ec5c06de5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private PipelinePermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public PipelineAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public PipelineAccessControlRequestPb setPermissionLevel( + PipelinePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PipelinePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public PipelineAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public PipelineAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineAccessControlRequestPb that = (PipelineAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, 
userName); + } + + @Override + public String toString() { + return new ToStringer(PipelineAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java index 5aff63145..4a6a1e263 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineAccessControlResponse.PipelineAccessControlResponseSerializer.class) +@JsonDeserialize( + using = PipelineAccessControlResponse.PipelineAccessControlResponseDeserializer.class) public class PipelineAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. 
*/ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public PipelineAccessControlResponse setAllPermissions( @@ -103,4 +110,50 @@ public String toString() { .add("userName", userName) .toString(); } + + PipelineAccessControlResponsePb toPb() { + PipelineAccessControlResponsePb pb = new PipelineAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static PipelineAccessControlResponse fromPb(PipelineAccessControlResponsePb pb) { + PipelineAccessControlResponse model = new PipelineAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class PipelineAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + PipelineAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public PipelineAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineAccessControlResponsePb pb = + mapper.readValue(p, PipelineAccessControlResponsePb.class); + return PipelineAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponsePb.java new file mode 100755 index 000000000..a5dba4aca --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PipelineAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public PipelineAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public PipelineAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public PipelineAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public 
PipelineAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public PipelineAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineAccessControlResponsePb that = (PipelineAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(PipelineAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java index 2eb27a477..f8c0258e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java @@ -4,36 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineCluster.PipelineClusterSerializer.class) +@JsonDeserialize(using = PipelineCluster.PipelineClusterDeserializer.class) public class PipelineCluster { /** Note: This field won't be persisted. Only API users will check this field. */ - @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. */ - @JsonProperty("autoscale") private PipelineClusterAutoscale autoscale; /** * Attributes related to clusters running on Amazon Web Services. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("aws_attributes") private com.databricks.sdk.service.compute.AwsAttributes awsAttributes; /** * Attributes related to clusters running on Microsoft Azure. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("azure_attributes") private com.databricks.sdk.service.compute.AzureAttributes azureAttributes; /** @@ -43,7 +50,6 @@ public class PipelineCluster { * driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is * `$destination/$clusterId/executor`. 
*/ - @JsonProperty("cluster_log_conf") private com.databricks.sdk.service.compute.ClusterLogConf clusterLogConf; /** @@ -55,32 +61,27 @@ public class PipelineCluster { *

- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster * tags */ - @JsonProperty("custom_tags") private Map customTags; /** * The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster * uses the instance pool with id (instance_pool_id) if the driver pool is not assigned. */ - @JsonProperty("driver_instance_pool_id") private String driverInstancePoolId; /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. */ - @JsonProperty("driver_node_type_id") private String driverNodeTypeId; /** Whether to enable local disk encryption for the cluster. */ - @JsonProperty("enable_local_disk_encryption") private Boolean enableLocalDiskEncryption; /** * Attributes related to clusters running on Google Cloud Platform. If not specified at cluster * creation, a set of default values will be used. */ - @JsonProperty("gcp_attributes") private com.databricks.sdk.service.compute.GcpAttributes gcpAttributes; /** @@ -88,11 +89,9 @@ public class PipelineCluster { * scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, * init script logs are sent to `//init_scripts`. */ - @JsonProperty("init_scripts") private Collection initScripts; /** The optional ID of the instance pool to which the cluster belongs. */ - @JsonProperty("instance_pool_id") private String instancePoolId; /** @@ -100,7 +99,6 @@ public class PipelineCluster { * `maintenance` to configure the maintenance cluster. This field is optional. The default value * is `default`. */ - @JsonProperty("label") private String label; /** @@ -109,7 +107,6 @@ public class PipelineCluster { * compute intensive workloads. A list of available node types can be retrieved by using the * :method:clusters/listNodeTypes API call. 
*/ - @JsonProperty("node_type_id") private String nodeTypeId; /** @@ -122,18 +119,15 @@ public class PipelineCluster { * workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the * new nodes are provisioned. */ - @JsonProperty("num_workers") private Long numWorkers; /** The ID of the cluster policy used to create the cluster if applicable. */ - @JsonProperty("policy_id") private String policyId; /** * An object containing a set of optional, user-specified Spark configuration key-value pairs. See * :method:clusters/create for more details. */ - @JsonProperty("spark_conf") private Map sparkConf; /** @@ -149,7 +143,6 @@ public class PipelineCluster { * "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS * -Dspark.shuffle.service.enabled=true"}` */ - @JsonProperty("spark_env_vars") private Map sparkEnvVars; /** @@ -157,7 +150,6 @@ public class PipelineCluster { * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up * to 10 keys can be specified. 
*/ - @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; public PipelineCluster setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { @@ -410,4 +402,74 @@ public String toString() { .add("sshPublicKeys", sshPublicKeys) .toString(); } + + PipelineClusterPb toPb() { + PipelineClusterPb pb = new PipelineClusterPb(); + pb.setApplyPolicyDefaultValues(applyPolicyDefaultValues); + pb.setAutoscale(autoscale); + pb.setAwsAttributes(awsAttributes); + pb.setAzureAttributes(azureAttributes); + pb.setClusterLogConf(clusterLogConf); + pb.setCustomTags(customTags); + pb.setDriverInstancePoolId(driverInstancePoolId); + pb.setDriverNodeTypeId(driverNodeTypeId); + pb.setEnableLocalDiskEncryption(enableLocalDiskEncryption); + pb.setGcpAttributes(gcpAttributes); + pb.setInitScripts(initScripts); + pb.setInstancePoolId(instancePoolId); + pb.setLabel(label); + pb.setNodeTypeId(nodeTypeId); + pb.setNumWorkers(numWorkers); + pb.setPolicyId(policyId); + pb.setSparkConf(sparkConf); + pb.setSparkEnvVars(sparkEnvVars); + pb.setSshPublicKeys(sshPublicKeys); + + return pb; + } + + static PipelineCluster fromPb(PipelineClusterPb pb) { + PipelineCluster model = new PipelineCluster(); + model.setApplyPolicyDefaultValues(pb.getApplyPolicyDefaultValues()); + model.setAutoscale(pb.getAutoscale()); + model.setAwsAttributes(pb.getAwsAttributes()); + model.setAzureAttributes(pb.getAzureAttributes()); + model.setClusterLogConf(pb.getClusterLogConf()); + model.setCustomTags(pb.getCustomTags()); + model.setDriverInstancePoolId(pb.getDriverInstancePoolId()); + model.setDriverNodeTypeId(pb.getDriverNodeTypeId()); + model.setEnableLocalDiskEncryption(pb.getEnableLocalDiskEncryption()); + model.setGcpAttributes(pb.getGcpAttributes()); + model.setInitScripts(pb.getInitScripts()); + model.setInstancePoolId(pb.getInstancePoolId()); + model.setLabel(pb.getLabel()); + model.setNodeTypeId(pb.getNodeTypeId()); + model.setNumWorkers(pb.getNumWorkers()); + model.setPolicyId(pb.getPolicyId()); + 
model.setSparkConf(pb.getSparkConf()); + model.setSparkEnvVars(pb.getSparkEnvVars()); + model.setSshPublicKeys(pb.getSshPublicKeys()); + + return model; + } + + public static class PipelineClusterSerializer extends JsonSerializer { + @Override + public void serialize(PipelineCluster value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineClusterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineClusterDeserializer extends JsonDeserializer { + @Override + public PipelineCluster deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineClusterPb pb = mapper.readValue(p, PipelineClusterPb.class); + return PipelineCluster.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscale.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscale.java index c06d51ca4..496d159e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscale.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscale.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineClusterAutoscale.PipelineClusterAutoscaleSerializer.class) +@JsonDeserialize(using = PipelineClusterAutoscale.PipelineClusterAutoscaleDeserializer.class) public class PipelineClusterAutoscale { /** * The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` * must be strictly greater than `min_workers`. */ - @JsonProperty("max_workers") private Long maxWorkers; /** * The minimum number of workers the cluster can scale down to when underutilized. It is also the * initial number of workers the cluster will have after creation. */ - @JsonProperty("min_workers") private Long minWorkers; /** @@ -29,7 +38,6 @@ public class PipelineClusterAutoscale { * of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy * autoscaling feature is used for `maintenance` clusters. */ - @JsonProperty("mode") private PipelineClusterAutoscaleMode mode; public PipelineClusterAutoscale setMaxWorkers(Long maxWorkers) { @@ -82,4 +90,45 @@ public String toString() { .add("mode", mode) .toString(); } + + PipelineClusterAutoscalePb toPb() { + PipelineClusterAutoscalePb pb = new PipelineClusterAutoscalePb(); + pb.setMaxWorkers(maxWorkers); + pb.setMinWorkers(minWorkers); + pb.setMode(mode); + + return pb; + } + + static PipelineClusterAutoscale fromPb(PipelineClusterAutoscalePb pb) { + PipelineClusterAutoscale model = new PipelineClusterAutoscale(); + model.setMaxWorkers(pb.getMaxWorkers()); + model.setMinWorkers(pb.getMinWorkers()); + model.setMode(pb.getMode()); + + return model; + } + + public static class PipelineClusterAutoscaleSerializer + extends JsonSerializer { + @Override + public void serialize( + PipelineClusterAutoscale value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineClusterAutoscalePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineClusterAutoscaleDeserializer + extends JsonDeserializer { + @Override + public PipelineClusterAutoscale deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineClusterAutoscalePb pb = mapper.readValue(p, PipelineClusterAutoscalePb.class); + return PipelineClusterAutoscale.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscalePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscalePb.java new file mode 100755 index 000000000..589ce052d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscalePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineClusterAutoscalePb { + @JsonProperty("max_workers") + private Long maxWorkers; + + @JsonProperty("min_workers") + private Long minWorkers; + + @JsonProperty("mode") + private PipelineClusterAutoscaleMode mode; + + public PipelineClusterAutoscalePb setMaxWorkers(Long maxWorkers) { + this.maxWorkers = maxWorkers; + return this; + } + + public Long getMaxWorkers() { + return maxWorkers; + } + + public PipelineClusterAutoscalePb setMinWorkers(Long minWorkers) { + this.minWorkers = minWorkers; + return this; + } + + public Long getMinWorkers() { + return minWorkers; + } + + public PipelineClusterAutoscalePb setMode(PipelineClusterAutoscaleMode mode) { + this.mode = mode; + return this; + } + + public PipelineClusterAutoscaleMode getMode() { + return mode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineClusterAutoscalePb that = (PipelineClusterAutoscalePb) o; + return Objects.equals(maxWorkers, that.maxWorkers) + && Objects.equals(minWorkers, that.minWorkers) + && Objects.equals(mode, that.mode); + } + + @Override + public int hashCode() { + return Objects.hash(maxWorkers, minWorkers, mode); + } + + @Override + public String toString() { + return new ToStringer(PipelineClusterAutoscalePb.class) + .add("maxWorkers", maxWorkers) + .add("minWorkers", minWorkers) + .add("mode", mode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterPb.java new file mode 100755 index 000000000..277e7441d --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterPb.java @@ -0,0 +1,321 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class PipelineClusterPb { + @JsonProperty("apply_policy_default_values") + private Boolean applyPolicyDefaultValues; + + @JsonProperty("autoscale") + private PipelineClusterAutoscale autoscale; + + @JsonProperty("aws_attributes") + private com.databricks.sdk.service.compute.AwsAttributes awsAttributes; + + @JsonProperty("azure_attributes") + private com.databricks.sdk.service.compute.AzureAttributes azureAttributes; + + @JsonProperty("cluster_log_conf") + private com.databricks.sdk.service.compute.ClusterLogConf clusterLogConf; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("driver_instance_pool_id") + private String driverInstancePoolId; + + @JsonProperty("driver_node_type_id") + private String driverNodeTypeId; + + @JsonProperty("enable_local_disk_encryption") + private Boolean enableLocalDiskEncryption; + + @JsonProperty("gcp_attributes") + private com.databricks.sdk.service.compute.GcpAttributes gcpAttributes; + + @JsonProperty("init_scripts") + private Collection initScripts; + + @JsonProperty("instance_pool_id") + private String instancePoolId; + + @JsonProperty("label") + private String label; + + @JsonProperty("node_type_id") + private String nodeTypeId; + + @JsonProperty("num_workers") + private Long numWorkers; + + @JsonProperty("policy_id") + private String policyId; + + @JsonProperty("spark_conf") + private Map sparkConf; + + @JsonProperty("spark_env_vars") + private Map sparkEnvVars; + + @JsonProperty("ssh_public_keys") + private Collection 
sshPublicKeys; + + public PipelineClusterPb setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { + this.applyPolicyDefaultValues = applyPolicyDefaultValues; + return this; + } + + public Boolean getApplyPolicyDefaultValues() { + return applyPolicyDefaultValues; + } + + public PipelineClusterPb setAutoscale(PipelineClusterAutoscale autoscale) { + this.autoscale = autoscale; + return this; + } + + public PipelineClusterAutoscale getAutoscale() { + return autoscale; + } + + public PipelineClusterPb setAwsAttributes( + com.databricks.sdk.service.compute.AwsAttributes awsAttributes) { + this.awsAttributes = awsAttributes; + return this; + } + + public com.databricks.sdk.service.compute.AwsAttributes getAwsAttributes() { + return awsAttributes; + } + + public PipelineClusterPb setAzureAttributes( + com.databricks.sdk.service.compute.AzureAttributes azureAttributes) { + this.azureAttributes = azureAttributes; + return this; + } + + public com.databricks.sdk.service.compute.AzureAttributes getAzureAttributes() { + return azureAttributes; + } + + public PipelineClusterPb setClusterLogConf( + com.databricks.sdk.service.compute.ClusterLogConf clusterLogConf) { + this.clusterLogConf = clusterLogConf; + return this; + } + + public com.databricks.sdk.service.compute.ClusterLogConf getClusterLogConf() { + return clusterLogConf; + } + + public PipelineClusterPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public PipelineClusterPb setDriverInstancePoolId(String driverInstancePoolId) { + this.driverInstancePoolId = driverInstancePoolId; + return this; + } + + public String getDriverInstancePoolId() { + return driverInstancePoolId; + } + + public PipelineClusterPb setDriverNodeTypeId(String driverNodeTypeId) { + this.driverNodeTypeId = driverNodeTypeId; + return this; + } + + public String getDriverNodeTypeId() { + return driverNodeTypeId; + } + + public PipelineClusterPb 
setEnableLocalDiskEncryption(Boolean enableLocalDiskEncryption) { + this.enableLocalDiskEncryption = enableLocalDiskEncryption; + return this; + } + + public Boolean getEnableLocalDiskEncryption() { + return enableLocalDiskEncryption; + } + + public PipelineClusterPb setGcpAttributes( + com.databricks.sdk.service.compute.GcpAttributes gcpAttributes) { + this.gcpAttributes = gcpAttributes; + return this; + } + + public com.databricks.sdk.service.compute.GcpAttributes getGcpAttributes() { + return gcpAttributes; + } + + public PipelineClusterPb setInitScripts( + Collection initScripts) { + this.initScripts = initScripts; + return this; + } + + public Collection getInitScripts() { + return initScripts; + } + + public PipelineClusterPb setInstancePoolId(String instancePoolId) { + this.instancePoolId = instancePoolId; + return this; + } + + public String getInstancePoolId() { + return instancePoolId; + } + + public PipelineClusterPb setLabel(String label) { + this.label = label; + return this; + } + + public String getLabel() { + return label; + } + + public PipelineClusterPb setNodeTypeId(String nodeTypeId) { + this.nodeTypeId = nodeTypeId; + return this; + } + + public String getNodeTypeId() { + return nodeTypeId; + } + + public PipelineClusterPb setNumWorkers(Long numWorkers) { + this.numWorkers = numWorkers; + return this; + } + + public Long getNumWorkers() { + return numWorkers; + } + + public PipelineClusterPb setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public PipelineClusterPb setSparkConf(Map sparkConf) { + this.sparkConf = sparkConf; + return this; + } + + public Map getSparkConf() { + return sparkConf; + } + + public PipelineClusterPb setSparkEnvVars(Map sparkEnvVars) { + this.sparkEnvVars = sparkEnvVars; + return this; + } + + public Map getSparkEnvVars() { + return sparkEnvVars; + } + + public PipelineClusterPb setSshPublicKeys(Collection sshPublicKeys) { + 
this.sshPublicKeys = sshPublicKeys; + return this; + } + + public Collection getSshPublicKeys() { + return sshPublicKeys; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineClusterPb that = (PipelineClusterPb) o; + return Objects.equals(applyPolicyDefaultValues, that.applyPolicyDefaultValues) + && Objects.equals(autoscale, that.autoscale) + && Objects.equals(awsAttributes, that.awsAttributes) + && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(clusterLogConf, that.clusterLogConf) + && Objects.equals(customTags, that.customTags) + && Objects.equals(driverInstancePoolId, that.driverInstancePoolId) + && Objects.equals(driverNodeTypeId, that.driverNodeTypeId) + && Objects.equals(enableLocalDiskEncryption, that.enableLocalDiskEncryption) + && Objects.equals(gcpAttributes, that.gcpAttributes) + && Objects.equals(initScripts, that.initScripts) + && Objects.equals(instancePoolId, that.instancePoolId) + && Objects.equals(label, that.label) + && Objects.equals(nodeTypeId, that.nodeTypeId) + && Objects.equals(numWorkers, that.numWorkers) + && Objects.equals(policyId, that.policyId) + && Objects.equals(sparkConf, that.sparkConf) + && Objects.equals(sparkEnvVars, that.sparkEnvVars) + && Objects.equals(sshPublicKeys, that.sshPublicKeys); + } + + @Override + public int hashCode() { + return Objects.hash( + applyPolicyDefaultValues, + autoscale, + awsAttributes, + azureAttributes, + clusterLogConf, + customTags, + driverInstancePoolId, + driverNodeTypeId, + enableLocalDiskEncryption, + gcpAttributes, + initScripts, + instancePoolId, + label, + nodeTypeId, + numWorkers, + policyId, + sparkConf, + sparkEnvVars, + sshPublicKeys); + } + + @Override + public String toString() { + return new ToStringer(PipelineClusterPb.class) + .add("applyPolicyDefaultValues", applyPolicyDefaultValues) + .add("autoscale", autoscale) + .add("awsAttributes", awsAttributes) + 
.add("azureAttributes", azureAttributes) + .add("clusterLogConf", clusterLogConf) + .add("customTags", customTags) + .add("driverInstancePoolId", driverInstancePoolId) + .add("driverNodeTypeId", driverNodeTypeId) + .add("enableLocalDiskEncryption", enableLocalDiskEncryption) + .add("gcpAttributes", gcpAttributes) + .add("initScripts", initScripts) + .add("instancePoolId", instancePoolId) + .add("label", label) + .add("nodeTypeId", nodeTypeId) + .add("numWorkers", numWorkers) + .add("policyId", policyId) + .add("sparkConf", sparkConf) + .add("sparkEnvVars", sparkEnvVars) + .add("sshPublicKeys", sshPublicKeys) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java index 579d9f1f1..c857cdec1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineDeployment.PipelineDeploymentSerializer.class) +@JsonDeserialize(using = PipelineDeployment.PipelineDeploymentDeserializer.class) public class 
PipelineDeployment { /** The deployment method that manages the pipeline. */ - @JsonProperty("kind") private DeploymentKind kind; /** The path to the file containing metadata about the deployment. */ - @JsonProperty("metadata_file_path") private String metadataFilePath; public PipelineDeployment setKind(DeploymentKind kind) { @@ -56,4 +65,40 @@ public String toString() { .add("metadataFilePath", metadataFilePath) .toString(); } + + PipelineDeploymentPb toPb() { + PipelineDeploymentPb pb = new PipelineDeploymentPb(); + pb.setKind(kind); + pb.setMetadataFilePath(metadataFilePath); + + return pb; + } + + static PipelineDeployment fromPb(PipelineDeploymentPb pb) { + PipelineDeployment model = new PipelineDeployment(); + model.setKind(pb.getKind()); + model.setMetadataFilePath(pb.getMetadataFilePath()); + + return model; + } + + public static class PipelineDeploymentSerializer extends JsonSerializer { + @Override + public void serialize(PipelineDeployment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineDeploymentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineDeploymentDeserializer extends JsonDeserializer { + @Override + public PipelineDeployment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineDeploymentPb pb = mapper.readValue(p, PipelineDeploymentPb.class); + return PipelineDeployment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeploymentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeploymentPb.java new file mode 100755 index 000000000..8c5bfe925 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeploymentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineDeploymentPb { + @JsonProperty("kind") + private DeploymentKind kind; + + @JsonProperty("metadata_file_path") + private String metadataFilePath; + + public PipelineDeploymentPb setKind(DeploymentKind kind) { + this.kind = kind; + return this; + } + + public DeploymentKind getKind() { + return kind; + } + + public PipelineDeploymentPb setMetadataFilePath(String metadataFilePath) { + this.metadataFilePath = metadataFilePath; + return this; + } + + public String getMetadataFilePath() { + return metadataFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineDeploymentPb that = (PipelineDeploymentPb) o; + return Objects.equals(kind, that.kind) + && Objects.equals(metadataFilePath, that.metadataFilePath); + } + + @Override + public int hashCode() { + return Objects.hash(kind, metadataFilePath); + } + + @Override + public String toString() { + return new ToStringer(PipelineDeploymentPb.class) + .add("kind", kind) + .add("metadataFilePath", metadataFilePath) + .toString(); + } 
+} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEvent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEvent.java index 2c4b21116..7f21630be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEvent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEvent.java @@ -4,45 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineEvent.PipelineEventSerializer.class) +@JsonDeserialize(using = PipelineEvent.PipelineEventDeserializer.class) public class PipelineEvent { /** Information about an error captured by the event. */ - @JsonProperty("error") private ErrorDetail error; /** The event type. Should always correspond to the details */ - @JsonProperty("event_type") private String eventType; /** A time-based, globally unique id. */ - @JsonProperty("id") private String id; /** The severity level of the event. */ - @JsonProperty("level") private EventLevel level; /** Maturity level for event_type. */ - @JsonProperty("maturity_level") private MaturityLevel maturityLevel; /** The display message associated with the event. 
*/ - @JsonProperty("message") private String message; /** Describes where the event originates from. */ - @JsonProperty("origin") private Origin origin; /** A sequencing object to identify and order events. */ - @JsonProperty("sequence") private Sequencing sequence; /** The time of the event. */ - @JsonProperty("timestamp") private String timestamp; public PipelineEvent setError(ErrorDetail error) { @@ -162,4 +164,53 @@ public String toString() { .add("timestamp", timestamp) .toString(); } + + PipelineEventPb toPb() { + PipelineEventPb pb = new PipelineEventPb(); + pb.setError(error); + pb.setEventType(eventType); + pb.setId(id); + pb.setLevel(level); + pb.setMaturityLevel(maturityLevel); + pb.setMessage(message); + pb.setOrigin(origin); + pb.setSequence(sequence); + pb.setTimestamp(timestamp); + + return pb; + } + + static PipelineEvent fromPb(PipelineEventPb pb) { + PipelineEvent model = new PipelineEvent(); + model.setError(pb.getError()); + model.setEventType(pb.getEventType()); + model.setId(pb.getId()); + model.setLevel(pb.getLevel()); + model.setMaturityLevel(pb.getMaturityLevel()); + model.setMessage(pb.getMessage()); + model.setOrigin(pb.getOrigin()); + model.setSequence(pb.getSequence()); + model.setTimestamp(pb.getTimestamp()); + + return model; + } + + public static class PipelineEventSerializer extends JsonSerializer { + @Override + public void serialize(PipelineEvent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineEventPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineEventDeserializer extends JsonDeserializer { + @Override + public PipelineEvent deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineEventPb pb = mapper.readValue(p, PipelineEventPb.class); + return PipelineEvent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEventPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEventPb.java new file mode 100755 index 000000000..4d8207fe9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEventPb.java @@ -0,0 +1,156 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineEventPb { + @JsonProperty("error") + private ErrorDetail error; + + @JsonProperty("event_type") + private String eventType; + + @JsonProperty("id") + private String id; + + @JsonProperty("level") + private EventLevel level; + + @JsonProperty("maturity_level") + private MaturityLevel maturityLevel; + + @JsonProperty("message") + private String message; + + @JsonProperty("origin") + private Origin origin; + + @JsonProperty("sequence") + private Sequencing sequence; + + @JsonProperty("timestamp") + private String timestamp; + + public PipelineEventPb setError(ErrorDetail error) { + this.error = error; + return this; + } + + public ErrorDetail getError() { + return error; + } + + public PipelineEventPb setEventType(String eventType) { + this.eventType = eventType; + return this; + } + + public String getEventType() { + return eventType; + } + + public PipelineEventPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public PipelineEventPb setLevel(EventLevel level) { + this.level = level; + return this; + } + + public EventLevel getLevel() { + return level; 
+ } + + public PipelineEventPb setMaturityLevel(MaturityLevel maturityLevel) { + this.maturityLevel = maturityLevel; + return this; + } + + public MaturityLevel getMaturityLevel() { + return maturityLevel; + } + + public PipelineEventPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public PipelineEventPb setOrigin(Origin origin) { + this.origin = origin; + return this; + } + + public Origin getOrigin() { + return origin; + } + + public PipelineEventPb setSequence(Sequencing sequence) { + this.sequence = sequence; + return this; + } + + public Sequencing getSequence() { + return sequence; + } + + public PipelineEventPb setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineEventPb that = (PipelineEventPb) o; + return Objects.equals(error, that.error) + && Objects.equals(eventType, that.eventType) + && Objects.equals(id, that.id) + && Objects.equals(level, that.level) + && Objects.equals(maturityLevel, that.maturityLevel) + && Objects.equals(message, that.message) + && Objects.equals(origin, that.origin) + && Objects.equals(sequence, that.sequence) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash( + error, eventType, id, level, maturityLevel, message, origin, sequence, timestamp); + } + + @Override + public String toString() { + return new ToStringer(PipelineEventPb.class) + .add("error", error) + .add("eventType", eventType) + .add("id", id) + .add("level", level) + .add("maturityLevel", maturityLevel) + .add("message", message) + .add("origin", origin) + .add("sequence", sequence) + .add("timestamp", timestamp) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java index 45f63133b..e988711f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java @@ -4,36 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineLibrary.PipelineLibrarySerializer.class) +@JsonDeserialize(using = PipelineLibrary.PipelineLibraryDeserializer.class) public class PipelineLibrary { /** The path to a file that defines a pipeline and is stored in the Databricks Repos. */ - @JsonProperty("file") private FileLibrary file; /** * The unified field to include source codes. Each entry can be a notebook path, a file path, or a * folder path that ends `/**`. This field cannot be used together with `notebook` or `file`. */ - @JsonProperty("glob") private PathPattern glob; /** URI of the jar to be installed. Currently only DBFS is supported. */ - @JsonProperty("jar") private String jar; /** Specification of a maven library to be installed. 
*/ - @JsonProperty("maven") private com.databricks.sdk.service.compute.MavenLibrary maven; /** The path to a notebook that defines a pipeline and is stored in the Databricks workspace. */ - @JsonProperty("notebook") private NotebookLibrary notebook; /** URI of the whl to be installed. */ - @JsonProperty("whl") private String whl; public PipelineLibrary setFile(FileLibrary file) { @@ -119,4 +124,48 @@ public String toString() { .add("whl", whl) .toString(); } + + PipelineLibraryPb toPb() { + PipelineLibraryPb pb = new PipelineLibraryPb(); + pb.setFile(file); + pb.setGlob(glob); + pb.setJar(jar); + pb.setMaven(maven); + pb.setNotebook(notebook); + pb.setWhl(whl); + + return pb; + } + + static PipelineLibrary fromPb(PipelineLibraryPb pb) { + PipelineLibrary model = new PipelineLibrary(); + model.setFile(pb.getFile()); + model.setGlob(pb.getGlob()); + model.setJar(pb.getJar()); + model.setMaven(pb.getMaven()); + model.setNotebook(pb.getNotebook()); + model.setWhl(pb.getWhl()); + + return model; + } + + public static class PipelineLibrarySerializer extends JsonSerializer { + @Override + public void serialize(PipelineLibrary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineLibraryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineLibraryDeserializer extends JsonDeserializer { + @Override + public PipelineLibrary deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineLibraryPb pb = mapper.readValue(p, PipelineLibraryPb.class); + return PipelineLibrary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibraryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibraryPb.java new file mode 100755 index 000000000..a0efdca3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibraryPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineLibraryPb { + @JsonProperty("file") + private FileLibrary file; + + @JsonProperty("glob") + private PathPattern glob; + + @JsonProperty("jar") + private String jar; + + @JsonProperty("maven") + private com.databricks.sdk.service.compute.MavenLibrary maven; + + @JsonProperty("notebook") + private NotebookLibrary notebook; + + @JsonProperty("whl") + private String whl; + + public PipelineLibraryPb setFile(FileLibrary file) { + this.file = file; + return this; + } + + public FileLibrary getFile() { + return file; + } + + public PipelineLibraryPb setGlob(PathPattern glob) { + this.glob = glob; + return this; + } + + public PathPattern getGlob() { + return glob; + } + + public PipelineLibraryPb setJar(String jar) { + this.jar = jar; + return this; + } + + public String getJar() { + return jar; + } + + public PipelineLibraryPb setMaven(com.databricks.sdk.service.compute.MavenLibrary maven) { + this.maven = maven; + return this; + } + + public com.databricks.sdk.service.compute.MavenLibrary getMaven() { + return maven; + } + + public PipelineLibraryPb setNotebook(NotebookLibrary notebook) { + 
this.notebook = notebook; + return this; + } + + public NotebookLibrary getNotebook() { + return notebook; + } + + public PipelineLibraryPb setWhl(String whl) { + this.whl = whl; + return this; + } + + public String getWhl() { + return whl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineLibraryPb that = (PipelineLibraryPb) o; + return Objects.equals(file, that.file) + && Objects.equals(glob, that.glob) + && Objects.equals(jar, that.jar) + && Objects.equals(maven, that.maven) + && Objects.equals(notebook, that.notebook) + && Objects.equals(whl, that.whl); + } + + @Override + public int hashCode() { + return Objects.hash(file, glob, jar, maven, notebook, whl); + } + + @Override + public String toString() { + return new ToStringer(PipelineLibraryPb.class) + .add("file", file) + .add("glob", glob) + .add("jar", jar) + .add("maven", maven) + .add("notebook", notebook) + .add("whl", whl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java index 3911806c8..fffd8cdee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PipelinePermission.PipelinePermissionSerializer.class) +@JsonDeserialize(using = PipelinePermission.PipelinePermissionDeserializer.class) public class PipelinePermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; public PipelinePermission setInherited(Boolean inherited) { @@ -72,4 +80,42 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + PipelinePermissionPb toPb() { + PipelinePermissionPb pb = new PipelinePermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PipelinePermission fromPb(PipelinePermissionPb pb) { + PipelinePermission model = new PipelinePermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PipelinePermissionSerializer extends JsonSerializer { + @Override + public void serialize(PipelinePermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelinePermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelinePermissionDeserializer extends JsonDeserializer { + @Override + public PipelinePermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelinePermissionPb pb = mapper.readValue(p, PipelinePermissionPb.class); + return PipelinePermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionPb.java new file mode 100755 index 000000000..c2e3bc7b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PipelinePermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private PipelinePermissionLevel permissionLevel; + + public PipelinePermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public PipelinePermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public PipelinePermissionPb setPermissionLevel(PipelinePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PipelinePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
PipelinePermissionPb that = (PipelinePermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PipelinePermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissions.java index b59dee80a..a8793da3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PipelinePermissions.PipelinePermissionsSerializer.class) +@JsonDeserialize(using = PipelinePermissions.PipelinePermissionsDeserializer.class) public class 
PipelinePermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public PipelinePermissions setAccessControlList( @@ -73,4 +81,43 @@ public String toString() { .add("objectType", objectType) .toString(); } + + PipelinePermissionsPb toPb() { + PipelinePermissionsPb pb = new PipelinePermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static PipelinePermissions fromPb(PipelinePermissionsPb pb) { + PipelinePermissions model = new PipelinePermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class PipelinePermissionsSerializer extends JsonSerializer { + @Override + public void serialize(PipelinePermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelinePermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelinePermissionsDeserializer + extends JsonDeserializer { + @Override + public PipelinePermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelinePermissionsPb pb = mapper.readValue(p, PipelinePermissionsPb.class); + return PipelinePermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java index cbe547625..3fbaaa5d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = PipelinePermissionsDescription.PipelinePermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = PipelinePermissionsDescription.PipelinePermissionsDescriptionDeserializer.class) public class PipelinePermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; public PipelinePermissionsDescription setDescription(String description) { @@ -57,4 +68,44 @@ public String toString() { .add("permissionLevel", 
permissionLevel) .toString(); } + + PipelinePermissionsDescriptionPb toPb() { + PipelinePermissionsDescriptionPb pb = new PipelinePermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static PipelinePermissionsDescription fromPb(PipelinePermissionsDescriptionPb pb) { + PipelinePermissionsDescription model = new PipelinePermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class PipelinePermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + PipelinePermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelinePermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelinePermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public PipelinePermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelinePermissionsDescriptionPb pb = + mapper.readValue(p, PipelinePermissionsDescriptionPb.class); + return PipelinePermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescriptionPb.java new file mode 100755 index 000000000..4dcbca306 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelinePermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private PipelinePermissionLevel permissionLevel; + + public PipelinePermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public PipelinePermissionsDescriptionPb setPermissionLevel( + PipelinePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PipelinePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelinePermissionsDescriptionPb that = (PipelinePermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(PipelinePermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsPb.java new file mode 100755 index 000000000..05c32297d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PipelinePermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public PipelinePermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public PipelinePermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public PipelinePermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelinePermissionsPb that = (PipelinePermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(PipelinePermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequest.java index 9f2971fe5..3f80aa325 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PipelinePermissionsRequest.PipelinePermissionsRequestSerializer.class) +@JsonDeserialize(using = PipelinePermissionsRequest.PipelinePermissionsRequestDeserializer.class) public class PipelinePermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The pipeline for which to get or manage permissions. 
*/ - @JsonIgnore private String pipelineId; + private String pipelineId; public PipelinePermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("pipelineId", pipelineId) .toString(); } + + PipelinePermissionsRequestPb toPb() { + PipelinePermissionsRequestPb pb = new PipelinePermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setPipelineId(pipelineId); + + return pb; + } + + static PipelinePermissionsRequest fromPb(PipelinePermissionsRequestPb pb) { + PipelinePermissionsRequest model = new PipelinePermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class PipelinePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + PipelinePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelinePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelinePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public PipelinePermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelinePermissionsRequestPb pb = mapper.readValue(p, PipelinePermissionsRequestPb.class); + return PipelinePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequestPb.java new file mode 100755 index 000000000..db056f909 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PipelinePermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String pipelineId; + + public PipelinePermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public PipelinePermissionsRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelinePermissionsRequestPb that = (PipelinePermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return 
Objects.hash(accessControlList, pipelineId); + } + + @Override + public String toString() { + return new ToStringer(PipelinePermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("pipelineId", pipelineId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index b4c5c4d8e..bd8a56272 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineSpec.PipelineSpecSerializer.class) +@JsonDeserialize(using = PipelineSpec.PipelineSpecDeserializer.class) public class PipelineSpec { /** Budget policy of this pipeline. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** @@ -21,78 +31,60 @@ public class PipelineSpec { * `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity * Catalog. 
*/ - @JsonProperty("catalog") private String catalog; /** DLT Release Channel that specifies which version to use. */ - @JsonProperty("channel") private String channel; /** Cluster settings for this pipeline deployment. */ - @JsonProperty("clusters") private Collection clusters; /** String-String configuration for this pipeline execution. */ - @JsonProperty("configuration") private Map configuration; /** Whether the pipeline is continuous or triggered. This replaces `trigger`. */ - @JsonProperty("continuous") private Boolean continuous; /** Deployment type of this pipeline. */ - @JsonProperty("deployment") private PipelineDeployment deployment; /** Whether the pipeline is in Development mode. Defaults to false. */ - @JsonProperty("development") private Boolean development; /** Pipeline product edition. */ - @JsonProperty("edition") private String edition; /** Event log configuration for this pipeline */ - @JsonProperty("event_log") private EventLogSpec eventLog; /** Filters on which Pipeline packages to include in the deployed graph. */ - @JsonProperty("filters") private Filters filters; /** The definition of a gateway pipeline to support change data capture. */ - @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; /** Unique identifier for this pipeline. */ - @JsonProperty("id") private String id; /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the * 'libraries', 'schema', 'target', or 'catalog' settings. */ - @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; /** Libraries or code needed by this deployment. */ - @JsonProperty("libraries") private Collection libraries; /** Friendly identifier for this pipeline. */ - @JsonProperty("name") private String name; /** List of notification settings for this pipeline. */ - @JsonProperty("notifications") private Collection notifications; /** Whether Photon is enabled for this pipeline. 
*/ - @JsonProperty("photon") private Boolean photon; /** Restart window of this pipeline. */ - @JsonProperty("restart_window") private RestartWindow restartWindow; /** @@ -100,19 +92,15 @@ public class PipelineSpec { * the Databricks user interface and it is added to sys.path when executing Python sources during * pipeline execution. */ - @JsonProperty("root_path") private String rootPath; /** The default schema (database) where tables are read from or published to. */ - @JsonProperty("schema") private String schema; /** Whether serverless compute is enabled for this pipeline. */ - @JsonProperty("serverless") private Boolean serverless; /** DBFS root directory for storing checkpoints and tables. */ - @JsonProperty("storage") private String storage; /** @@ -120,7 +108,6 @@ public class PipelineSpec { * and are therefore subject to the same limitations. A maximum of 25 tags can be added to the * pipeline. */ - @JsonProperty("tags") private Map tags; /** @@ -128,11 +115,9 @@ public class PipelineSpec { * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is * deprecated for pipeline creation in favor of the `schema` field. */ - @JsonProperty("target") private String target; /** Which pipeline trigger to use. Deprecated: Use `continuous` instead. 
*/ - @JsonProperty("trigger") private PipelineTrigger trigger; public PipelineSpec setBudgetPolicyId(String budgetPolicyId) { @@ -464,4 +449,87 @@ public String toString() { .add("trigger", trigger) .toString(); } + + PipelineSpecPb toPb() { + PipelineSpecPb pb = new PipelineSpecPb(); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setCatalog(catalog); + pb.setChannel(channel); + pb.setClusters(clusters); + pb.setConfiguration(configuration); + pb.setContinuous(continuous); + pb.setDeployment(deployment); + pb.setDevelopment(development); + pb.setEdition(edition); + pb.setEventLog(eventLog); + pb.setFilters(filters); + pb.setGatewayDefinition(gatewayDefinition); + pb.setId(id); + pb.setIngestionDefinition(ingestionDefinition); + pb.setLibraries(libraries); + pb.setName(name); + pb.setNotifications(notifications); + pb.setPhoton(photon); + pb.setRestartWindow(restartWindow); + pb.setRootPath(rootPath); + pb.setSchema(schema); + pb.setServerless(serverless); + pb.setStorage(storage); + pb.setTags(tags); + pb.setTarget(target); + pb.setTrigger(trigger); + + return pb; + } + + static PipelineSpec fromPb(PipelineSpecPb pb) { + PipelineSpec model = new PipelineSpec(); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setCatalog(pb.getCatalog()); + model.setChannel(pb.getChannel()); + model.setClusters(pb.getClusters()); + model.setConfiguration(pb.getConfiguration()); + model.setContinuous(pb.getContinuous()); + model.setDeployment(pb.getDeployment()); + model.setDevelopment(pb.getDevelopment()); + model.setEdition(pb.getEdition()); + model.setEventLog(pb.getEventLog()); + model.setFilters(pb.getFilters()); + model.setGatewayDefinition(pb.getGatewayDefinition()); + model.setId(pb.getId()); + model.setIngestionDefinition(pb.getIngestionDefinition()); + model.setLibraries(pb.getLibraries()); + model.setName(pb.getName()); + model.setNotifications(pb.getNotifications()); + model.setPhoton(pb.getPhoton()); + model.setRestartWindow(pb.getRestartWindow()); + 
model.setRootPath(pb.getRootPath()); + model.setSchema(pb.getSchema()); + model.setServerless(pb.getServerless()); + model.setStorage(pb.getStorage()); + model.setTags(pb.getTags()); + model.setTarget(pb.getTarget()); + model.setTrigger(pb.getTrigger()); + + return model; + } + + public static class PipelineSpecSerializer extends JsonSerializer { + @Override + public void serialize(PipelineSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineSpecDeserializer extends JsonDeserializer { + @Override + public PipelineSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineSpecPb pb = mapper.readValue(p, PipelineSpecPb.class); + return PipelineSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpecPb.java new file mode 100755 index 000000000..aae07e61d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpecPb.java @@ -0,0 +1,421 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class PipelineSpecPb { + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("channel") + private String channel; + + @JsonProperty("clusters") + private Collection clusters; + + @JsonProperty("configuration") + private Map configuration; + + @JsonProperty("continuous") + private Boolean continuous; + + @JsonProperty("deployment") + private PipelineDeployment deployment; + + @JsonProperty("development") + private Boolean development; + + @JsonProperty("edition") + private String edition; + + @JsonProperty("event_log") + private EventLogSpec eventLog; + + @JsonProperty("filters") + private Filters filters; + + @JsonProperty("gateway_definition") + private IngestionGatewayPipelineDefinition gatewayDefinition; + + @JsonProperty("id") + private String id; + + @JsonProperty("ingestion_definition") + private IngestionPipelineDefinition ingestionDefinition; + + @JsonProperty("libraries") + private Collection libraries; + + @JsonProperty("name") + private String name; + + @JsonProperty("notifications") + private Collection notifications; + + @JsonProperty("photon") + private Boolean photon; + + @JsonProperty("restart_window") + private RestartWindow restartWindow; + + @JsonProperty("root_path") + private String rootPath; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("serverless") + private Boolean serverless; + + @JsonProperty("storage") + private String storage; + + @JsonProperty("tags") + private Map tags; + + @JsonProperty("target") + private String target; + + @JsonProperty("trigger") + private PipelineTrigger trigger; + + public PipelineSpecPb setBudgetPolicyId(String 
budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public PipelineSpecPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public PipelineSpecPb setChannel(String channel) { + this.channel = channel; + return this; + } + + public String getChannel() { + return channel; + } + + public PipelineSpecPb setClusters(Collection clusters) { + this.clusters = clusters; + return this; + } + + public Collection getClusters() { + return clusters; + } + + public PipelineSpecPb setConfiguration(Map configuration) { + this.configuration = configuration; + return this; + } + + public Map getConfiguration() { + return configuration; + } + + public PipelineSpecPb setContinuous(Boolean continuous) { + this.continuous = continuous; + return this; + } + + public Boolean getContinuous() { + return continuous; + } + + public PipelineSpecPb setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + + public PipelineSpecPb setDevelopment(Boolean development) { + this.development = development; + return this; + } + + public Boolean getDevelopment() { + return development; + } + + public PipelineSpecPb setEdition(String edition) { + this.edition = edition; + return this; + } + + public String getEdition() { + return edition; + } + + public PipelineSpecPb setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + + public PipelineSpecPb setFilters(Filters filters) { + this.filters = filters; + return this; + } + + public Filters getFilters() { + return filters; + } + + public PipelineSpecPb setGatewayDefinition(IngestionGatewayPipelineDefinition gatewayDefinition) { + this.gatewayDefinition = gatewayDefinition; + 
return this; + } + + public IngestionGatewayPipelineDefinition getGatewayDefinition() { + return gatewayDefinition; + } + + public PipelineSpecPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public PipelineSpecPb setIngestionDefinition(IngestionPipelineDefinition ingestionDefinition) { + this.ingestionDefinition = ingestionDefinition; + return this; + } + + public IngestionPipelineDefinition getIngestionDefinition() { + return ingestionDefinition; + } + + public PipelineSpecPb setLibraries(Collection libraries) { + this.libraries = libraries; + return this; + } + + public Collection getLibraries() { + return libraries; + } + + public PipelineSpecPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PipelineSpecPb setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public PipelineSpecPb setPhoton(Boolean photon) { + this.photon = photon; + return this; + } + + public Boolean getPhoton() { + return photon; + } + + public PipelineSpecPb setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + + public PipelineSpecPb setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + + public PipelineSpecPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public PipelineSpecPb setServerless(Boolean serverless) { + this.serverless = serverless; + return this; + } + + public Boolean getServerless() { + return serverless; + } + + public PipelineSpecPb setStorage(String storage) { + this.storage = storage; + return this; + } + + public String getStorage() { 
+ return storage; + } + + public PipelineSpecPb setTags(Map tags) { + this.tags = tags; + return this; + } + + public Map getTags() { + return tags; + } + + public PipelineSpecPb setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + public PipelineSpecPb setTrigger(PipelineTrigger trigger) { + this.trigger = trigger; + return this; + } + + public PipelineTrigger getTrigger() { + return trigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineSpecPb that = (PipelineSpecPb) o; + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(catalog, that.catalog) + && Objects.equals(channel, that.channel) + && Objects.equals(clusters, that.clusters) + && Objects.equals(configuration, that.configuration) + && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) + && Objects.equals(development, that.development) + && Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) + && Objects.equals(filters, that.filters) + && Objects.equals(gatewayDefinition, that.gatewayDefinition) + && Objects.equals(id, that.id) + && Objects.equals(ingestionDefinition, that.ingestionDefinition) + && Objects.equals(libraries, that.libraries) + && Objects.equals(name, that.name) + && Objects.equals(notifications, that.notifications) + && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) + && Objects.equals(schema, that.schema) + && Objects.equals(serverless, that.serverless) + && Objects.equals(storage, that.storage) + && Objects.equals(tags, that.tags) + && Objects.equals(target, that.target) + && Objects.equals(trigger, that.trigger); + } + + @Override + public int hashCode() { + return Objects.hash( + budgetPolicyId, + catalog, + channel, + 
clusters, + configuration, + continuous, + deployment, + development, + edition, + eventLog, + filters, + gatewayDefinition, + id, + ingestionDefinition, + libraries, + name, + notifications, + photon, + restartWindow, + rootPath, + schema, + serverless, + storage, + tags, + target, + trigger); + } + + @Override + public String toString() { + return new ToStringer(PipelineSpecPb.class) + .add("budgetPolicyId", budgetPolicyId) + .add("catalog", catalog) + .add("channel", channel) + .add("clusters", clusters) + .add("configuration", configuration) + .add("continuous", continuous) + .add("deployment", deployment) + .add("development", development) + .add("edition", edition) + .add("eventLog", eventLog) + .add("filters", filters) + .add("gatewayDefinition", gatewayDefinition) + .add("id", id) + .add("ingestionDefinition", ingestionDefinition) + .add("libraries", libraries) + .add("name", name) + .add("notifications", notifications) + .add("photon", photon) + .add("restartWindow", restartWindow) + .add("rootPath", rootPath) + .add("schema", schema) + .add("serverless", serverless) + .add("storage", storage) + .add("tags", tags) + .add("target", target) + .add("trigger", trigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java index dafa63362..cb5845b61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java @@ -4,45 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineStateInfo.PipelineStateInfoSerializer.class) +@JsonDeserialize(using = PipelineStateInfo.PipelineStateInfoDeserializer.class) public class PipelineStateInfo { /** The unique identifier of the cluster running the pipeline. */ - @JsonProperty("cluster_id") private String clusterId; /** The username of the pipeline creator. */ - @JsonProperty("creator_user_name") private String creatorUserName; /** The health of a pipeline. */ - @JsonProperty("health") private PipelineStateInfoHealth health; /** Status of the latest updates for the pipeline. Ordered with the newest update first. */ - @JsonProperty("latest_updates") private Collection latestUpdates; /** The user-friendly name of the pipeline. */ - @JsonProperty("name") private String name; /** The unique identifier of the pipeline. */ - @JsonProperty("pipeline_id") private String pipelineId; /** * The username that the pipeline runs as. This is a read only value derived from the pipeline * owner. */ - @JsonProperty("run_as_user_name") private String runAsUserName; /** The pipeline state. 
*/ - @JsonProperty("state") private PipelineState state; public PipelineStateInfo setClusterId(String clusterId) { @@ -151,4 +154,52 @@ public String toString() { .add("state", state) .toString(); } + + PipelineStateInfoPb toPb() { + PipelineStateInfoPb pb = new PipelineStateInfoPb(); + pb.setClusterId(clusterId); + pb.setCreatorUserName(creatorUserName); + pb.setHealth(health); + pb.setLatestUpdates(latestUpdates); + pb.setName(name); + pb.setPipelineId(pipelineId); + pb.setRunAsUserName(runAsUserName); + pb.setState(state); + + return pb; + } + + static PipelineStateInfo fromPb(PipelineStateInfoPb pb) { + PipelineStateInfo model = new PipelineStateInfo(); + model.setClusterId(pb.getClusterId()); + model.setCreatorUserName(pb.getCreatorUserName()); + model.setHealth(pb.getHealth()); + model.setLatestUpdates(pb.getLatestUpdates()); + model.setName(pb.getName()); + model.setPipelineId(pb.getPipelineId()); + model.setRunAsUserName(pb.getRunAsUserName()); + model.setState(pb.getState()); + + return model; + } + + public static class PipelineStateInfoSerializer extends JsonSerializer { + @Override + public void serialize(PipelineStateInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineStateInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineStateInfoDeserializer extends JsonDeserializer { + @Override + public PipelineStateInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineStateInfoPb pb = mapper.readValue(p, PipelineStateInfoPb.class); + return PipelineStateInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfoPb.java new file mode 100755 index 000000000..80f889157 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfoPb.java @@ -0,0 +1,143 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PipelineStateInfoPb { + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("creator_user_name") + private String creatorUserName; + + @JsonProperty("health") + private PipelineStateInfoHealth health; + + @JsonProperty("latest_updates") + private Collection latestUpdates; + + @JsonProperty("name") + private String name; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("run_as_user_name") + private String runAsUserName; + + @JsonProperty("state") + private PipelineState state; + + public PipelineStateInfoPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public PipelineStateInfoPb setCreatorUserName(String creatorUserName) { + this.creatorUserName = creatorUserName; + return this; + } + + public String getCreatorUserName() { + return creatorUserName; + } + + public PipelineStateInfoPb setHealth(PipelineStateInfoHealth health) { + this.health = health; + return this; + } + + public PipelineStateInfoHealth 
getHealth() { + return health; + } + + public PipelineStateInfoPb setLatestUpdates(Collection latestUpdates) { + this.latestUpdates = latestUpdates; + return this; + } + + public Collection getLatestUpdates() { + return latestUpdates; + } + + public PipelineStateInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PipelineStateInfoPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public PipelineStateInfoPb setRunAsUserName(String runAsUserName) { + this.runAsUserName = runAsUserName; + return this; + } + + public String getRunAsUserName() { + return runAsUserName; + } + + public PipelineStateInfoPb setState(PipelineState state) { + this.state = state; + return this; + } + + public PipelineState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineStateInfoPb that = (PipelineStateInfoPb) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(health, that.health) + && Objects.equals(latestUpdates, that.latestUpdates) + && Objects.equals(name, that.name) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(runAsUserName, that.runAsUserName) + && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash( + clusterId, creatorUserName, health, latestUpdates, name, pipelineId, runAsUserName, state); + } + + @Override + public String toString() { + return new ToStringer(PipelineStateInfoPb.class) + .add("clusterId", clusterId) + .add("creatorUserName", creatorUserName) + .add("health", health) + .add("latestUpdates", latestUpdates) + .add("name", name) + .add("pipelineId", pipelineId) + .add("runAsUserName", runAsUserName) + 
.add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTrigger.java index 14350802f..b35ebfa0f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTrigger.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTrigger.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PipelineTrigger.PipelineTriggerSerializer.class) +@JsonDeserialize(using = PipelineTrigger.PipelineTriggerDeserializer.class) public class PipelineTrigger { /** */ - @JsonProperty("cron") private CronTrigger cron; /** */ - @JsonProperty("manual") private ManualTrigger manual; public PipelineTrigger setCron(CronTrigger cron) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(PipelineTrigger.class).add("cron", cron).add("manual", manual).toString(); } + + PipelineTriggerPb toPb() { + PipelineTriggerPb pb = new PipelineTriggerPb(); + pb.setCron(cron); + pb.setManual(manual); + + return pb; + } + + static PipelineTrigger fromPb(PipelineTriggerPb pb) { + PipelineTrigger model = new PipelineTrigger(); + 
model.setCron(pb.getCron()); + model.setManual(pb.getManual()); + + return model; + } + + public static class PipelineTriggerSerializer extends JsonSerializer { + @Override + public void serialize(PipelineTrigger value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PipelineTriggerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PipelineTriggerDeserializer extends JsonDeserializer { + @Override + public PipelineTrigger deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PipelineTriggerPb pb = mapper.readValue(p, PipelineTriggerPb.class); + return PipelineTrigger.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTriggerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTriggerPb.java new file mode 100755 index 000000000..4dfa83823 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTriggerPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PipelineTriggerPb { + @JsonProperty("cron") + private CronTrigger cron; + + @JsonProperty("manual") + private ManualTrigger manual; + + public PipelineTriggerPb setCron(CronTrigger cron) { + this.cron = cron; + return this; + } + + public CronTrigger getCron() { + return cron; + } + + public PipelineTriggerPb setManual(ManualTrigger manual) { + this.manual = manual; + return this; + } + + public ManualTrigger getManual() { + return manual; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineTriggerPb that = (PipelineTriggerPb) o; + return Objects.equals(cron, that.cron) && Objects.equals(manual, that.manual); + } + + @Override + public int hashCode() { + return Objects.hash(cron, manual); + } + + @Override + public String toString() { + return new ToStringer(PipelineTriggerPb.class) + .add("cron", cron) + .add("manual", manual) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java index 91077477d..49d8da5e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java @@ -21,7 +21,7 @@ public CreatePipelineResponse create(CreatePipeline request) { String path = "/api/2.0/pipelines"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, CreatePipelineResponse.class); @@ -35,7 +35,7 @@ public void delete(DeletePipelineRequest request) { String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeletePipelineResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetPipelineResponse get(GetPipelineRequest request) { String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPipelineResponse.class); } catch (IOException e) { @@ -64,7 +64,7 @@ public GetPipelinePermissionLevelsResponse getPermissionLevels( "/api/2.0/permissions/pipelines/%s/permissionLevels", request.getPipelineId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetPipelinePermissionLevelsResponse.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public PipelinePermissions getPermissions(GetPipelinePermissionsRequest request) String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PipelinePermissions.class); } catch (IOException e) { @@ -92,7 +92,7 @@ public GetUpdateResponse getUpdate(GetUpdateRequest request) { "/api/2.0/pipelines/%s/updates/%s", request.getPipelineId(), request.getUpdateId()); try { Request req = new Request("GET", path); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetUpdateResponse.class); } catch (IOException e) { @@ -105,7 +105,7 @@ public ListPipelineEventsResponse listPipelineEvents(ListPipelineEventsRequest r String path = String.format("/api/2.0/pipelines/%s/events", request.getPipelineId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListPipelineEventsResponse.class); } catch (IOException e) { @@ -118,7 +118,7 @@ public ListPipelinesResponse listPipelines(ListPipelinesRequest request) { String path = "/api/2.0/pipelines"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListPipelinesResponse.class); } catch (IOException e) { @@ -131,7 +131,7 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) { String path = String.format("/api/2.0/pipelines/%s/updates", request.getPipelineId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListUpdatesResponse.class); } catch (IOException e) { @@ -144,7 +144,7 @@ public PipelinePermissions setPermissions(PipelinePermissionsRequest request) { String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PipelinePermissions.class); @@ -158,7 +158,7 @@ 
public StartUpdateResponse startUpdate(StartUpdate request) { String path = String.format("/api/2.0/pipelines/%s/updates", request.getPipelineId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, StartUpdateResponse.class); @@ -172,7 +172,7 @@ public void stop(StopRequest request) { String path = String.format("/api/2.0/pipelines/%s/stop", request.getPipelineId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, StopPipelineResponse.class); } catch (IOException e) { @@ -185,7 +185,7 @@ public void update(EditPipeline request) { String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditPipelineResponse.class); @@ -199,7 +199,7 @@ public PipelinePermissions updatePermissions(PipelinePermissionsRequest request) String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PipelinePermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java 
index b737fbd9a..68ae645a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java @@ -4,34 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ReportSpec.ReportSpecSerializer.class) +@JsonDeserialize(using = ReportSpec.ReportSpecDeserializer.class) public class ReportSpec { /** Required. Destination catalog to store table. */ - @JsonProperty("destination_catalog") private String destinationCatalog; /** Required. Destination schema to store table. */ - @JsonProperty("destination_schema") private String destinationSchema; /** * Required. Destination table name. The pipeline fails if a table with that name already exists. */ - @JsonProperty("destination_table") private String destinationTable; /** Required. Report URL in the source system. */ - @JsonProperty("source_url") private String sourceUrl; /** * Configuration settings to control the ingestion of tables. These settings override the * table_configuration defined in the IngestionPipelineDefinition object. 
*/ - @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; public ReportSpec setDestinationCatalog(String destinationCatalog) { @@ -107,4 +113,45 @@ public String toString() { .add("tableConfiguration", tableConfiguration) .toString(); } + + ReportSpecPb toPb() { + ReportSpecPb pb = new ReportSpecPb(); + pb.setDestinationCatalog(destinationCatalog); + pb.setDestinationSchema(destinationSchema); + pb.setDestinationTable(destinationTable); + pb.setSourceUrl(sourceUrl); + pb.setTableConfiguration(tableConfiguration); + + return pb; + } + + static ReportSpec fromPb(ReportSpecPb pb) { + ReportSpec model = new ReportSpec(); + model.setDestinationCatalog(pb.getDestinationCatalog()); + model.setDestinationSchema(pb.getDestinationSchema()); + model.setDestinationTable(pb.getDestinationTable()); + model.setSourceUrl(pb.getSourceUrl()); + model.setTableConfiguration(pb.getTableConfiguration()); + + return model; + } + + public static class ReportSpecSerializer extends JsonSerializer { + @Override + public void serialize(ReportSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReportSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReportSpecDeserializer extends JsonDeserializer { + @Override + public ReportSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReportSpecPb pb = mapper.readValue(p, ReportSpecPb.class); + return ReportSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpecPb.java new file mode 100755 index 000000000..fee0ace22 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpecPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ReportSpecPb { + @JsonProperty("destination_catalog") + private String destinationCatalog; + + @JsonProperty("destination_schema") + private String destinationSchema; + + @JsonProperty("destination_table") + private String destinationTable; + + @JsonProperty("source_url") + private String sourceUrl; + + @JsonProperty("table_configuration") + private TableSpecificConfig tableConfiguration; + + public ReportSpecPb setDestinationCatalog(String destinationCatalog) { + this.destinationCatalog = destinationCatalog; + return this; + } + + public String getDestinationCatalog() { + return destinationCatalog; + } + + public ReportSpecPb setDestinationSchema(String destinationSchema) { + this.destinationSchema = destinationSchema; + return this; + } + + public String getDestinationSchema() { + return destinationSchema; + } + + public ReportSpecPb setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + public String getDestinationTable() { + return destinationTable; + } + + public ReportSpecPb setSourceUrl(String sourceUrl) { + this.sourceUrl = sourceUrl; + return this; + } + + public 
String getSourceUrl() { + return sourceUrl; + } + + public ReportSpecPb setTableConfiguration(TableSpecificConfig tableConfiguration) { + this.tableConfiguration = tableConfiguration; + return this; + } + + public TableSpecificConfig getTableConfiguration() { + return tableConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReportSpecPb that = (ReportSpecPb) o; + return Objects.equals(destinationCatalog, that.destinationCatalog) + && Objects.equals(destinationSchema, that.destinationSchema) + && Objects.equals(destinationTable, that.destinationTable) + && Objects.equals(sourceUrl, that.sourceUrl) + && Objects.equals(tableConfiguration, that.tableConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash( + destinationCatalog, destinationSchema, destinationTable, sourceUrl, tableConfiguration); + } + + @Override + public String toString() { + return new ToStringer(ReportSpecPb.class) + .add("destinationCatalog", destinationCatalog) + .add("destinationSchema", destinationSchema) + .add("destinationTable", destinationTable) + .add("sourceUrl", sourceUrl) + .add("tableConfiguration", tableConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java index c4b6cc278..6885261c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java @@ -4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RestartWindow.RestartWindowSerializer.class) +@JsonDeserialize(using = RestartWindow.RestartWindowDeserializer.class) public class RestartWindow { /** * Days of week in which the restart is allowed to happen (within a five-hour window starting at * start_hour). If not specified all days of the week will be used. */ - @JsonProperty("days_of_week") private Collection daysOfWeek; /** * An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. * Continuous pipeline restart is triggered only within a five-hour window starting at this hour. */ - @JsonProperty("start_hour") private Long startHour; /** @@ -29,7 +38,6 @@ public class RestartWindow { * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html * for details. If not specified, UTC will be used. 
*/ - @JsonProperty("time_zone_id") private String timeZoneId; public RestartWindow setDaysOfWeek(Collection daysOfWeek) { @@ -82,4 +90,41 @@ public String toString() { .add("timeZoneId", timeZoneId) .toString(); } + + RestartWindowPb toPb() { + RestartWindowPb pb = new RestartWindowPb(); + pb.setDaysOfWeek(daysOfWeek); + pb.setStartHour(startHour); + pb.setTimeZoneId(timeZoneId); + + return pb; + } + + static RestartWindow fromPb(RestartWindowPb pb) { + RestartWindow model = new RestartWindow(); + model.setDaysOfWeek(pb.getDaysOfWeek()); + model.setStartHour(pb.getStartHour()); + model.setTimeZoneId(pb.getTimeZoneId()); + + return model; + } + + public static class RestartWindowSerializer extends JsonSerializer { + @Override + public void serialize(RestartWindow value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestartWindowPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestartWindowDeserializer extends JsonDeserializer { + @Override + public RestartWindow deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestartWindowPb pb = mapper.readValue(p, RestartWindowPb.class); + return RestartWindow.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowPb.java new file mode 100755 index 000000000..62b1a3962 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RestartWindowPb { + @JsonProperty("days_of_week") + private Collection daysOfWeek; + + @JsonProperty("start_hour") + private Long startHour; + + @JsonProperty("time_zone_id") + private String timeZoneId; + + public RestartWindowPb setDaysOfWeek(Collection daysOfWeek) { + this.daysOfWeek = daysOfWeek; + return this; + } + + public Collection getDaysOfWeek() { + return daysOfWeek; + } + + public RestartWindowPb setStartHour(Long startHour) { + this.startHour = startHour; + return this; + } + + public Long getStartHour() { + return startHour; + } + + public RestartWindowPb setTimeZoneId(String timeZoneId) { + this.timeZoneId = timeZoneId; + return this; + } + + public String getTimeZoneId() { + return timeZoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestartWindowPb that = (RestartWindowPb) o; + return Objects.equals(daysOfWeek, that.daysOfWeek) + && Objects.equals(startHour, that.startHour) + && Objects.equals(timeZoneId, that.timeZoneId); + } + + @Override + public int hashCode() { + return Objects.hash(daysOfWeek, startHour, timeZoneId); + } + + @Override + public String toString() { + return new ToStringer(RestartWindowPb.class) + .add("daysOfWeek", daysOfWeek) + .add("startHour", startHour) + .add("timeZoneId", timeZoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java index 0258b123e..44ecb2318 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -16,16 +25,16 @@ * is thrown. */ @Generated +@JsonSerialize(using = RunAs.RunAsSerializer.class) +@JsonDeserialize(using = RunAs.RunAsDeserializer.class) public class RunAs { /** * Application ID of an active service principal. Setting this field requires the * `servicePrincipal/user` role. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** The email of an active workspace user. Users can only set this field to their own email. 
*/ - @JsonProperty("user_name") private String userName; public RunAs setServicePrincipalName(String servicePrincipalName) { @@ -67,4 +76,39 @@ public String toString() { .add("userName", userName) .toString(); } + + RunAsPb toPb() { + RunAsPb pb = new RunAsPb(); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static RunAs fromPb(RunAsPb pb) { + RunAs model = new RunAs(); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class RunAsSerializer extends JsonSerializer { + @Override + public void serialize(RunAs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RunAsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RunAsDeserializer extends JsonDeserializer { + @Override + public RunAs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RunAsPb pb = mapper.readValue(p, RunAsPb.class); + return RunAs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAsPb.java new file mode 100755 index 000000000..eff6eccd8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAsPb.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Write-only setting, available only in Create/Update calls. 
Specifies the user or service + * principal that the pipeline runs as. If not specified, the pipeline runs as the user who created + * the pipeline. + * + *
<p>
Only `user_name` or `service_principal_name` can be specified. If both are specified, an error + * is thrown. + */ +@Generated +class RunAsPb { + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public RunAsPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RunAsPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunAsPb that = (RunAsPb) o; + return Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(RunAsPb.class) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java index 6f25d25f9..2d0d8bab5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SchemaSpec.SchemaSpecSerializer.class) +@JsonDeserialize(using = SchemaSpec.SchemaSpecDeserializer.class) public class SchemaSpec { /** Required. Destination catalog to store tables. */ - @JsonProperty("destination_catalog") private String destinationCatalog; /** @@ -18,15 +28,12 @@ public class SchemaSpec { * are created in this destination schema. The pipeline fails If a table with the same name * already exists. */ - @JsonProperty("destination_schema") private String destinationSchema; /** The source catalog name. Might be optional depending on the type of source. */ - @JsonProperty("source_catalog") private String sourceCatalog; /** Required. Schema name in the source database. */ - @JsonProperty("source_schema") private String sourceSchema; /** @@ -34,7 +41,6 @@ public class SchemaSpec { * tables in this schema and override the table_configuration defined in the * IngestionPipelineDefinition object. 
*/ - @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; public SchemaSpec setDestinationCatalog(String destinationCatalog) { @@ -110,4 +116,45 @@ public String toString() { .add("tableConfiguration", tableConfiguration) .toString(); } + + SchemaSpecPb toPb() { + SchemaSpecPb pb = new SchemaSpecPb(); + pb.setDestinationCatalog(destinationCatalog); + pb.setDestinationSchema(destinationSchema); + pb.setSourceCatalog(sourceCatalog); + pb.setSourceSchema(sourceSchema); + pb.setTableConfiguration(tableConfiguration); + + return pb; + } + + static SchemaSpec fromPb(SchemaSpecPb pb) { + SchemaSpec model = new SchemaSpec(); + model.setDestinationCatalog(pb.getDestinationCatalog()); + model.setDestinationSchema(pb.getDestinationSchema()); + model.setSourceCatalog(pb.getSourceCatalog()); + model.setSourceSchema(pb.getSourceSchema()); + model.setTableConfiguration(pb.getTableConfiguration()); + + return model; + } + + public static class SchemaSpecSerializer extends JsonSerializer { + @Override + public void serialize(SchemaSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SchemaSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SchemaSpecDeserializer extends JsonDeserializer { + @Override + public SchemaSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SchemaSpecPb pb = mapper.readValue(p, SchemaSpecPb.class); + return SchemaSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpecPb.java new file mode 100755 index 000000000..087e77116 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpecPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SchemaSpecPb { + @JsonProperty("destination_catalog") + private String destinationCatalog; + + @JsonProperty("destination_schema") + private String destinationSchema; + + @JsonProperty("source_catalog") + private String sourceCatalog; + + @JsonProperty("source_schema") + private String sourceSchema; + + @JsonProperty("table_configuration") + private TableSpecificConfig tableConfiguration; + + public SchemaSpecPb setDestinationCatalog(String destinationCatalog) { + this.destinationCatalog = destinationCatalog; + return this; + } + + public String getDestinationCatalog() { + return destinationCatalog; + } + + public SchemaSpecPb setDestinationSchema(String destinationSchema) { + this.destinationSchema = destinationSchema; + return this; + } + + public String getDestinationSchema() { + return destinationSchema; + } + + public SchemaSpecPb setSourceCatalog(String sourceCatalog) { + this.sourceCatalog = sourceCatalog; + return this; + } + + public String getSourceCatalog() { + return sourceCatalog; + } + + public SchemaSpecPb setSourceSchema(String sourceSchema) { + this.sourceSchema = sourceSchema; + return this; + } + + public String 
getSourceSchema() { + return sourceSchema; + } + + public SchemaSpecPb setTableConfiguration(TableSpecificConfig tableConfiguration) { + this.tableConfiguration = tableConfiguration; + return this; + } + + public TableSpecificConfig getTableConfiguration() { + return tableConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SchemaSpecPb that = (SchemaSpecPb) o; + return Objects.equals(destinationCatalog, that.destinationCatalog) + && Objects.equals(destinationSchema, that.destinationSchema) + && Objects.equals(sourceCatalog, that.sourceCatalog) + && Objects.equals(sourceSchema, that.sourceSchema) + && Objects.equals(tableConfiguration, that.tableConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash( + destinationCatalog, destinationSchema, sourceCatalog, sourceSchema, tableConfiguration); + } + + @Override + public String toString() { + return new ToStringer(SchemaSpecPb.class) + .add("destinationCatalog", destinationCatalog) + .add("destinationSchema", destinationSchema) + .add("sourceCatalog", sourceCatalog) + .add("sourceSchema", sourceSchema) + .add("tableConfiguration", tableConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java index 320bd660d..7436be424 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Sequencing.SequencingSerializer.class) +@JsonDeserialize(using = Sequencing.SequencingDeserializer.class) public class Sequencing { /** A sequence number, unique and increasing within the control plane. */ - @JsonProperty("control_plane_seq_no") private Long controlPlaneSeqNo; /** the ID assigned by the data plane. */ - @JsonProperty("data_plane_id") private DataPlaneId dataPlaneId; public Sequencing setControlPlaneSeqNo(Long controlPlaneSeqNo) { @@ -56,4 +65,39 @@ public String toString() { .add("dataPlaneId", dataPlaneId) .toString(); } + + SequencingPb toPb() { + SequencingPb pb = new SequencingPb(); + pb.setControlPlaneSeqNo(controlPlaneSeqNo); + pb.setDataPlaneId(dataPlaneId); + + return pb; + } + + static Sequencing fromPb(SequencingPb pb) { + Sequencing model = new Sequencing(); + model.setControlPlaneSeqNo(pb.getControlPlaneSeqNo()); + model.setDataPlaneId(pb.getDataPlaneId()); + + return model; + } + + public static class SequencingSerializer extends JsonSerializer { + @Override + public void serialize(Sequencing value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SequencingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SequencingDeserializer extends JsonDeserializer { + @Override + public Sequencing deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SequencingPb pb = mapper.readValue(p, SequencingPb.class); + return Sequencing.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SequencingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SequencingPb.java new file mode 100755 index 000000000..56374506e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SequencingPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SequencingPb { + @JsonProperty("control_plane_seq_no") + private Long controlPlaneSeqNo; + + @JsonProperty("data_plane_id") + private DataPlaneId dataPlaneId; + + public SequencingPb setControlPlaneSeqNo(Long controlPlaneSeqNo) { + this.controlPlaneSeqNo = controlPlaneSeqNo; + return this; + } + + public Long getControlPlaneSeqNo() { + return controlPlaneSeqNo; + } + + public SequencingPb setDataPlaneId(DataPlaneId dataPlaneId) { + this.dataPlaneId = dataPlaneId; + return this; + } + + public DataPlaneId getDataPlaneId() { + return dataPlaneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SequencingPb that = (SequencingPb) o; + return Objects.equals(controlPlaneSeqNo, that.controlPlaneSeqNo) + && Objects.equals(dataPlaneId, that.dataPlaneId); + } + + @Override + public int hashCode() { + return Objects.hash(controlPlaneSeqNo, dataPlaneId); + } + + @Override + public String toString() { + return new ToStringer(SequencingPb.class) + .add("controlPlaneSeqNo", controlPlaneSeqNo) + .add("dataPlaneId", dataPlaneId) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java index 1c98c301c..feb175896 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SerializedException.SerializedExceptionSerializer.class) +@JsonDeserialize(using = SerializedException.SerializedExceptionDeserializer.class) public class SerializedException { /** Runtime class of the exception */ - @JsonProperty("class_name") private String className; /** Exception message */ - @JsonProperty("message") private String message; /** Stack trace consisting of a list of stack frames */ - @JsonProperty("stack") private Collection stack; public SerializedException setClassName(String className) { @@ -72,4 +80,43 @@ public String toString() { .add("stack", stack) .toString(); } + + SerializedExceptionPb toPb() { + SerializedExceptionPb pb = new SerializedExceptionPb(); + pb.setClassName(className); + pb.setMessage(message); + 
pb.setStack(stack); + + return pb; + } + + static SerializedException fromPb(SerializedExceptionPb pb) { + SerializedException model = new SerializedException(); + model.setClassName(pb.getClassName()); + model.setMessage(pb.getMessage()); + model.setStack(pb.getStack()); + + return model; + } + + public static class SerializedExceptionSerializer extends JsonSerializer { + @Override + public void serialize(SerializedException value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SerializedExceptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SerializedExceptionDeserializer + extends JsonDeserializer { + @Override + public SerializedException deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SerializedExceptionPb pb = mapper.readValue(p, SerializedExceptionPb.class); + return SerializedException.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedExceptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedExceptionPb.java new file mode 100755 index 000000000..760d34f07 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedExceptionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SerializedExceptionPb { + @JsonProperty("class_name") + private String className; + + @JsonProperty("message") + private String message; + + @JsonProperty("stack") + private Collection stack; + + public SerializedExceptionPb setClassName(String className) { + this.className = className; + return this; + } + + public String getClassName() { + return className; + } + + public SerializedExceptionPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public SerializedExceptionPb setStack(Collection stack) { + this.stack = stack; + return this; + } + + public Collection getStack() { + return stack; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SerializedExceptionPb that = (SerializedExceptionPb) o; + return Objects.equals(className, that.className) + && Objects.equals(message, that.message) + && Objects.equals(stack, that.stack); + } + + @Override + public int hashCode() { + return Objects.hash(className, message, stack); + } + + @Override + public String toString() { + return new ToStringer(SerializedExceptionPb.class) + .add("className", className) + .add("message", message) + .add("stack", stack) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java index 91fd8eb8f..5947c2058 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StackFrame.StackFrameSerializer.class) +@JsonDeserialize(using = StackFrame.StackFrameDeserializer.class) public class StackFrame { /** Class from which the method call originated */ - @JsonProperty("declaring_class") private String declaringClass; /** File where the method is defined */ - @JsonProperty("file_name") private String fileName; /** Line from which the method was called */ - @JsonProperty("line_number") private Long lineNumber; /** Name of the method which was called */ - @JsonProperty("method_name") private String methodName; public StackFrame setDeclaringClass(String declaringClass) { @@ -86,4 +93,43 @@ public String toString() { .add("methodName", methodName) .toString(); } + + StackFramePb toPb() { + StackFramePb pb = new StackFramePb(); + pb.setDeclaringClass(declaringClass); + pb.setFileName(fileName); + pb.setLineNumber(lineNumber); + pb.setMethodName(methodName); + + return pb; + } + + static StackFrame fromPb(StackFramePb pb) { + StackFrame model = new StackFrame(); + model.setDeclaringClass(pb.getDeclaringClass()); + model.setFileName(pb.getFileName()); + model.setLineNumber(pb.getLineNumber()); + 
model.setMethodName(pb.getMethodName()); + + return model; + } + + public static class StackFrameSerializer extends JsonSerializer { + @Override + public void serialize(StackFrame value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StackFramePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StackFrameDeserializer extends JsonDeserializer { + @Override + public StackFrame deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StackFramePb pb = mapper.readValue(p, StackFramePb.class); + return StackFrame.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFramePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFramePb.java new file mode 100755 index 000000000..12f5b740c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFramePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StackFramePb { + @JsonProperty("declaring_class") + private String declaringClass; + + @JsonProperty("file_name") + private String fileName; + + @JsonProperty("line_number") + private Long lineNumber; + + @JsonProperty("method_name") + private String methodName; + + public StackFramePb setDeclaringClass(String declaringClass) { + this.declaringClass = declaringClass; + return this; + } + + public String getDeclaringClass() { + return declaringClass; + } + + public StackFramePb setFileName(String fileName) { + this.fileName = fileName; + return this; + } + + public String getFileName() { + return fileName; + } + + public StackFramePb setLineNumber(Long lineNumber) { + this.lineNumber = lineNumber; + return this; + } + + public Long getLineNumber() { + return lineNumber; + } + + public StackFramePb setMethodName(String methodName) { + this.methodName = methodName; + return this; + } + + public String getMethodName() { + return methodName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StackFramePb that = (StackFramePb) o; + return Objects.equals(declaringClass, that.declaringClass) + && Objects.equals(fileName, that.fileName) + && Objects.equals(lineNumber, that.lineNumber) + && Objects.equals(methodName, that.methodName); + } + + @Override + public int hashCode() { + return Objects.hash(declaringClass, fileName, lineNumber, methodName); + } + + @Override + public String toString() { + return new ToStringer(StackFramePb.class) + .add("declaringClass", declaringClass) + .add("fileName", fileName) + .add("lineNumber", lineNumber) + .add("methodName", methodName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java index 687fa8be1..231f6b83a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java @@ -4,19 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = StartUpdate.StartUpdateSerializer.class) +@JsonDeserialize(using = StartUpdate.StartUpdateDeserializer.class) public class StartUpdate { /** What triggered this update. */ - @JsonProperty("cause") private StartUpdateCause cause; /** If true, this update will reset all tables before running. */ - @JsonProperty("full_refresh") private Boolean fullRefresh; /** @@ -24,25 +32,22 @@ public class StartUpdate { * full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means * that the states of the table will be reset before the refresh. 
*/ - @JsonProperty("full_refresh_selection") private Collection fullRefreshSelection; /** */ - @JsonIgnore private String pipelineId; + private String pipelineId; /** * A list of tables to update without fullRefresh. If both refresh_selection and * full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means * that the states of the table will be reset before the refresh. */ - @JsonProperty("refresh_selection") private Collection refreshSelection; /** * If true, this update only validates the correctness of pipeline source code but does not * materialize or publish any datasets. */ - @JsonProperty("validate_only") private Boolean validateOnly; public StartUpdate setCause(StartUpdateCause cause) { @@ -129,4 +134,47 @@ public String toString() { .add("validateOnly", validateOnly) .toString(); } + + StartUpdatePb toPb() { + StartUpdatePb pb = new StartUpdatePb(); + pb.setCause(cause); + pb.setFullRefresh(fullRefresh); + pb.setFullRefreshSelection(fullRefreshSelection); + pb.setPipelineId(pipelineId); + pb.setRefreshSelection(refreshSelection); + pb.setValidateOnly(validateOnly); + + return pb; + } + + static StartUpdate fromPb(StartUpdatePb pb) { + StartUpdate model = new StartUpdate(); + model.setCause(pb.getCause()); + model.setFullRefresh(pb.getFullRefresh()); + model.setFullRefreshSelection(pb.getFullRefreshSelection()); + model.setPipelineId(pb.getPipelineId()); + model.setRefreshSelection(pb.getRefreshSelection()); + model.setValidateOnly(pb.getValidateOnly()); + + return model; + } + + public static class StartUpdateSerializer extends JsonSerializer { + @Override + public void serialize(StartUpdate value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartUpdatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartUpdateDeserializer extends JsonDeserializer { + @Override + public StartUpdate deserialize(JsonParser p, DeserializationContext ctxt) throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartUpdatePb pb = mapper.readValue(p, StartUpdatePb.class); + return StartUpdate.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdatePb.java new file mode 100755 index 000000000..65c40c1ac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdatePb.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class StartUpdatePb { + @JsonProperty("cause") + private StartUpdateCause cause; + + @JsonProperty("full_refresh") + private Boolean fullRefresh; + + @JsonProperty("full_refresh_selection") + private Collection fullRefreshSelection; + + @JsonIgnore private String pipelineId; + + @JsonProperty("refresh_selection") + private Collection refreshSelection; + + @JsonProperty("validate_only") + private Boolean validateOnly; + + public StartUpdatePb setCause(StartUpdateCause cause) { + this.cause = cause; + return this; + } + + public StartUpdateCause getCause() { + return cause; + } + + public StartUpdatePb setFullRefresh(Boolean fullRefresh) { + this.fullRefresh = fullRefresh; + return this; + } + + public Boolean getFullRefresh() { + return fullRefresh; + } + + public StartUpdatePb setFullRefreshSelection(Collection fullRefreshSelection) { + this.fullRefreshSelection = fullRefreshSelection; + return this; + } + + public Collection getFullRefreshSelection() { + 
return fullRefreshSelection; + } + + public StartUpdatePb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public StartUpdatePb setRefreshSelection(Collection refreshSelection) { + this.refreshSelection = refreshSelection; + return this; + } + + public Collection getRefreshSelection() { + return refreshSelection; + } + + public StartUpdatePb setValidateOnly(Boolean validateOnly) { + this.validateOnly = validateOnly; + return this; + } + + public Boolean getValidateOnly() { + return validateOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StartUpdatePb that = (StartUpdatePb) o; + return Objects.equals(cause, that.cause) + && Objects.equals(fullRefresh, that.fullRefresh) + && Objects.equals(fullRefreshSelection, that.fullRefreshSelection) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(refreshSelection, that.refreshSelection) + && Objects.equals(validateOnly, that.validateOnly); + } + + @Override + public int hashCode() { + return Objects.hash( + cause, fullRefresh, fullRefreshSelection, pipelineId, refreshSelection, validateOnly); + } + + @Override + public String toString() { + return new ToStringer(StartUpdatePb.class) + .add("cause", cause) + .add("fullRefresh", fullRefresh) + .add("fullRefreshSelection", fullRefreshSelection) + .add("pipelineId", pipelineId) + .add("refreshSelection", refreshSelection) + .add("validateOnly", validateOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java index 655c56a38..3cd427acf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StartUpdateResponse.StartUpdateResponseSerializer.class) +@JsonDeserialize(using = StartUpdateResponse.StartUpdateResponseDeserializer.class) public class StartUpdateResponse { /** */ - @JsonProperty("update_id") private String updateId; public StartUpdateResponse setUpdateId(String updateId) { @@ -39,4 +49,39 @@ public int hashCode() { public String toString() { return new ToStringer(StartUpdateResponse.class).add("updateId", updateId).toString(); } + + StartUpdateResponsePb toPb() { + StartUpdateResponsePb pb = new StartUpdateResponsePb(); + pb.setUpdateId(updateId); + + return pb; + } + + static StartUpdateResponse fromPb(StartUpdateResponsePb pb) { + StartUpdateResponse model = new StartUpdateResponse(); + model.setUpdateId(pb.getUpdateId()); + + return model; + } + + public static class StartUpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(StartUpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartUpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
StartUpdateResponseDeserializer + extends JsonDeserializer { + @Override + public StartUpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartUpdateResponsePb pb = mapper.readValue(p, StartUpdateResponsePb.class); + return StartUpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponsePb.java new file mode 100755 index 000000000..62ec6b7f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StartUpdateResponsePb { + @JsonProperty("update_id") + private String updateId; + + public StartUpdateResponsePb setUpdateId(String updateId) { + this.updateId = updateId; + return this; + } + + public String getUpdateId() { + return updateId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StartUpdateResponsePb that = (StartUpdateResponsePb) o; + return Objects.equals(updateId, that.updateId); + } + + @Override + public int hashCode() { + return Objects.hash(updateId); + } + + @Override + public String toString() { + return new ToStringer(StartUpdateResponsePb.class).add("updateId", updateId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java index 50dfb4c77..be673c3ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StopPipelineResponse.StopPipelineResponseSerializer.class) +@JsonDeserialize(using = StopPipelineResponse.StopPipelineResponseDeserializer.class) public class StopPipelineResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(StopPipelineResponse.class).toString(); } + + StopPipelineResponsePb toPb() { + StopPipelineResponsePb pb = new StopPipelineResponsePb(); + + return pb; + } + + static StopPipelineResponse fromPb(StopPipelineResponsePb pb) { + StopPipelineResponse model = new StopPipelineResponse(); + + return model; + } + + public static class StopPipelineResponseSerializer extends JsonSerializer { + @Override + public void serialize( + StopPipelineResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StopPipelineResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
StopPipelineResponseDeserializer + extends JsonDeserializer { + @Override + public StopPipelineResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StopPipelineResponsePb pb = mapper.readValue(p, StopPipelineResponsePb.class); + return StopPipelineResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponsePb.java new file mode 100755 index 000000000..54e63fe49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class StopPipelineResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(StopPipelineResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java index 747427f9c..1031cc6af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Stop a pipeline */ @Generated +@JsonSerialize(using = StopRequest.StopRequestSerializer.class) +@JsonDeserialize(using = StopRequest.StopRequestDeserializer.class) public class StopRequest { /** */ - @JsonIgnore private String pipelineId; + private String pipelineId; public StopRequest setPipelineId(String pipelineId) { this.pipelineId = pipelineId; @@ -39,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(StopRequest.class).add("pipelineId", pipelineId).toString(); } + + StopRequestPb toPb() { + StopRequestPb pb = new StopRequestPb(); + pb.setPipelineId(pipelineId); + + return pb; + } + + static StopRequest fromPb(StopRequestPb pb) { + StopRequest model = new StopRequest(); + model.setPipelineId(pb.getPipelineId()); + + return model; + } + + public static class StopRequestSerializer extends JsonSerializer { + @Override + public void serialize(StopRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StopRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StopRequestDeserializer extends JsonDeserializer { + @Override + public StopRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StopRequestPb pb = mapper.readValue(p, StopRequestPb.class); + return StopRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequestPb.java new file mode 100755 index 000000000..47cca636d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Stop a pipeline */ +@Generated +class StopRequestPb { + @JsonIgnore private String pipelineId; + + public StopRequestPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StopRequestPb that = (StopRequestPb) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(StopRequestPb.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java index 619922530..79fd8cc45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java @@ -4,43 +4,47 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TableSpec.TableSpecSerializer.class) +@JsonDeserialize(using = TableSpec.TableSpecDeserializer.class) public class TableSpec { /** Required. Destination catalog to store table. */ - @JsonProperty("destination_catalog") private String destinationCatalog; /** Required. Destination schema to store table. */ - @JsonProperty("destination_schema") private String destinationSchema; /** * Optional. Destination table name. The pipeline fails if a table with that name already exists. * If not set, the source table name is used. */ - @JsonProperty("destination_table") private String destinationTable; /** Source catalog name. Might be optional depending on the type of source. */ - @JsonProperty("source_catalog") private String sourceCatalog; /** Schema name in the source database. Might be optional depending on the type of source. */ - @JsonProperty("source_schema") private String sourceSchema; /** Required. Table name in the source database. */ - @JsonProperty("source_table") private String sourceTable; /** * Configuration settings to control the ingestion of tables. These settings override the * table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec. 
*/ - @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; public TableSpec setDestinationCatalog(String destinationCatalog) { @@ -144,4 +148,49 @@ public String toString() { .add("tableConfiguration", tableConfiguration) .toString(); } + + TableSpecPb toPb() { + TableSpecPb pb = new TableSpecPb(); + pb.setDestinationCatalog(destinationCatalog); + pb.setDestinationSchema(destinationSchema); + pb.setDestinationTable(destinationTable); + pb.setSourceCatalog(sourceCatalog); + pb.setSourceSchema(sourceSchema); + pb.setSourceTable(sourceTable); + pb.setTableConfiguration(tableConfiguration); + + return pb; + } + + static TableSpec fromPb(TableSpecPb pb) { + TableSpec model = new TableSpec(); + model.setDestinationCatalog(pb.getDestinationCatalog()); + model.setDestinationSchema(pb.getDestinationSchema()); + model.setDestinationTable(pb.getDestinationTable()); + model.setSourceCatalog(pb.getSourceCatalog()); + model.setSourceSchema(pb.getSourceSchema()); + model.setSourceTable(pb.getSourceTable()); + model.setTableConfiguration(pb.getTableConfiguration()); + + return model; + } + + public static class TableSpecSerializer extends JsonSerializer { + @Override + public void serialize(TableSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableSpecDeserializer extends JsonDeserializer { + @Override + public TableSpec deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableSpecPb pb = mapper.readValue(p, TableSpecPb.class); + return TableSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecPb.java new file mode 100755 index 000000000..0d8019fa5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecPb.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TableSpecPb { + @JsonProperty("destination_catalog") + private String destinationCatalog; + + @JsonProperty("destination_schema") + private String destinationSchema; + + @JsonProperty("destination_table") + private String destinationTable; + + @JsonProperty("source_catalog") + private String sourceCatalog; + + @JsonProperty("source_schema") + private String sourceSchema; + + @JsonProperty("source_table") + private String sourceTable; + + @JsonProperty("table_configuration") + private TableSpecificConfig tableConfiguration; + + public TableSpecPb setDestinationCatalog(String destinationCatalog) { + this.destinationCatalog = destinationCatalog; + return this; + } + + public String getDestinationCatalog() { + return destinationCatalog; + } + + public TableSpecPb setDestinationSchema(String destinationSchema) { + this.destinationSchema = destinationSchema; + return this; + } + + public String getDestinationSchema() { + return destinationSchema; + } + + public TableSpecPb setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + public String getDestinationTable() { + return 
destinationTable; + } + + public TableSpecPb setSourceCatalog(String sourceCatalog) { + this.sourceCatalog = sourceCatalog; + return this; + } + + public String getSourceCatalog() { + return sourceCatalog; + } + + public TableSpecPb setSourceSchema(String sourceSchema) { + this.sourceSchema = sourceSchema; + return this; + } + + public String getSourceSchema() { + return sourceSchema; + } + + public TableSpecPb setSourceTable(String sourceTable) { + this.sourceTable = sourceTable; + return this; + } + + public String getSourceTable() { + return sourceTable; + } + + public TableSpecPb setTableConfiguration(TableSpecificConfig tableConfiguration) { + this.tableConfiguration = tableConfiguration; + return this; + } + + public TableSpecificConfig getTableConfiguration() { + return tableConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableSpecPb that = (TableSpecPb) o; + return Objects.equals(destinationCatalog, that.destinationCatalog) + && Objects.equals(destinationSchema, that.destinationSchema) + && Objects.equals(destinationTable, that.destinationTable) + && Objects.equals(sourceCatalog, that.sourceCatalog) + && Objects.equals(sourceSchema, that.sourceSchema) + && Objects.equals(sourceTable, that.sourceTable) + && Objects.equals(tableConfiguration, that.tableConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash( + destinationCatalog, + destinationSchema, + destinationTable, + sourceCatalog, + sourceSchema, + sourceTable, + tableConfiguration); + } + + @Override + public String toString() { + return new ToStringer(TableSpecPb.class) + .add("destinationCatalog", destinationCatalog) + .add("destinationSchema", destinationSchema) + .add("destinationTable", destinationTable) + .add("sourceCatalog", sourceCatalog) + .add("sourceSchema", sourceSchema) + .add("sourceTable", sourceTable) + .add("tableConfiguration", 
tableConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java index 40843c46f..691556cc7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TableSpecificConfig.TableSpecificConfigSerializer.class) +@JsonDeserialize(using = TableSpecificConfig.TableSpecificConfigDeserializer.class) public class TableSpecificConfig { /** * A list of column names to be excluded for the ingestion. When not specified, include_columns @@ -16,7 +27,6 @@ public class TableSpecificConfig { * ones will be automatically included for ingestion. This field in mutually exclusive with * `include_columns`. */ - @JsonProperty("exclude_columns") private Collection excludeColumns; /** @@ -25,29 +35,24 @@ public class TableSpecificConfig { * specified, all other future columns will be automatically excluded from ingestion. 
This field * in mutually exclusive with `exclude_columns`. */ - @JsonProperty("include_columns") private Collection includeColumns; /** The primary key of the table used to apply changes. */ - @JsonProperty("primary_keys") private Collection primaryKeys; /** * If true, formula fields defined in the table are included in the ingestion. This setting is * only valid for the Salesforce connector */ - @JsonProperty("salesforce_include_formula_fields") private Boolean salesforceIncludeFormulaFields; /** The SCD type to use to ingest the table. */ - @JsonProperty("scd_type") private TableSpecificConfigScdType scdType; /** * The column names specifying the logical order of events in the source data. Delta Live Tables * uses this sequencing to handle change events that arrive out of order. */ - @JsonProperty("sequence_by") private Collection sequenceBy; public TableSpecificConfig setExcludeColumns(Collection excludeColumns) { @@ -140,4 +145,49 @@ public String toString() { .add("sequenceBy", sequenceBy) .toString(); } + + TableSpecificConfigPb toPb() { + TableSpecificConfigPb pb = new TableSpecificConfigPb(); + pb.setExcludeColumns(excludeColumns); + pb.setIncludeColumns(includeColumns); + pb.setPrimaryKeys(primaryKeys); + pb.setSalesforceIncludeFormulaFields(salesforceIncludeFormulaFields); + pb.setScdType(scdType); + pb.setSequenceBy(sequenceBy); + + return pb; + } + + static TableSpecificConfig fromPb(TableSpecificConfigPb pb) { + TableSpecificConfig model = new TableSpecificConfig(); + model.setExcludeColumns(pb.getExcludeColumns()); + model.setIncludeColumns(pb.getIncludeColumns()); + model.setPrimaryKeys(pb.getPrimaryKeys()); + model.setSalesforceIncludeFormulaFields(pb.getSalesforceIncludeFormulaFields()); + model.setScdType(pb.getScdType()); + model.setSequenceBy(pb.getSequenceBy()); + + return model; + } + + public static class TableSpecificConfigSerializer extends JsonSerializer { + @Override + public void serialize(TableSpecificConfig value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + TableSpecificConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableSpecificConfigDeserializer + extends JsonDeserializer { + @Override + public TableSpecificConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableSpecificConfigPb pb = mapper.readValue(p, TableSpecificConfigPb.class); + return TableSpecificConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfigPb.java new file mode 100755 index 000000000..fca74a713 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfigPb.java @@ -0,0 +1,121 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TableSpecificConfigPb { + @JsonProperty("exclude_columns") + private Collection excludeColumns; + + @JsonProperty("include_columns") + private Collection includeColumns; + + @JsonProperty("primary_keys") + private Collection primaryKeys; + + @JsonProperty("salesforce_include_formula_fields") + private Boolean salesforceIncludeFormulaFields; + + @JsonProperty("scd_type") + private TableSpecificConfigScdType scdType; + + @JsonProperty("sequence_by") + private Collection sequenceBy; + + public TableSpecificConfigPb setExcludeColumns(Collection excludeColumns) { + this.excludeColumns = excludeColumns; + return this; + } + + public Collection getExcludeColumns() { + return excludeColumns; + } + + public TableSpecificConfigPb setIncludeColumns(Collection includeColumns) { + this.includeColumns = includeColumns; + return this; + } + + public Collection getIncludeColumns() { + return includeColumns; + } + + public TableSpecificConfigPb setPrimaryKeys(Collection primaryKeys) { + this.primaryKeys = primaryKeys; + return this; + } + + public Collection getPrimaryKeys() { + return primaryKeys; + } + + public TableSpecificConfigPb setSalesforceIncludeFormulaFields( + Boolean salesforceIncludeFormulaFields) { + this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields; + return this; + } + + public Boolean getSalesforceIncludeFormulaFields() { + return salesforceIncludeFormulaFields; + } + + public TableSpecificConfigPb setScdType(TableSpecificConfigScdType scdType) { + this.scdType = scdType; + return this; + } + + public TableSpecificConfigScdType getScdType() { + return scdType; + } + + public TableSpecificConfigPb setSequenceBy(Collection sequenceBy) { + this.sequenceBy = sequenceBy; + return 
this; + } + + public Collection getSequenceBy() { + return sequenceBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableSpecificConfigPb that = (TableSpecificConfigPb) o; + return Objects.equals(excludeColumns, that.excludeColumns) + && Objects.equals(includeColumns, that.includeColumns) + && Objects.equals(primaryKeys, that.primaryKeys) + && Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields) + && Objects.equals(scdType, that.scdType) + && Objects.equals(sequenceBy, that.sequenceBy); + } + + @Override + public int hashCode() { + return Objects.hash( + excludeColumns, + includeColumns, + primaryKeys, + salesforceIncludeFormulaFields, + scdType, + sequenceBy); + } + + @Override + public String toString() { + return new ToStringer(TableSpecificConfigPb.class) + .add("excludeColumns", excludeColumns) + .add("includeColumns", includeColumns) + .add("primaryKeys", primaryKeys) + .add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields) + .add("scdType", scdType) + .add("sequenceBy", sequenceBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java index 59982f4d8..4c31d917e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateInfo.UpdateInfoSerializer.class) +@JsonDeserialize(using = UpdateInfo.UpdateInfoDeserializer.class) public class UpdateInfo { /** What triggered this update. */ - @JsonProperty("cause") private UpdateInfoCause cause; /** The ID of the cluster that the update is running on. */ - @JsonProperty("cluster_id") private String clusterId; /** * The pipeline configuration with system defaults applied where unspecified by the user. Not * returned by ListUpdates. */ - @JsonProperty("config") private PipelineSpec config; /** The time when this update was created. */ - @JsonProperty("creation_time") private Long creationTime; /** If true, this update will reset all tables before running. */ - @JsonProperty("full_refresh") private Boolean fullRefresh; /** @@ -38,11 +44,9 @@ public class UpdateInfo { * full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means * that the states of the table will be reset before the refresh. */ - @JsonProperty("full_refresh_selection") private Collection fullRefreshSelection; /** The ID of the pipeline. */ - @JsonProperty("pipeline_id") private String pipelineId; /** @@ -50,22 +54,18 @@ public class UpdateInfo { * full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means * that the states of the table will be reset before the refresh. */ - @JsonProperty("refresh_selection") private Collection refreshSelection; /** The update state. */ - @JsonProperty("state") private UpdateInfoState state; /** The ID of this update. 
*/ - @JsonProperty("update_id") private String updateId; /** * If true, this update only validates the correctness of pipeline source code but does not * materialize or publish any datasets. */ - @JsonProperty("validate_only") private Boolean validateOnly; public UpdateInfo setCause(UpdateInfoCause cause) { @@ -217,4 +217,57 @@ public String toString() { .add("validateOnly", validateOnly) .toString(); } + + UpdateInfoPb toPb() { + UpdateInfoPb pb = new UpdateInfoPb(); + pb.setCause(cause); + pb.setClusterId(clusterId); + pb.setConfig(config); + pb.setCreationTime(creationTime); + pb.setFullRefresh(fullRefresh); + pb.setFullRefreshSelection(fullRefreshSelection); + pb.setPipelineId(pipelineId); + pb.setRefreshSelection(refreshSelection); + pb.setState(state); + pb.setUpdateId(updateId); + pb.setValidateOnly(validateOnly); + + return pb; + } + + static UpdateInfo fromPb(UpdateInfoPb pb) { + UpdateInfo model = new UpdateInfo(); + model.setCause(pb.getCause()); + model.setClusterId(pb.getClusterId()); + model.setConfig(pb.getConfig()); + model.setCreationTime(pb.getCreationTime()); + model.setFullRefresh(pb.getFullRefresh()); + model.setFullRefreshSelection(pb.getFullRefreshSelection()); + model.setPipelineId(pb.getPipelineId()); + model.setRefreshSelection(pb.getRefreshSelection()); + model.setState(pb.getState()); + model.setUpdateId(pb.getUpdateId()); + model.setValidateOnly(pb.getValidateOnly()); + + return model; + } + + public static class UpdateInfoSerializer extends JsonSerializer { + @Override + public void serialize(UpdateInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateInfoDeserializer extends JsonDeserializer { + @Override + public UpdateInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateInfoPb pb = mapper.readValue(p, UpdateInfoPb.class); + return UpdateInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoPb.java new file mode 100755 index 000000000..e171e7927 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateInfoPb { + @JsonProperty("cause") + private UpdateInfoCause cause; + + @JsonProperty("cluster_id") + private String clusterId; + + @JsonProperty("config") + private PipelineSpec config; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("full_refresh") + private Boolean fullRefresh; + + @JsonProperty("full_refresh_selection") + private Collection fullRefreshSelection; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("refresh_selection") + private Collection refreshSelection; + + @JsonProperty("state") + private UpdateInfoState state; + + @JsonProperty("update_id") + private String updateId; + + @JsonProperty("validate_only") + private Boolean validateOnly; + + public UpdateInfoPb setCause(UpdateInfoCause cause) { + this.cause = cause; + return this; + } + + public UpdateInfoCause getCause() { + return cause; + } + + public UpdateInfoPb setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public UpdateInfoPb setConfig(PipelineSpec config) { + 
this.config = config; + return this; + } + + public PipelineSpec getConfig() { + return config; + } + + public UpdateInfoPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public UpdateInfoPb setFullRefresh(Boolean fullRefresh) { + this.fullRefresh = fullRefresh; + return this; + } + + public Boolean getFullRefresh() { + return fullRefresh; + } + + public UpdateInfoPb setFullRefreshSelection(Collection fullRefreshSelection) { + this.fullRefreshSelection = fullRefreshSelection; + return this; + } + + public Collection getFullRefreshSelection() { + return fullRefreshSelection; + } + + public UpdateInfoPb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public UpdateInfoPb setRefreshSelection(Collection refreshSelection) { + this.refreshSelection = refreshSelection; + return this; + } + + public Collection getRefreshSelection() { + return refreshSelection; + } + + public UpdateInfoPb setState(UpdateInfoState state) { + this.state = state; + return this; + } + + public UpdateInfoState getState() { + return state; + } + + public UpdateInfoPb setUpdateId(String updateId) { + this.updateId = updateId; + return this; + } + + public String getUpdateId() { + return updateId; + } + + public UpdateInfoPb setValidateOnly(Boolean validateOnly) { + this.validateOnly = validateOnly; + return this; + } + + public Boolean getValidateOnly() { + return validateOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateInfoPb that = (UpdateInfoPb) o; + return Objects.equals(cause, that.cause) + && Objects.equals(clusterId, that.clusterId) + && Objects.equals(config, that.config) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(fullRefresh, 
that.fullRefresh) + && Objects.equals(fullRefreshSelection, that.fullRefreshSelection) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(refreshSelection, that.refreshSelection) + && Objects.equals(state, that.state) + && Objects.equals(updateId, that.updateId) + && Objects.equals(validateOnly, that.validateOnly); + } + + @Override + public int hashCode() { + return Objects.hash( + cause, + clusterId, + config, + creationTime, + fullRefresh, + fullRefreshSelection, + pipelineId, + refreshSelection, + state, + updateId, + validateOnly); + } + + @Override + public String toString() { + return new ToStringer(UpdateInfoPb.class) + .add("cause", cause) + .add("clusterId", clusterId) + .add("config", config) + .add("creationTime", creationTime) + .add("fullRefresh", fullRefresh) + .add("fullRefreshSelection", fullRefreshSelection) + .add("pipelineId", pipelineId) + .add("refreshSelection", refreshSelection) + .add("state", state) + .add("updateId", updateId) + .add("validateOnly", validateOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java index 5149ef721..9eb399cbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateStateInfo.UpdateStateInfoSerializer.class) +@JsonDeserialize(using = UpdateStateInfo.UpdateStateInfoDeserializer.class) public class UpdateStateInfo { /** */ - @JsonProperty("creation_time") private String creationTime; /** The update state. */ - @JsonProperty("state") private UpdateStateInfoState state; /** */ - @JsonProperty("update_id") private String updateId; public UpdateStateInfo setCreationTime(String creationTime) { @@ -71,4 +79,42 @@ public String toString() { .add("updateId", updateId) .toString(); } + + UpdateStateInfoPb toPb() { + UpdateStateInfoPb pb = new UpdateStateInfoPb(); + pb.setCreationTime(creationTime); + pb.setState(state); + pb.setUpdateId(updateId); + + return pb; + } + + static UpdateStateInfo fromPb(UpdateStateInfoPb pb) { + UpdateStateInfo model = new UpdateStateInfo(); + model.setCreationTime(pb.getCreationTime()); + model.setState(pb.getState()); + model.setUpdateId(pb.getUpdateId()); + + return model; + } + + public static class UpdateStateInfoSerializer extends JsonSerializer { + @Override + public void serialize(UpdateStateInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateStateInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateStateInfoDeserializer extends JsonDeserializer { + @Override + public UpdateStateInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateStateInfoPb pb = mapper.readValue(p, UpdateStateInfoPb.class); + return UpdateStateInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoPb.java new file mode 100755 index 000000000..76663bd40 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateStateInfoPb { + @JsonProperty("creation_time") + private String creationTime; + + @JsonProperty("state") + private UpdateStateInfoState state; + + @JsonProperty("update_id") + private String updateId; + + public UpdateStateInfoPb setCreationTime(String creationTime) { + this.creationTime = creationTime; + return this; + } + + public String getCreationTime() { + return creationTime; + } + + public UpdateStateInfoPb setState(UpdateStateInfoState state) { + this.state = state; + return this; + } + + public UpdateStateInfoState getState() { + return state; + } + + public UpdateStateInfoPb setUpdateId(String updateId) { + this.updateId = updateId; + return this; + } + + public String getUpdateId() { + return updateId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateStateInfoPb that = (UpdateStateInfoPb) o; + return Objects.equals(creationTime, that.creationTime) + && Objects.equals(state, that.state) + && Objects.equals(updateId, that.updateId); + } + + @Override + public int hashCode() { + return 
Objects.hash(creationTime, state, updateId); + } + + @Override + public String toString() { + return new ToStringer(UpdateStateInfoPb.class) + .add("creationTime", creationTime) + .add("state", state) + .add("updateId", updateId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java index b4d0faf77..6c10c2953 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AwsCredentials.AwsCredentialsSerializer.class) +@JsonDeserialize(using = AwsCredentials.AwsCredentialsDeserializer.class) public class AwsCredentials { /** */ - @JsonProperty("sts_role") private StsRole stsRole; public AwsCredentials setStsRole(StsRole stsRole) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(AwsCredentials.class).add("stsRole", stsRole).toString(); } + + AwsCredentialsPb toPb() { + AwsCredentialsPb pb = new AwsCredentialsPb(); + pb.setStsRole(stsRole); + + return pb; + } + 
+ static AwsCredentials fromPb(AwsCredentialsPb pb) { + AwsCredentials model = new AwsCredentials(); + model.setStsRole(pb.getStsRole()); + + return model; + } + + public static class AwsCredentialsSerializer extends JsonSerializer { + @Override + public void serialize(AwsCredentials value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsCredentialsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsCredentialsDeserializer extends JsonDeserializer { + @Override + public AwsCredentials deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsCredentialsPb pb = mapper.readValue(p, AwsCredentialsPb.class); + return AwsCredentials.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentialsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentialsPb.java new file mode 100755 index 000000000..6dd2c7870 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentialsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AwsCredentialsPb { + @JsonProperty("sts_role") + private StsRole stsRole; + + public AwsCredentialsPb setStsRole(StsRole stsRole) { + this.stsRole = stsRole; + return this; + } + + public StsRole getStsRole() { + return stsRole; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsCredentialsPb that = (AwsCredentialsPb) o; + return Objects.equals(stsRole, that.stsRole); + } + + @Override + public int hashCode() { + return Objects.hash(stsRole); + } + + @Override + public String toString() { + return new ToStringer(AwsCredentialsPb.class).add("stsRole", stsRole).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java index aeaa56801..ad967ad39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AwsKeyInfo.AwsKeyInfoSerializer.class) +@JsonDeserialize(using = AwsKeyInfo.AwsKeyInfoDeserializer.class) public class AwsKeyInfo { /** The AWS KMS key alias. */ - @JsonProperty("key_alias") private String keyAlias; /** The AWS KMS key's Amazon Resource Name (ARN). */ - @JsonProperty("key_arn") private String keyArn; /** The AWS KMS key region. */ - @JsonProperty("key_region") private String keyRegion; /** @@ -26,7 +34,6 @@ public class AwsKeyInfo { * `true` or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to * use this key for encrypting EBS volumes, set to `false`. */ - @JsonProperty("reuse_key_for_cluster_volumes") private Boolean reuseKeyForClusterVolumes; public AwsKeyInfo setKeyAlias(String keyAlias) { @@ -90,4 +97,43 @@ public String toString() { .add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes) .toString(); } + + AwsKeyInfoPb toPb() { + AwsKeyInfoPb pb = new AwsKeyInfoPb(); + pb.setKeyAlias(keyAlias); + pb.setKeyArn(keyArn); + pb.setKeyRegion(keyRegion); + pb.setReuseKeyForClusterVolumes(reuseKeyForClusterVolumes); + + return pb; + } + + static AwsKeyInfo fromPb(AwsKeyInfoPb pb) { + AwsKeyInfo model = new AwsKeyInfo(); + model.setKeyAlias(pb.getKeyAlias()); + model.setKeyArn(pb.getKeyArn()); + model.setKeyRegion(pb.getKeyRegion()); + model.setReuseKeyForClusterVolumes(pb.getReuseKeyForClusterVolumes()); + + return model; + } + + public static class AwsKeyInfoSerializer extends JsonSerializer { + @Override + public void serialize(AwsKeyInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AwsKeyInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AwsKeyInfoDeserializer extends JsonDeserializer { + @Override + public AwsKeyInfo deserialize(JsonParser p, DeserializationContext ctxt) throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AwsKeyInfoPb pb = mapper.readValue(p, AwsKeyInfoPb.class); + return AwsKeyInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfoPb.java new file mode 100755 index 000000000..17d0f79fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfoPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AwsKeyInfoPb { + @JsonProperty("key_alias") + private String keyAlias; + + @JsonProperty("key_arn") + private String keyArn; + + @JsonProperty("key_region") + private String keyRegion; + + @JsonProperty("reuse_key_for_cluster_volumes") + private Boolean reuseKeyForClusterVolumes; + + public AwsKeyInfoPb setKeyAlias(String keyAlias) { + this.keyAlias = keyAlias; + return this; + } + + public String getKeyAlias() { + return keyAlias; + } + + public AwsKeyInfoPb setKeyArn(String keyArn) { + this.keyArn = keyArn; + return this; + } + + public String getKeyArn() { + return keyArn; + } + + public AwsKeyInfoPb setKeyRegion(String keyRegion) { + this.keyRegion = keyRegion; + return this; + } + + public String getKeyRegion() { + return keyRegion; + } + + public AwsKeyInfoPb setReuseKeyForClusterVolumes(Boolean reuseKeyForClusterVolumes) { + this.reuseKeyForClusterVolumes = reuseKeyForClusterVolumes; + return this; + } + + public Boolean getReuseKeyForClusterVolumes() { + return reuseKeyForClusterVolumes; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsKeyInfoPb that = (AwsKeyInfoPb) o; + return Objects.equals(keyAlias, that.keyAlias) + && Objects.equals(keyArn, that.keyArn) + && Objects.equals(keyRegion, that.keyRegion) + && Objects.equals(reuseKeyForClusterVolumes, that.reuseKeyForClusterVolumes); + } + + @Override + public int hashCode() { + return Objects.hash(keyAlias, keyArn, keyRegion, reuseKeyForClusterVolumes); + } + + @Override + public String toString() { + return new ToStringer(AwsKeyInfoPb.class) + .add("keyAlias", keyAlias) + .add("keyArn", keyArn) + .add("keyRegion", keyRegion) + .add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java index 949da641b..2f8b7f415 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
AzureWorkspaceInfo.AzureWorkspaceInfoSerializer.class) +@JsonDeserialize(using = AzureWorkspaceInfo.AzureWorkspaceInfoDeserializer.class) public class AzureWorkspaceInfo { /** Azure Resource Group name */ - @JsonProperty("resource_group") private String resourceGroup; /** Azure Subscription ID */ - @JsonProperty("subscription_id") private String subscriptionId; public AzureWorkspaceInfo setResourceGroup(String resourceGroup) { @@ -56,4 +65,40 @@ public String toString() { .add("subscriptionId", subscriptionId) .toString(); } + + AzureWorkspaceInfoPb toPb() { + AzureWorkspaceInfoPb pb = new AzureWorkspaceInfoPb(); + pb.setResourceGroup(resourceGroup); + pb.setSubscriptionId(subscriptionId); + + return pb; + } + + static AzureWorkspaceInfo fromPb(AzureWorkspaceInfoPb pb) { + AzureWorkspaceInfo model = new AzureWorkspaceInfo(); + model.setResourceGroup(pb.getResourceGroup()); + model.setSubscriptionId(pb.getSubscriptionId()); + + return model; + } + + public static class AzureWorkspaceInfoSerializer extends JsonSerializer { + @Override + public void serialize(AzureWorkspaceInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureWorkspaceInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureWorkspaceInfoDeserializer extends JsonDeserializer { + @Override + public AzureWorkspaceInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureWorkspaceInfoPb pb = mapper.readValue(p, AzureWorkspaceInfoPb.class); + return AzureWorkspaceInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfoPb.java new file mode 100755 index 000000000..e2bcf1738 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfoPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AzureWorkspaceInfoPb { + @JsonProperty("resource_group") + private String resourceGroup; + + @JsonProperty("subscription_id") + private String subscriptionId; + + public AzureWorkspaceInfoPb setResourceGroup(String resourceGroup) { + this.resourceGroup = resourceGroup; + return this; + } + + public String getResourceGroup() { + return resourceGroup; + } + + public AzureWorkspaceInfoPb setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureWorkspaceInfoPb that = (AzureWorkspaceInfoPb) o; + return Objects.equals(resourceGroup, that.resourceGroup) + && Objects.equals(subscriptionId, that.subscriptionId); + } + + @Override + public int hashCode() { + return Objects.hash(resourceGroup, subscriptionId); + } + + @Override + public String toString() { + return new ToStringer(AzureWorkspaceInfoPb.class) + 
.add("resourceGroup", resourceGroup) + .add("subscriptionId", subscriptionId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java index 31dea89b6..68c707906 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The general workspace configurations that are specific to cloud providers. */ @Generated +@JsonSerialize(using = CloudResourceContainer.CloudResourceContainerSerializer.class) +@JsonDeserialize(using = CloudResourceContainer.CloudResourceContainerDeserializer.class) public class CloudResourceContainer { /** The general workspace configurations that are specific to Google Cloud. 
*/ - @JsonProperty("gcp") private CustomerFacingGcpCloudResourceContainer gcp; public CloudResourceContainer setGcp(CustomerFacingGcpCloudResourceContainer gcp) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(CloudResourceContainer.class).add("gcp", gcp).toString(); } + + CloudResourceContainerPb toPb() { + CloudResourceContainerPb pb = new CloudResourceContainerPb(); + pb.setGcp(gcp); + + return pb; + } + + static CloudResourceContainer fromPb(CloudResourceContainerPb pb) { + CloudResourceContainer model = new CloudResourceContainer(); + model.setGcp(pb.getGcp()); + + return model; + } + + public static class CloudResourceContainerSerializer + extends JsonSerializer { + @Override + public void serialize( + CloudResourceContainer value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CloudResourceContainerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CloudResourceContainerDeserializer + extends JsonDeserializer { + @Override + public CloudResourceContainer deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CloudResourceContainerPb pb = mapper.readValue(p, CloudResourceContainerPb.class); + return CloudResourceContainer.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainerPb.java new file mode 100755 index 000000000..aec4a588e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainerPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The general workspace configurations that are specific to cloud providers. */ +@Generated +class CloudResourceContainerPb { + @JsonProperty("gcp") + private CustomerFacingGcpCloudResourceContainer gcp; + + public CloudResourceContainerPb setGcp(CustomerFacingGcpCloudResourceContainer gcp) { + this.gcp = gcp; + return this; + } + + public CustomerFacingGcpCloudResourceContainer getGcp() { + return gcp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CloudResourceContainerPb that = (CloudResourceContainerPb) o; + return Objects.equals(gcp, that.gcp); + } + + @Override + public int hashCode() { + return Objects.hash(gcp); + } + + @Override + public String toString() { + return new ToStringer(CloudResourceContainerPb.class).add("gcp", gcp).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Converters.java new file mode 100755 index 000000000..c5be46908 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.provisioning; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return 
Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that the seconds value is within the range supported by protobuf Duration + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that the seconds value is within the range supported by protobuf Timestamp + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java index ae5ac50f5..3435d8075 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateAwsKeyInfo.CreateAwsKeyInfoSerializer.class) +@JsonDeserialize(using = CreateAwsKeyInfo.CreateAwsKeyInfoDeserializer.class) public class CreateAwsKeyInfo { /** The AWS KMS key alias. */ - @JsonProperty("key_alias") private String keyAlias; /** * The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from * the ARN. */ - @JsonProperty("key_arn") private String keyArn; /** @@ -25,7 +34,6 @@ public class CreateAwsKeyInfo { * `true` or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key * also for encrypting EBS volumes, set this to `false`. 
*/ - @JsonProperty("reuse_key_for_cluster_volumes") private Boolean reuseKeyForClusterVolumes; public CreateAwsKeyInfo setKeyAlias(String keyAlias) { @@ -78,4 +86,42 @@ public String toString() { .add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes) .toString(); } + + CreateAwsKeyInfoPb toPb() { + CreateAwsKeyInfoPb pb = new CreateAwsKeyInfoPb(); + pb.setKeyAlias(keyAlias); + pb.setKeyArn(keyArn); + pb.setReuseKeyForClusterVolumes(reuseKeyForClusterVolumes); + + return pb; + } + + static CreateAwsKeyInfo fromPb(CreateAwsKeyInfoPb pb) { + CreateAwsKeyInfo model = new CreateAwsKeyInfo(); + model.setKeyAlias(pb.getKeyAlias()); + model.setKeyArn(pb.getKeyArn()); + model.setReuseKeyForClusterVolumes(pb.getReuseKeyForClusterVolumes()); + + return model; + } + + public static class CreateAwsKeyInfoSerializer extends JsonSerializer { + @Override + public void serialize(CreateAwsKeyInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAwsKeyInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAwsKeyInfoDeserializer extends JsonDeserializer { + @Override + public CreateAwsKeyInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAwsKeyInfoPb pb = mapper.readValue(p, CreateAwsKeyInfoPb.class); + return CreateAwsKeyInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfoPb.java new file mode 100755 index 000000000..e43ab2f21 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfoPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateAwsKeyInfoPb { + @JsonProperty("key_alias") + private String keyAlias; + + @JsonProperty("key_arn") + private String keyArn; + + @JsonProperty("reuse_key_for_cluster_volumes") + private Boolean reuseKeyForClusterVolumes; + + public CreateAwsKeyInfoPb setKeyAlias(String keyAlias) { + this.keyAlias = keyAlias; + return this; + } + + public String getKeyAlias() { + return keyAlias; + } + + public CreateAwsKeyInfoPb setKeyArn(String keyArn) { + this.keyArn = keyArn; + return this; + } + + public String getKeyArn() { + return keyArn; + } + + public CreateAwsKeyInfoPb setReuseKeyForClusterVolumes(Boolean reuseKeyForClusterVolumes) { + this.reuseKeyForClusterVolumes = reuseKeyForClusterVolumes; + return this; + } + + public Boolean getReuseKeyForClusterVolumes() { + return reuseKeyForClusterVolumes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAwsKeyInfoPb that = (CreateAwsKeyInfoPb) o; + return Objects.equals(keyAlias, that.keyAlias) + && Objects.equals(keyArn, that.keyArn) + && Objects.equals(reuseKeyForClusterVolumes, that.reuseKeyForClusterVolumes); + } + + @Override + public int hashCode() { + return Objects.hash(keyAlias, keyArn, reuseKeyForClusterVolumes); + } + + @Override + public String toString() { + return new ToStringer(CreateAwsKeyInfoPb.class) + .add("keyAlias", keyAlias) + .add("keyArn", keyArn) + .add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentials.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentials.java index dae553524..c9e7d0355 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentials.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateCredentialAwsCredentials.CreateCredentialAwsCredentialsSerializer.class) +@JsonDeserialize( + using = CreateCredentialAwsCredentials.CreateCredentialAwsCredentialsDeserializer.class) public class CreateCredentialAwsCredentials { /** */ - @JsonProperty("sts_role") private CreateCredentialStsRole stsRole; public CreateCredentialAwsCredentials setStsRole(CreateCredentialStsRole stsRole) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(CreateCredentialAwsCredentials.class).add("stsRole", stsRole).toString(); } + + CreateCredentialAwsCredentialsPb toPb() { + CreateCredentialAwsCredentialsPb pb = new CreateCredentialAwsCredentialsPb(); + pb.setStsRole(stsRole); + + return pb; + } + + static CreateCredentialAwsCredentials fromPb(CreateCredentialAwsCredentialsPb pb) { + 
CreateCredentialAwsCredentials model = new CreateCredentialAwsCredentials(); + model.setStsRole(pb.getStsRole()); + + return model; + } + + public static class CreateCredentialAwsCredentialsSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialAwsCredentials value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialAwsCredentialsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialAwsCredentialsDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialAwsCredentials deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialAwsCredentialsPb pb = + mapper.readValue(p, CreateCredentialAwsCredentialsPb.class); + return CreateCredentialAwsCredentials.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentialsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentialsPb.java new file mode 100755 index 000000000..cdc48167c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialAwsCredentialsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialAwsCredentialsPb { + @JsonProperty("sts_role") + private CreateCredentialStsRole stsRole; + + public CreateCredentialAwsCredentialsPb setStsRole(CreateCredentialStsRole stsRole) { + this.stsRole = stsRole; + return this; + } + + public CreateCredentialStsRole getStsRole() { + return stsRole; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialAwsCredentialsPb that = (CreateCredentialAwsCredentialsPb) o; + return Objects.equals(stsRole, that.stsRole); + } + + @Override + public int hashCode() { + return Objects.hash(stsRole); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialAwsCredentialsPb.class) + .add("stsRole", stsRole) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequest.java index 7c4a5bcbe..d597fee88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequest.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCredentialRequest.CreateCredentialRequestSerializer.class) +@JsonDeserialize(using = CreateCredentialRequest.CreateCredentialRequestDeserializer.class) public class CreateCredentialRequest { /** */ - @JsonProperty("aws_credentials") private CreateCredentialAwsCredentials awsCredentials; /** The human-readable name of the credential configuration object. */ - @JsonProperty("credentials_name") private String credentialsName; public CreateCredentialRequest setAwsCredentials(CreateCredentialAwsCredentials awsCredentials) { @@ -56,4 +65,43 @@ public String toString() { .add("credentialsName", credentialsName) .toString(); } + + CreateCredentialRequestPb toPb() { + CreateCredentialRequestPb pb = new CreateCredentialRequestPb(); + pb.setAwsCredentials(awsCredentials); + pb.setCredentialsName(credentialsName); + + return pb; + } + + static CreateCredentialRequest fromPb(CreateCredentialRequestPb pb) { + CreateCredentialRequest model = new CreateCredentialRequest(); + model.setAwsCredentials(pb.getAwsCredentials()); + model.setCredentialsName(pb.getCredentialsName()); + + return model; + } + + public static class CreateCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in 
the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialRequestPb pb = mapper.readValue(p, CreateCredentialRequestPb.class); + return CreateCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequestPb.java new file mode 100755 index 000000000..8b6d3a06e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialRequestPb { + @JsonProperty("aws_credentials") + private CreateCredentialAwsCredentials awsCredentials; + + @JsonProperty("credentials_name") + private String credentialsName; + + public CreateCredentialRequestPb setAwsCredentials( + CreateCredentialAwsCredentials awsCredentials) { + this.awsCredentials = awsCredentials; + return this; + } + + public CreateCredentialAwsCredentials getAwsCredentials() { + return awsCredentials; + } + + public CreateCredentialRequestPb setCredentialsName(String credentialsName) { + this.credentialsName = credentialsName; + return this; + } + + public String getCredentialsName() { + return credentialsName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialRequestPb that = (CreateCredentialRequestPb) o; + return Objects.equals(awsCredentials, that.awsCredentials) + && Objects.equals(credentialsName, that.credentialsName); + } + + @Override 
+ public int hashCode() { + return Objects.hash(awsCredentials, credentialsName); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialRequestPb.class) + .add("awsCredentials", awsCredentials) + .add("credentialsName", credentialsName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java index 461005798..6ee6956c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCredentialStsRole.CreateCredentialStsRoleSerializer.class) +@JsonDeserialize(using = CreateCredentialStsRole.CreateCredentialStsRoleDeserializer.class) public class CreateCredentialStsRole { /** The Amazon Resource Name (ARN) of the cross account role. 
*/ - @JsonProperty("role_arn") private String roleArn; public CreateCredentialStsRole setRoleArn(String roleArn) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateCredentialStsRole.class).add("roleArn", roleArn).toString(); } + + CreateCredentialStsRolePb toPb() { + CreateCredentialStsRolePb pb = new CreateCredentialStsRolePb(); + pb.setRoleArn(roleArn); + + return pb; + } + + static CreateCredentialStsRole fromPb(CreateCredentialStsRolePb pb) { + CreateCredentialStsRole model = new CreateCredentialStsRole(); + model.setRoleArn(pb.getRoleArn()); + + return model; + } + + public static class CreateCredentialStsRoleSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialStsRole value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialStsRolePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialStsRoleDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialStsRole deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialStsRolePb pb = mapper.readValue(p, CreateCredentialStsRolePb.class); + return CreateCredentialStsRole.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRolePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRolePb.java new file mode 100755 index 000000000..b4d6209de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRolePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialStsRolePb { + @JsonProperty("role_arn") + private String roleArn; + + public CreateCredentialStsRolePb setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialStsRolePb that = (CreateCredentialStsRolePb) o; + return Objects.equals(roleArn, that.roleArn); + } + + @Override + public int hashCode() { + return Objects.hash(roleArn); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialStsRolePb.class).add("roleArn", roleArn).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequest.java index 5ee64fc70..38205a8ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequest.java @@ -4,22 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateCustomerManagedKeyRequest.CreateCustomerManagedKeyRequestSerializer.class) +@JsonDeserialize( + using = CreateCustomerManagedKeyRequest.CreateCustomerManagedKeyRequestDeserializer.class) public class CreateCustomerManagedKeyRequest { /** */ - @JsonProperty("aws_key_info") private CreateAwsKeyInfo awsKeyInfo; /** */ - @JsonProperty("gcp_key_info") private CreateGcpKeyInfo gcpKeyInfo; /** The cases that the key can be used for. */ - @JsonProperty("use_cases") private Collection useCases; public CreateCustomerManagedKeyRequest setAwsKeyInfo(CreateAwsKeyInfo awsKeyInfo) { @@ -72,4 +82,46 @@ public String toString() { .add("useCases", useCases) .toString(); } + + CreateCustomerManagedKeyRequestPb toPb() { + CreateCustomerManagedKeyRequestPb pb = new CreateCustomerManagedKeyRequestPb(); + pb.setAwsKeyInfo(awsKeyInfo); + pb.setGcpKeyInfo(gcpKeyInfo); + pb.setUseCases(useCases); + + return pb; + } + + static CreateCustomerManagedKeyRequest fromPb(CreateCustomerManagedKeyRequestPb pb) { + CreateCustomerManagedKeyRequest model = new CreateCustomerManagedKeyRequest(); + model.setAwsKeyInfo(pb.getAwsKeyInfo()); + model.setGcpKeyInfo(pb.getGcpKeyInfo()); + model.setUseCases(pb.getUseCases()); + + return model; + } + + public static class CreateCustomerManagedKeyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCustomerManagedKeyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCustomerManagedKeyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCustomerManagedKeyRequestDeserializer + extends JsonDeserializer { + @Override + public 
CreateCustomerManagedKeyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCustomerManagedKeyRequestPb pb = + mapper.readValue(p, CreateCustomerManagedKeyRequestPb.class); + return CreateCustomerManagedKeyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequestPb.java new file mode 100755 index 000000000..380b11b4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCustomerManagedKeyRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateCustomerManagedKeyRequestPb { + @JsonProperty("aws_key_info") + private CreateAwsKeyInfo awsKeyInfo; + + @JsonProperty("gcp_key_info") + private CreateGcpKeyInfo gcpKeyInfo; + + @JsonProperty("use_cases") + private Collection useCases; + + public CreateCustomerManagedKeyRequestPb setAwsKeyInfo(CreateAwsKeyInfo awsKeyInfo) { + this.awsKeyInfo = awsKeyInfo; + return this; + } + + public CreateAwsKeyInfo getAwsKeyInfo() { + return awsKeyInfo; + } + + public CreateCustomerManagedKeyRequestPb setGcpKeyInfo(CreateGcpKeyInfo gcpKeyInfo) { + this.gcpKeyInfo = gcpKeyInfo; + return this; + } + + public CreateGcpKeyInfo getGcpKeyInfo() { + return gcpKeyInfo; + } + + public CreateCustomerManagedKeyRequestPb setUseCases(Collection useCases) { + this.useCases = useCases; + return this; + 
} + + public Collection getUseCases() { + return useCases; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCustomerManagedKeyRequestPb that = (CreateCustomerManagedKeyRequestPb) o; + return Objects.equals(awsKeyInfo, that.awsKeyInfo) + && Objects.equals(gcpKeyInfo, that.gcpKeyInfo) + && Objects.equals(useCases, that.useCases); + } + + @Override + public int hashCode() { + return Objects.hash(awsKeyInfo, gcpKeyInfo, useCases); + } + + @Override + public String toString() { + return new ToStringer(CreateCustomerManagedKeyRequestPb.class) + .add("awsKeyInfo", awsKeyInfo) + .add("gcpKeyInfo", gcpKeyInfo) + .add("useCases", useCases) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java index 2d62f0470..4b927e6e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
CreateGcpKeyInfo.CreateGcpKeyInfoSerializer.class) +@JsonDeserialize(using = CreateGcpKeyInfo.CreateGcpKeyInfoDeserializer.class) public class CreateGcpKeyInfo { /** The GCP KMS key's resource name */ - @JsonProperty("kms_key_id") private String kmsKeyId; public CreateGcpKeyInfo setKmsKeyId(String kmsKeyId) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(CreateGcpKeyInfo.class).add("kmsKeyId", kmsKeyId).toString(); } + + CreateGcpKeyInfoPb toPb() { + CreateGcpKeyInfoPb pb = new CreateGcpKeyInfoPb(); + pb.setKmsKeyId(kmsKeyId); + + return pb; + } + + static CreateGcpKeyInfo fromPb(CreateGcpKeyInfoPb pb) { + CreateGcpKeyInfo model = new CreateGcpKeyInfo(); + model.setKmsKeyId(pb.getKmsKeyId()); + + return model; + } + + public static class CreateGcpKeyInfoSerializer extends JsonSerializer { + @Override + public void serialize(CreateGcpKeyInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateGcpKeyInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateGcpKeyInfoDeserializer extends JsonDeserializer { + @Override + public CreateGcpKeyInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateGcpKeyInfoPb pb = mapper.readValue(p, CreateGcpKeyInfoPb.class); + return CreateGcpKeyInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfoPb.java new file mode 100755 index 000000000..77ce1ace7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfoPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateGcpKeyInfoPb { + @JsonProperty("kms_key_id") + private String kmsKeyId; + + public CreateGcpKeyInfoPb setKmsKeyId(String kmsKeyId) { + this.kmsKeyId = kmsKeyId; + return this; + } + + public String getKmsKeyId() { + return kmsKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateGcpKeyInfoPb that = (CreateGcpKeyInfoPb) o; + return Objects.equals(kmsKeyId, that.kmsKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(kmsKeyId); + } + + @Override + public String toString() { + return new ToStringer(CreateGcpKeyInfoPb.class).add("kmsKeyId", kmsKeyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java index 0c7ef5613..e45528e43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java @@ -4,35 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateNetworkRequest.CreateNetworkRequestSerializer.class) +@JsonDeserialize(using = CreateNetworkRequest.CreateNetworkRequestDeserializer.class) public class CreateNetworkRequest { /** * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and * secondary IP ranges). */ - @JsonProperty("gcp_network_info") private GcpNetworkInfo gcpNetworkInfo; /** The human-readable name of the network configuration. */ - @JsonProperty("network_name") private String networkName; /** * IDs of one to five security groups associated with this network. Security group IDs **cannot** * be used in multiple network configurations. */ - @JsonProperty("security_group_ids") private Collection securityGroupIds; /** * IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in * multiple network configurations. */ - @JsonProperty("subnet_ids") private Collection subnetIds; /** @@ -41,14 +48,12 @@ public class CreateNetworkRequest { * *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ */ - @JsonProperty("vpc_endpoints") private NetworkVpcEndpoints vpcEndpoints; /** * The ID of the VPC associated with this network. VPC IDs can be used in multiple network * configurations. */ - @JsonProperty("vpc_id") private String vpcId; public CreateNetworkRequest setGcpNetworkInfo(GcpNetworkInfo gcpNetworkInfo) { @@ -135,4 +140,50 @@ public String toString() { .add("vpcId", vpcId) .toString(); } + + CreateNetworkRequestPb toPb() { + CreateNetworkRequestPb pb = new CreateNetworkRequestPb(); + pb.setGcpNetworkInfo(gcpNetworkInfo); + pb.setNetworkName(networkName); + pb.setSecurityGroupIds(securityGroupIds); + pb.setSubnetIds(subnetIds); + pb.setVpcEndpoints(vpcEndpoints); + pb.setVpcId(vpcId); + + return pb; + } + + static CreateNetworkRequest fromPb(CreateNetworkRequestPb pb) { + CreateNetworkRequest model = new CreateNetworkRequest(); + model.setGcpNetworkInfo(pb.getGcpNetworkInfo()); + model.setNetworkName(pb.getNetworkName()); + model.setSecurityGroupIds(pb.getSecurityGroupIds()); + model.setSubnetIds(pb.getSubnetIds()); + model.setVpcEndpoints(pb.getVpcEndpoints()); + model.setVpcId(pb.getVpcId()); + + return model; + } + + public static class CreateNetworkRequestSerializer extends JsonSerializer { + @Override + public void serialize( + CreateNetworkRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateNetworkRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateNetworkRequestDeserializer + extends JsonDeserializer { + @Override + public CreateNetworkRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateNetworkRequestPb pb = mapper.readValue(p, CreateNetworkRequestPb.class); + return CreateNetworkRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequestPb.java new file mode 100755 index 000000000..e02f514fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequestPb.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateNetworkRequestPb { + @JsonProperty("gcp_network_info") + private GcpNetworkInfo gcpNetworkInfo; + + @JsonProperty("network_name") + private String networkName; + + @JsonProperty("security_group_ids") + private Collection securityGroupIds; + + @JsonProperty("subnet_ids") + private Collection subnetIds; + + @JsonProperty("vpc_endpoints") + private NetworkVpcEndpoints vpcEndpoints; + + @JsonProperty("vpc_id") + private String vpcId; + + public CreateNetworkRequestPb setGcpNetworkInfo(GcpNetworkInfo gcpNetworkInfo) { + this.gcpNetworkInfo = gcpNetworkInfo; + return this; + } + + public GcpNetworkInfo getGcpNetworkInfo() { + return gcpNetworkInfo; + } + + public CreateNetworkRequestPb setNetworkName(String networkName) { + this.networkName = networkName; + return this; + } + + public String getNetworkName() { + return networkName; + } + + public CreateNetworkRequestPb setSecurityGroupIds(Collection securityGroupIds) { + this.securityGroupIds = securityGroupIds; + return this; + } + + public Collection getSecurityGroupIds() { + 
return securityGroupIds; + } + + public CreateNetworkRequestPb setSubnetIds(Collection subnetIds) { + this.subnetIds = subnetIds; + return this; + } + + public Collection getSubnetIds() { + return subnetIds; + } + + public CreateNetworkRequestPb setVpcEndpoints(NetworkVpcEndpoints vpcEndpoints) { + this.vpcEndpoints = vpcEndpoints; + return this; + } + + public NetworkVpcEndpoints getVpcEndpoints() { + return vpcEndpoints; + } + + public CreateNetworkRequestPb setVpcId(String vpcId) { + this.vpcId = vpcId; + return this; + } + + public String getVpcId() { + return vpcId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNetworkRequestPb that = (CreateNetworkRequestPb) o; + return Objects.equals(gcpNetworkInfo, that.gcpNetworkInfo) + && Objects.equals(networkName, that.networkName) + && Objects.equals(securityGroupIds, that.securityGroupIds) + && Objects.equals(subnetIds, that.subnetIds) + && Objects.equals(vpcEndpoints, that.vpcEndpoints) + && Objects.equals(vpcId, that.vpcId); + } + + @Override + public int hashCode() { + return Objects.hash( + gcpNetworkInfo, networkName, securityGroupIds, subnetIds, vpcEndpoints, vpcId); + } + + @Override + public String toString() { + return new ToStringer(CreateNetworkRequestPb.class) + .add("gcpNetworkInfo", gcpNetworkInfo) + .add("networkName", networkName) + .add("securityGroupIds", securityGroupIds) + .add("subnetIds", subnetIds) + .add("vpcEndpoints", vpcEndpoints) + .add("vpcId", vpcId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java index fdc409d47..187d3e3a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = CreateStorageConfigurationRequest.CreateStorageConfigurationRequestSerializer.class) +@JsonDeserialize( + using = CreateStorageConfigurationRequest.CreateStorageConfigurationRequestDeserializer.class) public class CreateStorageConfigurationRequest { /** Root S3 bucket information. */ - @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; /** The human-readable name of the storage configuration. 
*/ - @JsonProperty("storage_configuration_name") private String storageConfigurationName; public CreateStorageConfigurationRequest setRootBucketInfo(RootBucketInfo rootBucketInfo) { @@ -57,4 +68,44 @@ public String toString() { .add("storageConfigurationName", storageConfigurationName) .toString(); } + + CreateStorageConfigurationRequestPb toPb() { + CreateStorageConfigurationRequestPb pb = new CreateStorageConfigurationRequestPb(); + pb.setRootBucketInfo(rootBucketInfo); + pb.setStorageConfigurationName(storageConfigurationName); + + return pb; + } + + static CreateStorageConfigurationRequest fromPb(CreateStorageConfigurationRequestPb pb) { + CreateStorageConfigurationRequest model = new CreateStorageConfigurationRequest(); + model.setRootBucketInfo(pb.getRootBucketInfo()); + model.setStorageConfigurationName(pb.getStorageConfigurationName()); + + return model; + } + + public static class CreateStorageConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateStorageConfigurationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateStorageConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateStorageConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public CreateStorageConfigurationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateStorageConfigurationRequestPb pb = + mapper.readValue(p, CreateStorageConfigurationRequestPb.class); + return CreateStorageConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequestPb.java new file mode 100755 index 000000000..da67538c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateStorageConfigurationRequestPb { + @JsonProperty("root_bucket_info") + private RootBucketInfo rootBucketInfo; + + @JsonProperty("storage_configuration_name") + private String storageConfigurationName; + + public CreateStorageConfigurationRequestPb setRootBucketInfo(RootBucketInfo rootBucketInfo) { + this.rootBucketInfo = rootBucketInfo; + return this; + } + + public RootBucketInfo getRootBucketInfo() { + return rootBucketInfo; + } + + public CreateStorageConfigurationRequestPb setStorageConfigurationName( + String storageConfigurationName) { + this.storageConfigurationName = storageConfigurationName; + return this; + } + + public String getStorageConfigurationName() { + return storageConfigurationName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateStorageConfigurationRequestPb that = (CreateStorageConfigurationRequestPb) o; + return Objects.equals(rootBucketInfo, 
that.rootBucketInfo) + && Objects.equals(storageConfigurationName, that.storageConfigurationName); + } + + @Override + public int hashCode() { + return Objects.hash(rootBucketInfo, storageConfigurationName); + } + + @Override + public String toString() { + return new ToStringer(CreateStorageConfigurationRequestPb.class) + .add("rootBucketInfo", rootBucketInfo) + .add("storageConfigurationName", storageConfigurationName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java index 1d87036a1..28c15791c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateVpcEndpointRequest.CreateVpcEndpointRequestSerializer.class) +@JsonDeserialize(using = CreateVpcEndpointRequest.CreateVpcEndpointRequestDeserializer.class) public class CreateVpcEndpointRequest { /** The ID of the VPC endpoint object in AWS. 
*/ - @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; /** The Google Cloud specific information for this Private Service Connect endpoint. */ - @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; /** The AWS region in which this VPC endpoint object exists. */ - @JsonProperty("region") private String region; /** The human-readable name of the storage configuration. */ - @JsonProperty("vpc_endpoint_name") private String vpcEndpointName; public CreateVpcEndpointRequest setAwsVpcEndpointId(String awsVpcEndpointId) { @@ -86,4 +93,47 @@ public String toString() { .add("vpcEndpointName", vpcEndpointName) .toString(); } + + CreateVpcEndpointRequestPb toPb() { + CreateVpcEndpointRequestPb pb = new CreateVpcEndpointRequestPb(); + pb.setAwsVpcEndpointId(awsVpcEndpointId); + pb.setGcpVpcEndpointInfo(gcpVpcEndpointInfo); + pb.setRegion(region); + pb.setVpcEndpointName(vpcEndpointName); + + return pb; + } + + static CreateVpcEndpointRequest fromPb(CreateVpcEndpointRequestPb pb) { + CreateVpcEndpointRequest model = new CreateVpcEndpointRequest(); + model.setAwsVpcEndpointId(pb.getAwsVpcEndpointId()); + model.setGcpVpcEndpointInfo(pb.getGcpVpcEndpointInfo()); + model.setRegion(pb.getRegion()); + model.setVpcEndpointName(pb.getVpcEndpointName()); + + return model; + } + + public static class CreateVpcEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateVpcEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateVpcEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateVpcEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public CreateVpcEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateVpcEndpointRequestPb pb = mapper.readValue(p, CreateVpcEndpointRequestPb.class); + return CreateVpcEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequestPb.java new file mode 100755 index 000000000..ac975a0e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateVpcEndpointRequestPb { + @JsonProperty("aws_vpc_endpoint_id") + private String awsVpcEndpointId; + + @JsonProperty("gcp_vpc_endpoint_info") + private GcpVpcEndpointInfo gcpVpcEndpointInfo; + + @JsonProperty("region") + private String region; + + @JsonProperty("vpc_endpoint_name") + private String vpcEndpointName; + + public CreateVpcEndpointRequestPb setAwsVpcEndpointId(String awsVpcEndpointId) { + this.awsVpcEndpointId = awsVpcEndpointId; + return this; + } + + public String getAwsVpcEndpointId() { + return awsVpcEndpointId; + } + + public CreateVpcEndpointRequestPb setGcpVpcEndpointInfo(GcpVpcEndpointInfo gcpVpcEndpointInfo) { + this.gcpVpcEndpointInfo = gcpVpcEndpointInfo; + return this; + } + + public GcpVpcEndpointInfo getGcpVpcEndpointInfo() { + return gcpVpcEndpointInfo; + } + + public CreateVpcEndpointRequestPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public CreateVpcEndpointRequestPb setVpcEndpointName(String vpcEndpointName) { + 
this.vpcEndpointName = vpcEndpointName; + return this; + } + + public String getVpcEndpointName() { + return vpcEndpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateVpcEndpointRequestPb that = (CreateVpcEndpointRequestPb) o; + return Objects.equals(awsVpcEndpointId, that.awsVpcEndpointId) + && Objects.equals(gcpVpcEndpointInfo, that.gcpVpcEndpointInfo) + && Objects.equals(region, that.region) + && Objects.equals(vpcEndpointName, that.vpcEndpointName); + } + + @Override + public int hashCode() { + return Objects.hash(awsVpcEndpointId, gcpVpcEndpointInfo, region, vpcEndpointName); + } + + @Override + public String toString() { + return new ToStringer(CreateVpcEndpointRequestPb.class) + .add("awsVpcEndpointId", awsVpcEndpointId) + .add("gcpVpcEndpointInfo", gcpVpcEndpointInfo) + .add("region", region) + .add("vpcEndpointName", vpcEndpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java index 2cc6ec80a..d5a48ed5d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java @@ -4,29 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = CreateWorkspaceRequest.CreateWorkspaceRequestSerializer.class) +@JsonDeserialize(using = CreateWorkspaceRequest.CreateWorkspaceRequestDeserializer.class) public class CreateWorkspaceRequest { /** The AWS region of the workspace's data plane. */ - @JsonProperty("aws_region") private String awsRegion; /** * The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field * to `gcp`. */ - @JsonProperty("cloud") private String cloud; /** The general workspace configurations that are specific to cloud providers. */ - @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; /** ID of the workspace's credential configuration object. */ - @JsonProperty("credentials_id") private String credentialsId; /** @@ -34,7 +41,6 @@ public class CreateWorkspaceRequest { * string of utf-8 characters. The value can be an empty string, with maximum length of 255 * characters. The key can be of maximum length of 127 characters, and cannot be empty. */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -62,7 +68,6 @@ public class CreateWorkspaceRequest { *

If a new workspace omits this property, the server generates a unique deployment name for * you with the pattern `dbc-xxxxxxxx-xxxx`. */ - @JsonProperty("deployment_name") private String deploymentName; /** @@ -86,22 +91,18 @@ public class CreateWorkspaceRequest { *

[calculate subnet sizes for a new workspace]: * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html */ - @JsonProperty("gcp_managed_network_config") private GcpManagedNetworkConfig gcpManagedNetworkConfig; /** The configurations for the GKE cluster of a Databricks workspace. */ - @JsonProperty("gke_config") private GkeConfig gkeConfig; /** Whether no public IP is enabled for the workspace. */ - @JsonProperty("is_no_public_ip_enabled") private Boolean isNoPublicIpEnabled; /** * The Google Cloud region of the workspace data plane in your Google account. For example, * `us-east4`. */ - @JsonProperty("location") private String location; /** @@ -110,11 +111,9 @@ public class CreateWorkspaceRequest { * and query history. The provided key configuration object property `use_cases` must contain * `MANAGED_SERVICES`. */ - @JsonProperty("managed_services_customer_managed_key_id") private String managedServicesCustomerManagedKeyId; /** */ - @JsonProperty("network_id") private String networkId; /** @@ -122,7 +121,6 @@ public class CreateWorkspaceRequest { * *

[AWS Pricing]: https://databricks.com/product/aws-pricing */ - @JsonProperty("pricing_tier") private PricingTier pricingTier; /** @@ -136,11 +134,9 @@ public class CreateWorkspaceRequest { * PrivateLink]: * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html */ - @JsonProperty("private_access_settings_id") private String privateAccessSettingsId; /** The ID of the workspace's storage configuration object. */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** @@ -148,11 +144,9 @@ public class CreateWorkspaceRequest { * the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS * volumes. The provided key configuration object property `use_cases` must contain `STORAGE`. */ - @JsonProperty("storage_customer_managed_key_id") private String storageCustomerManagedKeyId; /** The workspace's human-readable name. */ - @JsonProperty("workspace_name") private String workspaceName; public CreateWorkspaceRequest setAwsRegion(String awsRegion) { @@ -380,4 +374,73 @@ public String toString() { .add("workspaceName", workspaceName) .toString(); } + + CreateWorkspaceRequestPb toPb() { + CreateWorkspaceRequestPb pb = new CreateWorkspaceRequestPb(); + pb.setAwsRegion(awsRegion); + pb.setCloud(cloud); + pb.setCloudResourceContainer(cloudResourceContainer); + pb.setCredentialsId(credentialsId); + pb.setCustomTags(customTags); + pb.setDeploymentName(deploymentName); + pb.setGcpManagedNetworkConfig(gcpManagedNetworkConfig); + pb.setGkeConfig(gkeConfig); + pb.setIsNoPublicIpEnabled(isNoPublicIpEnabled); + pb.setLocation(location); + pb.setManagedServicesCustomerManagedKeyId(managedServicesCustomerManagedKeyId); + pb.setNetworkId(networkId); + pb.setPricingTier(pricingTier); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setStorageCustomerManagedKeyId(storageCustomerManagedKeyId); + pb.setWorkspaceName(workspaceName); 
+ + return pb; + } + + static CreateWorkspaceRequest fromPb(CreateWorkspaceRequestPb pb) { + CreateWorkspaceRequest model = new CreateWorkspaceRequest(); + model.setAwsRegion(pb.getAwsRegion()); + model.setCloud(pb.getCloud()); + model.setCloudResourceContainer(pb.getCloudResourceContainer()); + model.setCredentialsId(pb.getCredentialsId()); + model.setCustomTags(pb.getCustomTags()); + model.setDeploymentName(pb.getDeploymentName()); + model.setGcpManagedNetworkConfig(pb.getGcpManagedNetworkConfig()); + model.setGkeConfig(pb.getGkeConfig()); + model.setIsNoPublicIpEnabled(pb.getIsNoPublicIpEnabled()); + model.setLocation(pb.getLocation()); + model.setManagedServicesCustomerManagedKeyId(pb.getManagedServicesCustomerManagedKeyId()); + model.setNetworkId(pb.getNetworkId()); + model.setPricingTier(pb.getPricingTier()); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setStorageCustomerManagedKeyId(pb.getStorageCustomerManagedKeyId()); + model.setWorkspaceName(pb.getWorkspaceName()); + + return model; + } + + public static class CreateWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateWorkspaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public CreateWorkspaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateWorkspaceRequestPb pb = mapper.readValue(p, CreateWorkspaceRequestPb.class); + return CreateWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequestPb.java new file mode 100755 index 000000000..67cb428d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequestPb.java @@ -0,0 +1,290 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class CreateWorkspaceRequestPb { + @JsonProperty("aws_region") + private String awsRegion; + + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("cloud_resource_container") + private CloudResourceContainer cloudResourceContainer; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("deployment_name") + private String deploymentName; + + @JsonProperty("gcp_managed_network_config") + private GcpManagedNetworkConfig gcpManagedNetworkConfig; + + @JsonProperty("gke_config") + private GkeConfig gkeConfig; + + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + + @JsonProperty("location") + private String location; + + @JsonProperty("managed_services_customer_managed_key_id") + private String managedServicesCustomerManagedKeyId; + + @JsonProperty("network_id") + private String networkId; + + @JsonProperty("pricing_tier") + private PricingTier pricingTier; + + @JsonProperty("private_access_settings_id") + 
private String privateAccessSettingsId; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("storage_customer_managed_key_id") + private String storageCustomerManagedKeyId; + + @JsonProperty("workspace_name") + private String workspaceName; + + public CreateWorkspaceRequestPb setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + return this; + } + + public String getAwsRegion() { + return awsRegion; + } + + public CreateWorkspaceRequestPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public CreateWorkspaceRequestPb setCloudResourceContainer( + CloudResourceContainer cloudResourceContainer) { + this.cloudResourceContainer = cloudResourceContainer; + return this; + } + + public CloudResourceContainer getCloudResourceContainer() { + return cloudResourceContainer; + } + + public CreateWorkspaceRequestPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public CreateWorkspaceRequestPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public CreateWorkspaceRequestPb setDeploymentName(String deploymentName) { + this.deploymentName = deploymentName; + return this; + } + + public String getDeploymentName() { + return deploymentName; + } + + public CreateWorkspaceRequestPb setGcpManagedNetworkConfig( + GcpManagedNetworkConfig gcpManagedNetworkConfig) { + this.gcpManagedNetworkConfig = gcpManagedNetworkConfig; + return this; + } + + public GcpManagedNetworkConfig getGcpManagedNetworkConfig() { + return gcpManagedNetworkConfig; + } + + public CreateWorkspaceRequestPb setGkeConfig(GkeConfig gkeConfig) { + this.gkeConfig = gkeConfig; + return this; + } + + public GkeConfig getGkeConfig() { + return gkeConfig; + } + + public 
CreateWorkspaceRequestPb setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + + public CreateWorkspaceRequestPb setLocation(String location) { + this.location = location; + return this; + } + + public String getLocation() { + return location; + } + + public CreateWorkspaceRequestPb setManagedServicesCustomerManagedKeyId( + String managedServicesCustomerManagedKeyId) { + this.managedServicesCustomerManagedKeyId = managedServicesCustomerManagedKeyId; + return this; + } + + public String getManagedServicesCustomerManagedKeyId() { + return managedServicesCustomerManagedKeyId; + } + + public CreateWorkspaceRequestPb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + public CreateWorkspaceRequestPb setPricingTier(PricingTier pricingTier) { + this.pricingTier = pricingTier; + return this; + } + + public PricingTier getPricingTier() { + return pricingTier; + } + + public CreateWorkspaceRequestPb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + public CreateWorkspaceRequestPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public CreateWorkspaceRequestPb setStorageCustomerManagedKeyId( + String storageCustomerManagedKeyId) { + this.storageCustomerManagedKeyId = storageCustomerManagedKeyId; + return this; + } + + public String getStorageCustomerManagedKeyId() { + return storageCustomerManagedKeyId; + } + + public CreateWorkspaceRequestPb setWorkspaceName(String workspaceName) { + 
this.workspaceName = workspaceName; + return this; + } + + public String getWorkspaceName() { + return workspaceName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWorkspaceRequestPb that = (CreateWorkspaceRequestPb) o; + return Objects.equals(awsRegion, that.awsRegion) + && Objects.equals(cloud, that.cloud) + && Objects.equals(cloudResourceContainer, that.cloudResourceContainer) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(customTags, that.customTags) + && Objects.equals(deploymentName, that.deploymentName) + && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) + && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) + && Objects.equals(location, that.location) + && Objects.equals( + managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) + && Objects.equals(networkId, that.networkId) + && Objects.equals(pricingTier, that.pricingTier) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) + && Objects.equals(workspaceName, that.workspaceName); + } + + @Override + public int hashCode() { + return Objects.hash( + awsRegion, + cloud, + cloudResourceContainer, + credentialsId, + customTags, + deploymentName, + gcpManagedNetworkConfig, + gkeConfig, + isNoPublicIpEnabled, + location, + managedServicesCustomerManagedKeyId, + networkId, + pricingTier, + privateAccessSettingsId, + storageConfigurationId, + storageCustomerManagedKeyId, + workspaceName); + } + + @Override + public String toString() { + return new ToStringer(CreateWorkspaceRequestPb.class) + .add("awsRegion", awsRegion) + .add("cloud", cloud) + .add("cloudResourceContainer", cloudResourceContainer) + 
.add("credentialsId", credentialsId) + .add("customTags", customTags) + .add("deploymentName", deploymentName) + .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) + .add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) + .add("location", location) + .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) + .add("networkId", networkId) + .add("pricingTier", pricingTier) + .add("privateAccessSettingsId", privateAccessSettingsId) + .add("storageConfigurationId", storageConfigurationId) + .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) + .add("workspaceName", workspaceName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Credential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Credential.java index 42e97fffc..31d637932 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Credential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Credential.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Credential.CredentialSerializer.class) +@JsonDeserialize(using = Credential.CredentialDeserializer.class) public class Credential { 
/** The Databricks account ID that hosts the credential. */ - @JsonProperty("account_id") private String accountId; /** */ - @JsonProperty("aws_credentials") private AwsCredentials awsCredentials; /** Time in epoch milliseconds when the credential was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Databricks credential configuration ID. */ - @JsonProperty("credentials_id") private String credentialsId; /** The human-readable name of the credential configuration object. */ - @JsonProperty("credentials_name") private String credentialsName; public Credential setAccountId(String accountId) { @@ -101,4 +107,45 @@ public String toString() { .add("credentialsName", credentialsName) .toString(); } + + CredentialPb toPb() { + CredentialPb pb = new CredentialPb(); + pb.setAccountId(accountId); + pb.setAwsCredentials(awsCredentials); + pb.setCreationTime(creationTime); + pb.setCredentialsId(credentialsId); + pb.setCredentialsName(credentialsName); + + return pb; + } + + static Credential fromPb(CredentialPb pb) { + Credential model = new Credential(); + model.setAccountId(pb.getAccountId()); + model.setAwsCredentials(pb.getAwsCredentials()); + model.setCreationTime(pb.getCreationTime()); + model.setCredentialsId(pb.getCredentialsId()); + model.setCredentialsName(pb.getCredentialsName()); + + return model; + } + + public static class CredentialSerializer extends JsonSerializer { + @Override + public void serialize(Credential value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CredentialPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CredentialDeserializer extends JsonDeserializer { + @Override + public Credential deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CredentialPb pb = mapper.readValue(p, CredentialPb.class); + return Credential.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialPb.java new file mode 100755 index 000000000..e93fb8b15 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CredentialPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("aws_credentials") + private AwsCredentials awsCredentials; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("credentials_name") + private String credentialsName; + + public CredentialPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CredentialPb setAwsCredentials(AwsCredentials awsCredentials) { + this.awsCredentials = awsCredentials; + return this; + } + + public AwsCredentials getAwsCredentials() { + return awsCredentials; + } + + public CredentialPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public CredentialPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public CredentialPb 
setCredentialsName(String credentialsName) { + this.credentialsName = credentialsName; + return this; + } + + public String getCredentialsName() { + return credentialsName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialPb that = (CredentialPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(awsCredentials, that.awsCredentials) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(credentialsName, that.credentialsName); + } + + @Override + public int hashCode() { + return Objects.hash(accountId, awsCredentials, creationTime, credentialsId, credentialsName); + } + + @Override + public String toString() { + return new ToStringer(CredentialPb.class) + .add("accountId", accountId) + .add("awsCredentials", awsCredentials) + .add("creationTime", creationTime) + .add("credentialsId", credentialsId) + .add("credentialsName", credentialsName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java index 581cd163a..895871f41 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java @@ -23,7 +23,7 @@ public Credential create(CreateCredentialRequest request) { String.format("/api/2.0/accounts/%s/credentials", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Credential.class); @@ -40,7 +40,7 
@@ public void delete(DeleteCredentialRequest request) { apiClient.configuredAccountID(), request.getCredentialsId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public Credential get(GetCredentialRequest request) { apiClient.configuredAccountID(), request.getCredentialsId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Credential.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java index 5781e69d3..e7b623d26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java @@ -4,17 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** The general workspace configurations that are specific to Google Cloud. */ @Generated +@JsonSerialize( + using = + CustomerFacingGcpCloudResourceContainer.CustomerFacingGcpCloudResourceContainerSerializer + .class) +@JsonDeserialize( + using = + CustomerFacingGcpCloudResourceContainer.CustomerFacingGcpCloudResourceContainerDeserializer + .class) public class CustomerFacingGcpCloudResourceContainer { /** * The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your * workspace. */ - @JsonProperty("project_id") private String projectId; public CustomerFacingGcpCloudResourceContainer setProjectId(String projectId) { @@ -45,4 +61,45 @@ public String toString() { .add("projectId", projectId) .toString(); } + + CustomerFacingGcpCloudResourceContainerPb toPb() { + CustomerFacingGcpCloudResourceContainerPb pb = new CustomerFacingGcpCloudResourceContainerPb(); + pb.setProjectId(projectId); + + return pb; + } + + static CustomerFacingGcpCloudResourceContainer fromPb( + CustomerFacingGcpCloudResourceContainerPb pb) { + CustomerFacingGcpCloudResourceContainer model = new CustomerFacingGcpCloudResourceContainer(); + model.setProjectId(pb.getProjectId()); + + return model; + } + + public static class CustomerFacingGcpCloudResourceContainerSerializer + extends JsonSerializer { + @Override + public void serialize( + CustomerFacingGcpCloudResourceContainer value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CustomerFacingGcpCloudResourceContainerPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CustomerFacingGcpCloudResourceContainerDeserializer + extends JsonDeserializer { + @Override + public CustomerFacingGcpCloudResourceContainer deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomerFacingGcpCloudResourceContainerPb pb = + mapper.readValue(p, CustomerFacingGcpCloudResourceContainerPb.class); + return CustomerFacingGcpCloudResourceContainer.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainerPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainerPb.java new file mode 100755 index 000000000..4b34327df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainerPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The general workspace configurations that are specific to Google Cloud. 
*/ +@Generated +class CustomerFacingGcpCloudResourceContainerPb { + @JsonProperty("project_id") + private String projectId; + + public CustomerFacingGcpCloudResourceContainerPb setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomerFacingGcpCloudResourceContainerPb that = (CustomerFacingGcpCloudResourceContainerPb) o; + return Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(projectId); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingGcpCloudResourceContainerPb.class) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java index 40da65d77..259926e2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CustomerManagedKey.CustomerManagedKeySerializer.class) +@JsonDeserialize(using = CustomerManagedKey.CustomerManagedKeyDeserializer.class) public class CustomerManagedKey { /** The Databricks account ID that holds the customer-managed key. */ - @JsonProperty("account_id") private String accountId; /** */ - @JsonProperty("aws_key_info") private AwsKeyInfo awsKeyInfo; /** Time in epoch milliseconds when the customer key was created. */ - @JsonProperty("creation_time") private Long creationTime; /** ID of the encryption key configuration object. */ - @JsonProperty("customer_managed_key_id") private String customerManagedKeyId; /** */ - @JsonProperty("gcp_key_info") private GcpKeyInfo gcpKeyInfo; /** The cases that the key can be used for. */ - @JsonProperty("use_cases") private Collection useCases; public CustomerManagedKey setAccountId(String accountId) { @@ -118,4 +123,48 @@ public String toString() { .add("useCases", useCases) .toString(); } + + CustomerManagedKeyPb toPb() { + CustomerManagedKeyPb pb = new CustomerManagedKeyPb(); + pb.setAccountId(accountId); + pb.setAwsKeyInfo(awsKeyInfo); + pb.setCreationTime(creationTime); + pb.setCustomerManagedKeyId(customerManagedKeyId); + pb.setGcpKeyInfo(gcpKeyInfo); + pb.setUseCases(useCases); + + return pb; + } + + static CustomerManagedKey fromPb(CustomerManagedKeyPb pb) { + CustomerManagedKey model = new CustomerManagedKey(); + model.setAccountId(pb.getAccountId()); + model.setAwsKeyInfo(pb.getAwsKeyInfo()); + model.setCreationTime(pb.getCreationTime()); + model.setCustomerManagedKeyId(pb.getCustomerManagedKeyId()); + model.setGcpKeyInfo(pb.getGcpKeyInfo()); + model.setUseCases(pb.getUseCases()); + + return model; + } + + public static class CustomerManagedKeySerializer extends JsonSerializer { + @Override + public void serialize(CustomerManagedKey value, JsonGenerator gen, SerializerProvider provider) + 
throws IOException { + CustomerManagedKeyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CustomerManagedKeyDeserializer extends JsonDeserializer { + @Override + public CustomerManagedKey deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomerManagedKeyPb pb = mapper.readValue(p, CustomerManagedKeyPb.class); + return CustomerManagedKey.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKeyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKeyPb.java new file mode 100755 index 000000000..70babe72e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKeyPb.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CustomerManagedKeyPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("aws_key_info") + private AwsKeyInfo awsKeyInfo; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("customer_managed_key_id") + private String customerManagedKeyId; + + @JsonProperty("gcp_key_info") + private GcpKeyInfo gcpKeyInfo; + + @JsonProperty("use_cases") + private Collection useCases; + + public CustomerManagedKeyPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CustomerManagedKeyPb setAwsKeyInfo(AwsKeyInfo awsKeyInfo) { + this.awsKeyInfo = awsKeyInfo; + return this; + } + + public AwsKeyInfo getAwsKeyInfo() { + return awsKeyInfo; + } + + public CustomerManagedKeyPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public CustomerManagedKeyPb setCustomerManagedKeyId(String customerManagedKeyId) { + this.customerManagedKeyId = customerManagedKeyId; + return this; + } + + public String getCustomerManagedKeyId() { + return customerManagedKeyId; + } + + public CustomerManagedKeyPb setGcpKeyInfo(GcpKeyInfo gcpKeyInfo) { + this.gcpKeyInfo = gcpKeyInfo; + return this; + } + + public GcpKeyInfo getGcpKeyInfo() { + return gcpKeyInfo; + } + + public CustomerManagedKeyPb setUseCases(Collection useCases) { + this.useCases = useCases; + return this; + } + + public Collection getUseCases() { + return useCases; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CustomerManagedKeyPb that = (CustomerManagedKeyPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(awsKeyInfo, that.awsKeyInfo) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(customerManagedKeyId, that.customerManagedKeyId) + && Objects.equals(gcpKeyInfo, that.gcpKeyInfo) + && Objects.equals(useCases, that.useCases); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, awsKeyInfo, creationTime, customerManagedKeyId, gcpKeyInfo, useCases); + } + + @Override + public String toString() { + return new ToStringer(CustomerManagedKeyPb.class) + .add("accountId", accountId) + .add("awsKeyInfo", awsKeyInfo) + .add("creationTime", creationTime) + .add("customerManagedKeyId", customerManagedKeyId) + .add("gcpKeyInfo", gcpKeyInfo) + .add("useCases", useCases) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java index 35caca7ed..287b0204c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete credential configuration */ @Generated +@JsonSerialize(using = DeleteCredentialRequest.DeleteCredentialRequestSerializer.class) +@JsonDeserialize(using = DeleteCredentialRequest.DeleteCredentialRequestDeserializer.class) public class DeleteCredentialRequest { /** Databricks Account API credential configuration ID */ - @JsonIgnore private String credentialsId; + private String credentialsId; public DeleteCredentialRequest setCredentialsId(String credentialsId) { this.credentialsId = credentialsId; @@ -41,4 +52,41 @@ public String toString() { .add("credentialsId", credentialsId) .toString(); } + + DeleteCredentialRequestPb toPb() { + DeleteCredentialRequestPb pb = new DeleteCredentialRequestPb(); + pb.setCredentialsId(credentialsId); + + return pb; + } + + static DeleteCredentialRequest fromPb(DeleteCredentialRequestPb pb) { + DeleteCredentialRequest model = new DeleteCredentialRequest(); + model.setCredentialsId(pb.getCredentialsId()); + + return model; + } + + public static class DeleteCredentialRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCredentialRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCredentialRequestPb pb = mapper.readValue(p, DeleteCredentialRequestPb.class); + return DeleteCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequestPb.java new file mode 100755 index 000000000..b3b71a087 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete credential configuration */ +@Generated +class DeleteCredentialRequestPb { + @JsonIgnore private String credentialsId; + + public DeleteCredentialRequestPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCredentialRequestPb that = (DeleteCredentialRequestPb) o; + return Objects.equals(credentialsId, that.credentialsId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialsId); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialRequestPb.class) + .add("credentialsId", credentialsId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java index 
9f751cc88..20d5ab13b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete encryption key configuration */ @Generated +@JsonSerialize(using = DeleteEncryptionKeyRequest.DeleteEncryptionKeyRequestSerializer.class) +@JsonDeserialize(using = DeleteEncryptionKeyRequest.DeleteEncryptionKeyRequestDeserializer.class) public class DeleteEncryptionKeyRequest { /** Databricks encryption key configuration ID. 
*/ - @JsonIgnore private String customerManagedKeyId; + private String customerManagedKeyId; public DeleteEncryptionKeyRequest setCustomerManagedKeyId(String customerManagedKeyId) { this.customerManagedKeyId = customerManagedKeyId; @@ -41,4 +52,41 @@ public String toString() { .add("customerManagedKeyId", customerManagedKeyId) .toString(); } + + DeleteEncryptionKeyRequestPb toPb() { + DeleteEncryptionKeyRequestPb pb = new DeleteEncryptionKeyRequestPb(); + pb.setCustomerManagedKeyId(customerManagedKeyId); + + return pb; + } + + static DeleteEncryptionKeyRequest fromPb(DeleteEncryptionKeyRequestPb pb) { + DeleteEncryptionKeyRequest model = new DeleteEncryptionKeyRequest(); + model.setCustomerManagedKeyId(pb.getCustomerManagedKeyId()); + + return model; + } + + public static class DeleteEncryptionKeyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteEncryptionKeyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteEncryptionKeyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteEncryptionKeyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteEncryptionKeyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteEncryptionKeyRequestPb pb = mapper.readValue(p, DeleteEncryptionKeyRequestPb.class); + return DeleteEncryptionKeyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequestPb.java new file mode 100755 index 000000000..221a4fb5f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete encryption key configuration */ +@Generated +class DeleteEncryptionKeyRequestPb { + @JsonIgnore private String customerManagedKeyId; + + public DeleteEncryptionKeyRequestPb setCustomerManagedKeyId(String customerManagedKeyId) { + this.customerManagedKeyId = customerManagedKeyId; + return this; + } + + public String getCustomerManagedKeyId() { + return customerManagedKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteEncryptionKeyRequestPb that = (DeleteEncryptionKeyRequestPb) o; + return Objects.equals(customerManagedKeyId, that.customerManagedKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(customerManagedKeyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteEncryptionKeyRequestPb.class) + .add("customerManagedKeyId", customerManagedKeyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java index c40ea86cd..99a07b559 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a network configuration */ @Generated +@JsonSerialize(using = DeleteNetworkRequest.DeleteNetworkRequestSerializer.class) +@JsonDeserialize(using = DeleteNetworkRequest.DeleteNetworkRequestDeserializer.class) public class DeleteNetworkRequest { /** Databricks Account API network configuration ID. 
*/ - @JsonIgnore private String networkId; + private String networkId; public DeleteNetworkRequest setNetworkId(String networkId) { this.networkId = networkId; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteNetworkRequest.class).add("networkId", networkId).toString(); } + + DeleteNetworkRequestPb toPb() { + DeleteNetworkRequestPb pb = new DeleteNetworkRequestPb(); + pb.setNetworkId(networkId); + + return pb; + } + + static DeleteNetworkRequest fromPb(DeleteNetworkRequestPb pb) { + DeleteNetworkRequest model = new DeleteNetworkRequest(); + model.setNetworkId(pb.getNetworkId()); + + return model; + } + + public static class DeleteNetworkRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteNetworkRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteNetworkRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteNetworkRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteNetworkRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNetworkRequestPb pb = mapper.readValue(p, DeleteNetworkRequestPb.class); + return DeleteNetworkRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequestPb.java new file mode 100755 index 000000000..b14de3337 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a network configuration */ +@Generated +class DeleteNetworkRequestPb { + @JsonIgnore private String networkId; + + public DeleteNetworkRequestPb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteNetworkRequestPb that = (DeleteNetworkRequestPb) o; + return Objects.equals(networkId, that.networkId); + } + + @Override + public int hashCode() { + return Objects.hash(networkId); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkRequestPb.class).add("networkId", networkId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java index 64da3675e..089580b71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a private access settings object */ @Generated +@JsonSerialize(using = DeletePrivateAccesRequest.DeletePrivateAccesRequestSerializer.class) +@JsonDeserialize(using = DeletePrivateAccesRequest.DeletePrivateAccesRequestDeserializer.class) public class DeletePrivateAccesRequest { /** Databricks Account API private access settings ID. */ - @JsonIgnore private String privateAccessSettingsId; + private String privateAccessSettingsId; public DeletePrivateAccesRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { this.privateAccessSettingsId = privateAccessSettingsId; @@ -41,4 +52,41 @@ public String toString() { .add("privateAccessSettingsId", privateAccessSettingsId) .toString(); } + + DeletePrivateAccesRequestPb toPb() { + DeletePrivateAccesRequestPb pb = new DeletePrivateAccesRequestPb(); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + + return pb; + } + + static DeletePrivateAccesRequest fromPb(DeletePrivateAccesRequestPb pb) { + DeletePrivateAccesRequest model = new DeletePrivateAccesRequest(); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + + return model; + } + + public static class DeletePrivateAccesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePrivateAccesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePrivateAccesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePrivateAccesRequestDeserializer + extends JsonDeserializer { + @Override + public DeletePrivateAccesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePrivateAccesRequestPb pb = mapper.readValue(p, DeletePrivateAccesRequestPb.class); + return DeletePrivateAccesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequestPb.java new file mode 100755 index 000000000..24718cb47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a private access settings object */ +@Generated +class DeletePrivateAccesRequestPb { + @JsonIgnore private String privateAccessSettingsId; + + public DeletePrivateAccesRequestPb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePrivateAccesRequestPb that = (DeletePrivateAccesRequestPb) o; + return Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId); + } + + @Override + public int hashCode() { + return Objects.hash(privateAccessSettingsId); + } + + @Override + public String toString() { + return new ToStringer(DeletePrivateAccesRequestPb.class) + .add("privateAccessSettingsId", privateAccessSettingsId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java index 053e59bd5..dfd335a50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer 
extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponsePb.java new file mode 100755 index 000000000..ecc7203aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java index 8b337259c..82c6237ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete storage configuration */ @Generated +@JsonSerialize(using = DeleteStorageRequest.DeleteStorageRequestSerializer.class) +@JsonDeserialize(using = DeleteStorageRequest.DeleteStorageRequestDeserializer.class) public class DeleteStorageRequest { /** Databricks Account API storage configuration ID. */ - @JsonIgnore private String storageConfigurationId; + private String storageConfigurationId; public DeleteStorageRequest setStorageConfigurationId(String storageConfigurationId) { this.storageConfigurationId = storageConfigurationId; @@ -41,4 +52,40 @@ public String toString() { .add("storageConfigurationId", storageConfigurationId) .toString(); } + + DeleteStorageRequestPb toPb() { + DeleteStorageRequestPb pb = new DeleteStorageRequestPb(); + pb.setStorageConfigurationId(storageConfigurationId); + + return pb; + } + + static DeleteStorageRequest fromPb(DeleteStorageRequestPb pb) { + DeleteStorageRequest model = new DeleteStorageRequest(); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + + return model; + } + + public static class DeleteStorageRequestSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteStorageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteStorageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
DeleteStorageRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteStorageRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteStorageRequestPb pb = mapper.readValue(p, DeleteStorageRequestPb.class); + return DeleteStorageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequestPb.java new file mode 100755 index 000000000..9963625ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete storage configuration */ +@Generated +class DeleteStorageRequestPb { + @JsonIgnore private String storageConfigurationId; + + public DeleteStorageRequestPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteStorageRequestPb that = (DeleteStorageRequestPb) o; + return Objects.equals(storageConfigurationId, that.storageConfigurationId); + } + + @Override + public int hashCode() { + return Objects.hash(storageConfigurationId); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteStorageRequestPb.class) + .add("storageConfigurationId", storageConfigurationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java index c1ecb36ff..cf0f3dd33 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete VPC endpoint configuration */ @Generated +@JsonSerialize(using = DeleteVpcEndpointRequest.DeleteVpcEndpointRequestSerializer.class) +@JsonDeserialize(using = DeleteVpcEndpointRequest.DeleteVpcEndpointRequestDeserializer.class) public class DeleteVpcEndpointRequest { /** Databricks VPC endpoint ID. 
*/ - @JsonIgnore private String vpcEndpointId; + private String vpcEndpointId; public DeleteVpcEndpointRequest setVpcEndpointId(String vpcEndpointId) { this.vpcEndpointId = vpcEndpointId; @@ -41,4 +52,41 @@ public String toString() { .add("vpcEndpointId", vpcEndpointId) .toString(); } + + DeleteVpcEndpointRequestPb toPb() { + DeleteVpcEndpointRequestPb pb = new DeleteVpcEndpointRequestPb(); + pb.setVpcEndpointId(vpcEndpointId); + + return pb; + } + + static DeleteVpcEndpointRequest fromPb(DeleteVpcEndpointRequestPb pb) { + DeleteVpcEndpointRequest model = new DeleteVpcEndpointRequest(); + model.setVpcEndpointId(pb.getVpcEndpointId()); + + return model; + } + + public static class DeleteVpcEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteVpcEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteVpcEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteVpcEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteVpcEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteVpcEndpointRequestPb pb = mapper.readValue(p, DeleteVpcEndpointRequestPb.class); + return DeleteVpcEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequestPb.java new file mode 100755 index 000000000..61b877f6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete VPC endpoint configuration */ +@Generated +class DeleteVpcEndpointRequestPb { + @JsonIgnore private String vpcEndpointId; + + public DeleteVpcEndpointRequestPb setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteVpcEndpointRequestPb that = (DeleteVpcEndpointRequestPb) o; + return Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash(vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(DeleteVpcEndpointRequestPb.class) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java index a5d7ee825..725f1addf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a workspace */ @Generated +@JsonSerialize(using = DeleteWorkspaceRequest.DeleteWorkspaceRequestSerializer.class) +@JsonDeserialize(using = DeleteWorkspaceRequest.DeleteWorkspaceRequestDeserializer.class) public class DeleteWorkspaceRequest { /** Workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public DeleteWorkspaceRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWorkspaceRequest.class).add("workspaceId", workspaceId).toString(); } + + DeleteWorkspaceRequestPb toPb() { + DeleteWorkspaceRequestPb pb = new DeleteWorkspaceRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static DeleteWorkspaceRequest fromPb(DeleteWorkspaceRequestPb pb) { + DeleteWorkspaceRequest model = new DeleteWorkspaceRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class DeleteWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWorkspaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteWorkspaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWorkspaceRequestPb pb = mapper.readValue(p, DeleteWorkspaceRequestPb.class); + return DeleteWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequestPb.java new file mode 100755 index 000000000..2bc5ae3a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a workspace */ +@Generated +class DeleteWorkspaceRequestPb { + @JsonIgnore private Long workspaceId; + + public DeleteWorkspaceRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWorkspaceRequestPb that = (DeleteWorkspaceRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(DeleteWorkspaceRequestPb.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java index 8e103d747..94e77233c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java @@ -24,7 +24,7 @@ public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) { "/api/2.0/accounts/%s/customer-managed-keys", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CustomerManagedKey.class); @@ -41,7 +41,7 @@ public void delete(DeleteEncryptionKeyRequest request) { apiClient.configuredAccountID(), request.getCustomerManagedKeyId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public CustomerManagedKey get(GetEncryptionKeyRequest request) { apiClient.configuredAccountID(), request.getCustomerManagedKeyId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CustomerManagedKey.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java index 7654c68e7..3381163a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java @@ -4,21 +4,29 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalCustomerInfo.ExternalCustomerInfoSerializer.class) +@JsonDeserialize(using = ExternalCustomerInfo.ExternalCustomerInfoDeserializer.class) public class ExternalCustomerInfo { /** Email of the authoritative user. */ - @JsonProperty("authoritative_user_email") private String authoritativeUserEmail; /** The authoritative user full name. 
*/ - @JsonProperty("authoritative_user_full_name") private String authoritativeUserFullName; /** The legal entity name for the external workspace */ - @JsonProperty("customer_name") private String customerName; public ExternalCustomerInfo setAuthoritativeUserEmail(String authoritativeUserEmail) { @@ -71,4 +79,44 @@ public String toString() { .add("customerName", customerName) .toString(); } + + ExternalCustomerInfoPb toPb() { + ExternalCustomerInfoPb pb = new ExternalCustomerInfoPb(); + pb.setAuthoritativeUserEmail(authoritativeUserEmail); + pb.setAuthoritativeUserFullName(authoritativeUserFullName); + pb.setCustomerName(customerName); + + return pb; + } + + static ExternalCustomerInfo fromPb(ExternalCustomerInfoPb pb) { + ExternalCustomerInfo model = new ExternalCustomerInfo(); + model.setAuthoritativeUserEmail(pb.getAuthoritativeUserEmail()); + model.setAuthoritativeUserFullName(pb.getAuthoritativeUserFullName()); + model.setCustomerName(pb.getCustomerName()); + + return model; + } + + public static class ExternalCustomerInfoSerializer extends JsonSerializer { + @Override + public void serialize( + ExternalCustomerInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalCustomerInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalCustomerInfoDeserializer + extends JsonDeserializer { + @Override + public ExternalCustomerInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalCustomerInfoPb pb = mapper.readValue(p, ExternalCustomerInfoPb.class); + return ExternalCustomerInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfoPb.java new file mode 100755 index 000000000..a453b917d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfoPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalCustomerInfoPb { + @JsonProperty("authoritative_user_email") + private String authoritativeUserEmail; + + @JsonProperty("authoritative_user_full_name") + private String authoritativeUserFullName; + + @JsonProperty("customer_name") + private String customerName; + + public ExternalCustomerInfoPb setAuthoritativeUserEmail(String authoritativeUserEmail) { + this.authoritativeUserEmail = authoritativeUserEmail; + return this; + } + + public String getAuthoritativeUserEmail() { + return authoritativeUserEmail; + } + + public ExternalCustomerInfoPb setAuthoritativeUserFullName(String authoritativeUserFullName) { + this.authoritativeUserFullName = authoritativeUserFullName; + return this; + } + + public String getAuthoritativeUserFullName() { + return authoritativeUserFullName; + } + + public ExternalCustomerInfoPb setCustomerName(String customerName) { + this.customerName = customerName; + return this; + } + + public String getCustomerName() { + return customerName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == 
null || getClass() != o.getClass()) return false; + ExternalCustomerInfoPb that = (ExternalCustomerInfoPb) o; + return Objects.equals(authoritativeUserEmail, that.authoritativeUserEmail) + && Objects.equals(authoritativeUserFullName, that.authoritativeUserFullName) + && Objects.equals(customerName, that.customerName); + } + + @Override + public int hashCode() { + return Objects.hash(authoritativeUserEmail, authoritativeUserFullName, customerName); + } + + @Override + public String toString() { + return new ToStringer(ExternalCustomerInfoPb.class) + .add("authoritativeUserEmail", authoritativeUserEmail) + .add("authoritativeUserFullName", authoritativeUserFullName) + .add("customerName", customerName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java index be5eba082..9b88697b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GcpKeyInfo.GcpKeyInfoSerializer.class) +@JsonDeserialize(using = 
GcpKeyInfo.GcpKeyInfoDeserializer.class) public class GcpKeyInfo { /** The GCP KMS key's resource name */ - @JsonProperty("kms_key_id") private String kmsKeyId; public GcpKeyInfo setKmsKeyId(String kmsKeyId) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(GcpKeyInfo.class).add("kmsKeyId", kmsKeyId).toString(); } + + GcpKeyInfoPb toPb() { + GcpKeyInfoPb pb = new GcpKeyInfoPb(); + pb.setKmsKeyId(kmsKeyId); + + return pb; + } + + static GcpKeyInfo fromPb(GcpKeyInfoPb pb) { + GcpKeyInfo model = new GcpKeyInfo(); + model.setKmsKeyId(pb.getKmsKeyId()); + + return model; + } + + public static class GcpKeyInfoSerializer extends JsonSerializer { + @Override + public void serialize(GcpKeyInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpKeyInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpKeyInfoDeserializer extends JsonDeserializer { + @Override + public GcpKeyInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpKeyInfoPb pb = mapper.readValue(p, GcpKeyInfoPb.class); + return GcpKeyInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfoPb.java new file mode 100755 index 000000000..24eb319c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfoPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GcpKeyInfoPb { + @JsonProperty("kms_key_id") + private String kmsKeyId; + + public GcpKeyInfoPb setKmsKeyId(String kmsKeyId) { + this.kmsKeyId = kmsKeyId; + return this; + } + + public String getKmsKeyId() { + return kmsKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpKeyInfoPb that = (GcpKeyInfoPb) o; + return Objects.equals(kmsKeyId, that.kmsKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(kmsKeyId); + } + + @Override + public String toString() { + return new ToStringer(GcpKeyInfoPb.class).add("kmsKeyId", kmsKeyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java index e27c531f4..5de53aa34 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -29,26 +38,25 @@ * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html */ @Generated +@JsonSerialize(using = GcpManagedNetworkConfig.GcpManagedNetworkConfigSerializer.class) +@JsonDeserialize(using = GcpManagedNetworkConfig.GcpManagedNetworkConfigDeserializer.class) public class GcpManagedNetworkConfig { /** * The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than * `/21`. */ - @JsonProperty("gke_cluster_pod_ip_range") private String gkeClusterPodIpRange; /** * The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller * than `/27`. */ - @JsonProperty("gke_cluster_service_ip_range") private String gkeClusterServiceIpRange; /** * The IP range from which to allocate GKE cluster nodes. No bigger than `/9` and no smaller than * `/29`. 
*/ - @JsonProperty("subnet_cidr") private String subnetCidr; public GcpManagedNetworkConfig setGkeClusterPodIpRange(String gkeClusterPodIpRange) { @@ -101,4 +109,45 @@ public String toString() { .add("subnetCidr", subnetCidr) .toString(); } + + GcpManagedNetworkConfigPb toPb() { + GcpManagedNetworkConfigPb pb = new GcpManagedNetworkConfigPb(); + pb.setGkeClusterPodIpRange(gkeClusterPodIpRange); + pb.setGkeClusterServiceIpRange(gkeClusterServiceIpRange); + pb.setSubnetCidr(subnetCidr); + + return pb; + } + + static GcpManagedNetworkConfig fromPb(GcpManagedNetworkConfigPb pb) { + GcpManagedNetworkConfig model = new GcpManagedNetworkConfig(); + model.setGkeClusterPodIpRange(pb.getGkeClusterPodIpRange()); + model.setGkeClusterServiceIpRange(pb.getGkeClusterServiceIpRange()); + model.setSubnetCidr(pb.getSubnetCidr()); + + return model; + } + + public static class GcpManagedNetworkConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + GcpManagedNetworkConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpManagedNetworkConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpManagedNetworkConfigDeserializer + extends JsonDeserializer { + @Override + public GcpManagedNetworkConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpManagedNetworkConfigPb pb = mapper.readValue(p, GcpManagedNetworkConfigPb.class); + return GcpManagedNetworkConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfigPb.java new file mode 100755 index 000000000..d59ded41c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfigPb.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The network settings for the workspace. The configurations are only for Databricks-managed VPCs. + * It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range + * configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks + * detects an IP range overlap. + * + *

Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and + * all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, + * `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. + * + *

The sizes of these IP ranges affect the maximum number of nodes for the workspace. + * + *

**Important**: Confirm the IP ranges used by your Databricks workspace before creating the + * workspace. You cannot change them after your workspace is deployed. If the IP address ranges for + * your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To + * determine the address range sizes that you need, Databricks provides a calculator as a Microsoft + * Excel spreadsheet. See [calculate subnet sizes for a new workspace]. + * + *

[calculate subnet sizes for a new workspace]: + * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html + */ +@Generated +class GcpManagedNetworkConfigPb { + @JsonProperty("gke_cluster_pod_ip_range") + private String gkeClusterPodIpRange; + + @JsonProperty("gke_cluster_service_ip_range") + private String gkeClusterServiceIpRange; + + @JsonProperty("subnet_cidr") + private String subnetCidr; + + public GcpManagedNetworkConfigPb setGkeClusterPodIpRange(String gkeClusterPodIpRange) { + this.gkeClusterPodIpRange = gkeClusterPodIpRange; + return this; + } + + public String getGkeClusterPodIpRange() { + return gkeClusterPodIpRange; + } + + public GcpManagedNetworkConfigPb setGkeClusterServiceIpRange(String gkeClusterServiceIpRange) { + this.gkeClusterServiceIpRange = gkeClusterServiceIpRange; + return this; + } + + public String getGkeClusterServiceIpRange() { + return gkeClusterServiceIpRange; + } + + public GcpManagedNetworkConfigPb setSubnetCidr(String subnetCidr) { + this.subnetCidr = subnetCidr; + return this; + } + + public String getSubnetCidr() { + return subnetCidr; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpManagedNetworkConfigPb that = (GcpManagedNetworkConfigPb) o; + return Objects.equals(gkeClusterPodIpRange, that.gkeClusterPodIpRange) + && Objects.equals(gkeClusterServiceIpRange, that.gkeClusterServiceIpRange) + && Objects.equals(subnetCidr, that.subnetCidr); + } + + @Override + public int hashCode() { + return Objects.hash(gkeClusterPodIpRange, gkeClusterServiceIpRange, subnetCidr); + } + + @Override + public String toString() { + return new ToStringer(GcpManagedNetworkConfigPb.class) + .add("gkeClusterPodIpRange", gkeClusterPodIpRange) + .add("gkeClusterServiceIpRange", gkeClusterServiceIpRange) + .add("subnetCidr", subnetCidr) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java index 08f497789..f774a09cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,38 +21,34 @@ * secondary IP ranges). */ @Generated +@JsonSerialize(using = GcpNetworkInfo.GcpNetworkInfoSerializer.class) +@JsonDeserialize(using = GcpNetworkInfo.GcpNetworkInfoDeserializer.class) public class GcpNetworkInfo { /** The Google Cloud project ID of the VPC network. */ - @JsonProperty("network_project_id") private String networkProjectId; /** * The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP * range for its pods. This secondary IP range can be used by only one workspace. */ - @JsonProperty("pod_ip_range_name") private String podIpRangeName; /** * The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP * range for its services. This secondary IP range can be used by only one workspace. 
*/ - @JsonProperty("service_ip_range_name") private String serviceIpRangeName; /** The ID of the subnet associated with this network. */ - @JsonProperty("subnet_id") private String subnetId; /** The Google Cloud region of the workspace data plane (for example, `us-east4`). */ - @JsonProperty("subnet_region") private String subnetRegion; /** * The ID of the VPC associated with this network. VPC IDs can be used in multiple network * configurations. */ - @JsonProperty("vpc_id") private String vpcId; public GcpNetworkInfo setNetworkProjectId(String networkProjectId) { @@ -130,4 +135,48 @@ public String toString() { .add("vpcId", vpcId) .toString(); } + + GcpNetworkInfoPb toPb() { + GcpNetworkInfoPb pb = new GcpNetworkInfoPb(); + pb.setNetworkProjectId(networkProjectId); + pb.setPodIpRangeName(podIpRangeName); + pb.setServiceIpRangeName(serviceIpRangeName); + pb.setSubnetId(subnetId); + pb.setSubnetRegion(subnetRegion); + pb.setVpcId(vpcId); + + return pb; + } + + static GcpNetworkInfo fromPb(GcpNetworkInfoPb pb) { + GcpNetworkInfo model = new GcpNetworkInfo(); + model.setNetworkProjectId(pb.getNetworkProjectId()); + model.setPodIpRangeName(pb.getPodIpRangeName()); + model.setServiceIpRangeName(pb.getServiceIpRangeName()); + model.setSubnetId(pb.getSubnetId()); + model.setSubnetRegion(pb.getSubnetRegion()); + model.setVpcId(pb.getVpcId()); + + return model; + } + + public static class GcpNetworkInfoSerializer extends JsonSerializer { + @Override + public void serialize(GcpNetworkInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpNetworkInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpNetworkInfoDeserializer extends JsonDeserializer { + @Override + public GcpNetworkInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpNetworkInfoPb pb = mapper.readValue(p, GcpNetworkInfoPb.class); + return GcpNetworkInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfoPb.java new file mode 100755 index 000000000..9fdb3d20f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfoPb.java @@ -0,0 +1,118 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and + * secondary IP ranges). + */ +@Generated +class GcpNetworkInfoPb { + @JsonProperty("network_project_id") + private String networkProjectId; + + @JsonProperty("pod_ip_range_name") + private String podIpRangeName; + + @JsonProperty("service_ip_range_name") + private String serviceIpRangeName; + + @JsonProperty("subnet_id") + private String subnetId; + + @JsonProperty("subnet_region") + private String subnetRegion; + + @JsonProperty("vpc_id") + private String vpcId; + + public GcpNetworkInfoPb setNetworkProjectId(String networkProjectId) { + this.networkProjectId = networkProjectId; + return this; + } + + public String getNetworkProjectId() { + return networkProjectId; + } + + public GcpNetworkInfoPb setPodIpRangeName(String podIpRangeName) { + this.podIpRangeName = podIpRangeName; + return this; + } + + public String getPodIpRangeName() { + return podIpRangeName; + } + + public GcpNetworkInfoPb setServiceIpRangeName(String serviceIpRangeName) { + this.serviceIpRangeName = serviceIpRangeName; + return this; + } 
+ + public String getServiceIpRangeName() { + return serviceIpRangeName; + } + + public GcpNetworkInfoPb setSubnetId(String subnetId) { + this.subnetId = subnetId; + return this; + } + + public String getSubnetId() { + return subnetId; + } + + public GcpNetworkInfoPb setSubnetRegion(String subnetRegion) { + this.subnetRegion = subnetRegion; + return this; + } + + public String getSubnetRegion() { + return subnetRegion; + } + + public GcpNetworkInfoPb setVpcId(String vpcId) { + this.vpcId = vpcId; + return this; + } + + public String getVpcId() { + return vpcId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpNetworkInfoPb that = (GcpNetworkInfoPb) o; + return Objects.equals(networkProjectId, that.networkProjectId) + && Objects.equals(podIpRangeName, that.podIpRangeName) + && Objects.equals(serviceIpRangeName, that.serviceIpRangeName) + && Objects.equals(subnetId, that.subnetId) + && Objects.equals(subnetRegion, that.subnetRegion) + && Objects.equals(vpcId, that.vpcId); + } + + @Override + public int hashCode() { + return Objects.hash( + networkProjectId, podIpRangeName, serviceIpRangeName, subnetId, subnetRegion, vpcId); + } + + @Override + public String toString() { + return new ToStringer(GcpNetworkInfoPb.class) + .add("networkProjectId", networkProjectId) + .add("podIpRangeName", podIpRangeName) + .add("serviceIpRangeName", serviceIpRangeName) + .add("subnetId", subnetId) + .add("subnetRegion", subnetRegion) + .add("vpcId", vpcId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java index 3298b72bf..70e08c2e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The Google Cloud specific information for this Private Service Connect endpoint. */ @Generated +@JsonSerialize(using = GcpVpcEndpointInfo.GcpVpcEndpointInfoSerializer.class) +@JsonDeserialize(using = GcpVpcEndpointInfo.GcpVpcEndpointInfoDeserializer.class) public class GcpVpcEndpointInfo { /** Region of the PSC endpoint. */ - @JsonProperty("endpoint_region") private String endpointRegion; /** The Google Cloud project ID of the VPC network where the PSC connection resides. */ - @JsonProperty("project_id") private String projectId; /** The unique ID of this PSC connection. */ - @JsonProperty("psc_connection_id") private String pscConnectionId; /** The name of the PSC endpoint in the Google Cloud project. */ - @JsonProperty("psc_endpoint_name") private String pscEndpointName; /** The service attachment this PSC connection connects to. 
*/ - @JsonProperty("service_attachment_id") private String serviceAttachmentId; public GcpVpcEndpointInfo setEndpointRegion(String endpointRegion) { @@ -103,4 +109,46 @@ public String toString() { .add("serviceAttachmentId", serviceAttachmentId) .toString(); } + + GcpVpcEndpointInfoPb toPb() { + GcpVpcEndpointInfoPb pb = new GcpVpcEndpointInfoPb(); + pb.setEndpointRegion(endpointRegion); + pb.setProjectId(projectId); + pb.setPscConnectionId(pscConnectionId); + pb.setPscEndpointName(pscEndpointName); + pb.setServiceAttachmentId(serviceAttachmentId); + + return pb; + } + + static GcpVpcEndpointInfo fromPb(GcpVpcEndpointInfoPb pb) { + GcpVpcEndpointInfo model = new GcpVpcEndpointInfo(); + model.setEndpointRegion(pb.getEndpointRegion()); + model.setProjectId(pb.getProjectId()); + model.setPscConnectionId(pb.getPscConnectionId()); + model.setPscEndpointName(pb.getPscEndpointName()); + model.setServiceAttachmentId(pb.getServiceAttachmentId()); + + return model; + } + + public static class GcpVpcEndpointInfoSerializer extends JsonSerializer { + @Override + public void serialize(GcpVpcEndpointInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GcpVpcEndpointInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GcpVpcEndpointInfoDeserializer extends JsonDeserializer { + @Override + public GcpVpcEndpointInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GcpVpcEndpointInfoPb pb = mapper.readValue(p, GcpVpcEndpointInfoPb.class); + return GcpVpcEndpointInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfoPb.java new file mode 100755 index 000000000..8e7f41998 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfoPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Google Cloud specific information for this Private Service Connect endpoint. */ +@Generated +class GcpVpcEndpointInfoPb { + @JsonProperty("endpoint_region") + private String endpointRegion; + + @JsonProperty("project_id") + private String projectId; + + @JsonProperty("psc_connection_id") + private String pscConnectionId; + + @JsonProperty("psc_endpoint_name") + private String pscEndpointName; + + @JsonProperty("service_attachment_id") + private String serviceAttachmentId; + + public GcpVpcEndpointInfoPb setEndpointRegion(String endpointRegion) { + this.endpointRegion = endpointRegion; + return this; + } + + public String getEndpointRegion() { + return endpointRegion; + } + + public GcpVpcEndpointInfoPb setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public GcpVpcEndpointInfoPb setPscConnectionId(String pscConnectionId) { + this.pscConnectionId = pscConnectionId; + return this; + } + + public String getPscConnectionId() { + return pscConnectionId; + } + + public GcpVpcEndpointInfoPb 
setPscEndpointName(String pscEndpointName) { + this.pscEndpointName = pscEndpointName; + return this; + } + + public String getPscEndpointName() { + return pscEndpointName; + } + + public GcpVpcEndpointInfoPb setServiceAttachmentId(String serviceAttachmentId) { + this.serviceAttachmentId = serviceAttachmentId; + return this; + } + + public String getServiceAttachmentId() { + return serviceAttachmentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpVpcEndpointInfoPb that = (GcpVpcEndpointInfoPb) o; + return Objects.equals(endpointRegion, that.endpointRegion) + && Objects.equals(projectId, that.projectId) + && Objects.equals(pscConnectionId, that.pscConnectionId) + && Objects.equals(pscEndpointName, that.pscEndpointName) + && Objects.equals(serviceAttachmentId, that.serviceAttachmentId); + } + + @Override + public int hashCode() { + return Objects.hash( + endpointRegion, projectId, pscConnectionId, pscEndpointName, serviceAttachmentId); + } + + @Override + public String toString() { + return new ToStringer(GcpVpcEndpointInfoPb.class) + .add("endpointRegion", endpointRegion) + .add("projectId", projectId) + .add("pscConnectionId", pscConnectionId) + .add("pscEndpointName", pscEndpointName) + .add("serviceAttachmentId", serviceAttachmentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java index c7fe22dfa..294284fd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get credential configuration */ @Generated +@JsonSerialize(using = GetCredentialRequest.GetCredentialRequestSerializer.class) +@JsonDeserialize(using = GetCredentialRequest.GetCredentialRequestDeserializer.class) public class GetCredentialRequest { /** Databricks Account API credential configuration ID */ - @JsonIgnore private String credentialsId; + private String credentialsId; public GetCredentialRequest setCredentialsId(String credentialsId) { this.credentialsId = credentialsId; @@ -41,4 +52,40 @@ public String toString() { .add("credentialsId", credentialsId) .toString(); } + + GetCredentialRequestPb toPb() { + GetCredentialRequestPb pb = new GetCredentialRequestPb(); + pb.setCredentialsId(credentialsId); + + return pb; + } + + static GetCredentialRequest fromPb(GetCredentialRequestPb pb) { + GetCredentialRequest model = new GetCredentialRequest(); + model.setCredentialsId(pb.getCredentialsId()); + + return model; + } + + public static class GetCredentialRequestSerializer extends JsonSerializer { + @Override + public void serialize( + GetCredentialRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCredentialRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCredentialRequestDeserializer + extends JsonDeserializer { + @Override + public GetCredentialRequest 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCredentialRequestPb pb = mapper.readValue(p, GetCredentialRequestPb.class); + return GetCredentialRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequestPb.java new file mode 100755 index 000000000..93d3b33f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get credential configuration */ +@Generated +class GetCredentialRequestPb { + @JsonIgnore private String credentialsId; + + public GetCredentialRequestPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialRequestPb that = (GetCredentialRequestPb) o; + return Objects.equals(credentialsId, that.credentialsId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialsId); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialRequestPb.class) + .add("credentialsId", credentialsId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequest.java index 2f48fcf65..831f53063 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get encryption key configuration */ @Generated +@JsonSerialize(using = GetEncryptionKeyRequest.GetEncryptionKeyRequestSerializer.class) +@JsonDeserialize(using = GetEncryptionKeyRequest.GetEncryptionKeyRequestDeserializer.class) public class GetEncryptionKeyRequest { /** Databricks encryption key configuration ID. 
*/ - @JsonIgnore private String customerManagedKeyId; + private String customerManagedKeyId; public GetEncryptionKeyRequest setCustomerManagedKeyId(String customerManagedKeyId) { this.customerManagedKeyId = customerManagedKeyId; @@ -41,4 +52,41 @@ public String toString() { .add("customerManagedKeyId", customerManagedKeyId) .toString(); } + + GetEncryptionKeyRequestPb toPb() { + GetEncryptionKeyRequestPb pb = new GetEncryptionKeyRequestPb(); + pb.setCustomerManagedKeyId(customerManagedKeyId); + + return pb; + } + + static GetEncryptionKeyRequest fromPb(GetEncryptionKeyRequestPb pb) { + GetEncryptionKeyRequest model = new GetEncryptionKeyRequest(); + model.setCustomerManagedKeyId(pb.getCustomerManagedKeyId()); + + return model; + } + + public static class GetEncryptionKeyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetEncryptionKeyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEncryptionKeyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEncryptionKeyRequestDeserializer + extends JsonDeserializer { + @Override + public GetEncryptionKeyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEncryptionKeyRequestPb pb = mapper.readValue(p, GetEncryptionKeyRequestPb.class); + return GetEncryptionKeyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequestPb.java new file mode 100755 index 000000000..3de40bdaa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetEncryptionKeyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get encryption key configuration */ +@Generated +class GetEncryptionKeyRequestPb { + @JsonIgnore private String customerManagedKeyId; + + public GetEncryptionKeyRequestPb setCustomerManagedKeyId(String customerManagedKeyId) { + this.customerManagedKeyId = customerManagedKeyId; + return this; + } + + public String getCustomerManagedKeyId() { + return customerManagedKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEncryptionKeyRequestPb that = (GetEncryptionKeyRequestPb) o; + return Objects.equals(customerManagedKeyId, that.customerManagedKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(customerManagedKeyId); + } + + @Override + public String toString() { + return new ToStringer(GetEncryptionKeyRequestPb.class) + .add("customerManagedKeyId", customerManagedKeyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequest.java index 1627a7114..3e2df6383 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a network configuration */ @Generated +@JsonSerialize(using = GetNetworkRequest.GetNetworkRequestSerializer.class) +@JsonDeserialize(using = GetNetworkRequest.GetNetworkRequestDeserializer.class) public class GetNetworkRequest { /** Databricks Account API network configuration ID. 
*/ - @JsonIgnore private String networkId; + private String networkId; public GetNetworkRequest setNetworkId(String networkId) { this.networkId = networkId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetNetworkRequest.class).add("networkId", networkId).toString(); } + + GetNetworkRequestPb toPb() { + GetNetworkRequestPb pb = new GetNetworkRequestPb(); + pb.setNetworkId(networkId); + + return pb; + } + + static GetNetworkRequest fromPb(GetNetworkRequestPb pb) { + GetNetworkRequest model = new GetNetworkRequest(); + model.setNetworkId(pb.getNetworkId()); + + return model; + } + + public static class GetNetworkRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetNetworkRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetNetworkRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetNetworkRequestDeserializer extends JsonDeserializer { + @Override + public GetNetworkRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetNetworkRequestPb pb = mapper.readValue(p, GetNetworkRequestPb.class); + return GetNetworkRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequestPb.java new file mode 100755 index 000000000..93c9af5e2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetNetworkRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a network configuration */ +@Generated +class GetNetworkRequestPb { + @JsonIgnore private String networkId; + + public GetNetworkRequestPb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetNetworkRequestPb that = (GetNetworkRequestPb) o; + return Objects.equals(networkId, that.networkId); + } + + @Override + public int hashCode() { + return Objects.hash(networkId); + } + + @Override + public String toString() { + return new ToStringer(GetNetworkRequestPb.class).add("networkId", networkId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java index 7817a63f5..c4bacc131 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a private access settings object */ @Generated +@JsonSerialize(using = GetPrivateAccesRequest.GetPrivateAccesRequestSerializer.class) +@JsonDeserialize(using = GetPrivateAccesRequest.GetPrivateAccesRequestDeserializer.class) public class GetPrivateAccesRequest { /** Databricks Account API private access settings ID. */ - @JsonIgnore private String privateAccessSettingsId; + private String privateAccessSettingsId; public GetPrivateAccesRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { this.privateAccessSettingsId = privateAccessSettingsId; @@ -41,4 +52,41 @@ public String toString() { .add("privateAccessSettingsId", privateAccessSettingsId) .toString(); } + + GetPrivateAccesRequestPb toPb() { + GetPrivateAccesRequestPb pb = new GetPrivateAccesRequestPb(); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + + return pb; + } + + static GetPrivateAccesRequest fromPb(GetPrivateAccesRequestPb pb) { + GetPrivateAccesRequest model = new GetPrivateAccesRequest(); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + + return model; + } + + public static class GetPrivateAccesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPrivateAccesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPrivateAccesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPrivateAccesRequestDeserializer + extends JsonDeserializer { + @Override + public GetPrivateAccesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPrivateAccesRequestPb pb = mapper.readValue(p, GetPrivateAccesRequestPb.class); + return GetPrivateAccesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequestPb.java new file mode 100755 index 000000000..2ef7ad45f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a private access settings object */ +@Generated +class GetPrivateAccesRequestPb { + @JsonIgnore private String privateAccessSettingsId; + + public GetPrivateAccesRequestPb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPrivateAccesRequestPb that = (GetPrivateAccesRequestPb) o; + return Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId); + } + + @Override + public int hashCode() { + return Objects.hash(privateAccessSettingsId); + } + + @Override + public String toString() { + return new ToStringer(GetPrivateAccesRequestPb.class) + .add("privateAccessSettingsId", privateAccessSettingsId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java index aaa9f9050..a919e9687 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get storage configuration */ @Generated +@JsonSerialize(using = GetStorageRequest.GetStorageRequestSerializer.class) +@JsonDeserialize(using = GetStorageRequest.GetStorageRequestDeserializer.class) public class GetStorageRequest { /** Databricks Account API storage configuration ID. 
*/ - @JsonIgnore private String storageConfigurationId; + private String storageConfigurationId; public GetStorageRequest setStorageConfigurationId(String storageConfigurationId) { this.storageConfigurationId = storageConfigurationId; @@ -41,4 +52,38 @@ public String toString() { .add("storageConfigurationId", storageConfigurationId) .toString(); } + + GetStorageRequestPb toPb() { + GetStorageRequestPb pb = new GetStorageRequestPb(); + pb.setStorageConfigurationId(storageConfigurationId); + + return pb; + } + + static GetStorageRequest fromPb(GetStorageRequestPb pb) { + GetStorageRequest model = new GetStorageRequest(); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + + return model; + } + + public static class GetStorageRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetStorageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStorageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStorageRequestDeserializer extends JsonDeserializer { + @Override + public GetStorageRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStorageRequestPb pb = mapper.readValue(p, GetStorageRequestPb.class); + return GetStorageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequestPb.java new file mode 100755 index 000000000..a148495b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get storage configuration */ +@Generated +class GetStorageRequestPb { + @JsonIgnore private String storageConfigurationId; + + public GetStorageRequestPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStorageRequestPb that = (GetStorageRequestPb) o; + return Objects.equals(storageConfigurationId, that.storageConfigurationId); + } + + @Override + public int hashCode() { + return Objects.hash(storageConfigurationId); + } + + @Override + public String toString() { + return new ToStringer(GetStorageRequestPb.class) + .add("storageConfigurationId", storageConfigurationId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequest.java index 4bd83e796..bbab8bbdc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a VPC endpoint configuration */ @Generated +@JsonSerialize(using = GetVpcEndpointRequest.GetVpcEndpointRequestSerializer.class) +@JsonDeserialize(using = GetVpcEndpointRequest.GetVpcEndpointRequestDeserializer.class) public class GetVpcEndpointRequest { /** Databricks VPC endpoint ID. */ - @JsonIgnore private String vpcEndpointId; + private String vpcEndpointId; public GetVpcEndpointRequest setVpcEndpointId(String vpcEndpointId) { this.vpcEndpointId = vpcEndpointId; @@ -41,4 +52,41 @@ public String toString() { .add("vpcEndpointId", vpcEndpointId) .toString(); } + + GetVpcEndpointRequestPb toPb() { + GetVpcEndpointRequestPb pb = new GetVpcEndpointRequestPb(); + pb.setVpcEndpointId(vpcEndpointId); + + return pb; + } + + static GetVpcEndpointRequest fromPb(GetVpcEndpointRequestPb pb) { + GetVpcEndpointRequest model = new GetVpcEndpointRequest(); + model.setVpcEndpointId(pb.getVpcEndpointId()); + + return model; + } + + public static class GetVpcEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetVpcEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetVpcEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetVpcEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public GetVpcEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetVpcEndpointRequestPb pb = mapper.readValue(p, GetVpcEndpointRequestPb.class); + return GetVpcEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequestPb.java new file mode 100755 index 000000000..1d2103f81 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpcEndpointRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a VPC endpoint configuration */ +@Generated +class GetVpcEndpointRequestPb { + @JsonIgnore private String vpcEndpointId; + + public GetVpcEndpointRequestPb setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetVpcEndpointRequestPb that = (GetVpcEndpointRequestPb) o; + return Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash(vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(GetVpcEndpointRequestPb.class) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java index b118a8bf7..39a918f5c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a workspace */ @Generated +@JsonSerialize(using = GetWorkspaceRequest.GetWorkspaceRequestSerializer.class) +@JsonDeserialize(using = GetWorkspaceRequest.GetWorkspaceRequestDeserializer.class) public class GetWorkspaceRequest { /** Workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public GetWorkspaceRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetWorkspaceRequest.class).add("workspaceId", workspaceId).toString(); } + + GetWorkspaceRequestPb toPb() { + GetWorkspaceRequestPb pb = new GetWorkspaceRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static GetWorkspaceRequest fromPb(GetWorkspaceRequestPb pb) { + GetWorkspaceRequest model = new GetWorkspaceRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class GetWorkspaceRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetWorkspaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceRequestPb pb = mapper.readValue(p, GetWorkspaceRequestPb.class); + return GetWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequestPb.java new file mode 100755 index 000000000..260552d66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a workspace */ +@Generated +class GetWorkspaceRequestPb { + @JsonIgnore private Long workspaceId; + + public GetWorkspaceRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceRequestPb that = (GetWorkspaceRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceRequestPb.class).add("workspaceId", workspaceId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java index 064319e4f..488c3784f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The configurations for the GKE cluster of a Databricks workspace. */ @Generated +@JsonSerialize(using = GkeConfig.GkeConfigSerializer.class) +@JsonDeserialize(using = GkeConfig.GkeConfigDeserializer.class) public class GkeConfig { /** * Specifies the network connectivity types for the GKE nodes and the GKE master network. @@ -19,7 +30,6 @@ public class GkeConfig { *

Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE * cluster have public IP addresses. */ - @JsonProperty("connectivity_type") private GkeConfigConnectivityType connectivityType; /** @@ -28,7 +38,6 @@ public class GkeConfig { * *

It must be exactly as big as `/28`. */ - @JsonProperty("master_ip_range") private String masterIpRange; public GkeConfig setConnectivityType(GkeConfigConnectivityType connectivityType) { @@ -70,4 +79,39 @@ public String toString() { .add("masterIpRange", masterIpRange) .toString(); } + + GkeConfigPb toPb() { + GkeConfigPb pb = new GkeConfigPb(); + pb.setConnectivityType(connectivityType); + pb.setMasterIpRange(masterIpRange); + + return pb; + } + + static GkeConfig fromPb(GkeConfigPb pb) { + GkeConfig model = new GkeConfig(); + model.setConnectivityType(pb.getConnectivityType()); + model.setMasterIpRange(pb.getMasterIpRange()); + + return model; + } + + public static class GkeConfigSerializer extends JsonSerializer { + @Override + public void serialize(GkeConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GkeConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GkeConfigDeserializer extends JsonDeserializer { + @Override + public GkeConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GkeConfigPb pb = mapper.readValue(p, GkeConfigPb.class); + return GkeConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfigPb.java new file mode 100755 index 000000000..f4ae218d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfigPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The configurations for the GKE cluster of a Databricks workspace. */ +@Generated +class GkeConfigPb { + @JsonProperty("connectivity_type") + private GkeConfigConnectivityType connectivityType; + + @JsonProperty("master_ip_range") + private String masterIpRange; + + public GkeConfigPb setConnectivityType(GkeConfigConnectivityType connectivityType) { + this.connectivityType = connectivityType; + return this; + } + + public GkeConfigConnectivityType getConnectivityType() { + return connectivityType; + } + + public GkeConfigPb setMasterIpRange(String masterIpRange) { + this.masterIpRange = masterIpRange; + return this; + } + + public String getMasterIpRange() { + return masterIpRange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GkeConfigPb that = (GkeConfigPb) o; + return Objects.equals(connectivityType, that.connectivityType) + && Objects.equals(masterIpRange, that.masterIpRange); + } + + @Override + public int hashCode() { + return Objects.hash(connectivityType, masterIpRange); + } + + @Override + public String toString() { + return new ToStringer(GkeConfigPb.class) + .add("connectivityType", connectivityType) + .add("masterIpRange", masterIpRange) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java index 428d49863..4f8f1c5da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java @@ -4,45 +4,48 @@ import com.databricks.sdk.support.Generated; 
import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Network.NetworkSerializer.class) +@JsonDeserialize(using = Network.NetworkDeserializer.class) public class Network { /** The Databricks account ID associated with this network configuration. */ - @JsonProperty("account_id") private String accountId; /** Time in epoch milliseconds when the network was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Array of error messages about the network configuration. */ - @JsonProperty("error_messages") private Collection errorMessages; /** * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and * secondary IP ranges). */ - @JsonProperty("gcp_network_info") private GcpNetworkInfo gcpNetworkInfo; /** The Databricks network configuration ID. */ - @JsonProperty("network_id") private String networkId; /** The human-readable name of the network configuration. */ - @JsonProperty("network_name") private String networkName; /** */ - @JsonProperty("security_group_ids") private Collection securityGroupIds; /** */ - @JsonProperty("subnet_ids") private Collection subnetIds; /** @@ -51,29 +54,24 @@ public class Network { * *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ */ - @JsonProperty("vpc_endpoints") private NetworkVpcEndpoints vpcEndpoints; /** * The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple * networks. */ - @JsonProperty("vpc_id") private String vpcId; /** * The status of this network configuration object in terms of its use in a workspace: * * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. */ - @JsonProperty("vpc_status") private VpcStatus vpcStatus; /** Array of warning messages about the network configuration. */ - @JsonProperty("warning_messages") private Collection warningMessages; /** Workspace ID associated with this network configuration. */ - @JsonProperty("workspace_id") private Long workspaceId; public Network setAccountId(String accountId) { @@ -249,4 +247,61 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + NetworkPb toPb() { + NetworkPb pb = new NetworkPb(); + pb.setAccountId(accountId); + pb.setCreationTime(creationTime); + pb.setErrorMessages(errorMessages); + pb.setGcpNetworkInfo(gcpNetworkInfo); + pb.setNetworkId(networkId); + pb.setNetworkName(networkName); + pb.setSecurityGroupIds(securityGroupIds); + pb.setSubnetIds(subnetIds); + pb.setVpcEndpoints(vpcEndpoints); + pb.setVpcId(vpcId); + pb.setVpcStatus(vpcStatus); + pb.setWarningMessages(warningMessages); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static Network fromPb(NetworkPb pb) { + Network model = new Network(); + model.setAccountId(pb.getAccountId()); + model.setCreationTime(pb.getCreationTime()); + model.setErrorMessages(pb.getErrorMessages()); + model.setGcpNetworkInfo(pb.getGcpNetworkInfo()); + model.setNetworkId(pb.getNetworkId()); + model.setNetworkName(pb.getNetworkName()); + model.setSecurityGroupIds(pb.getSecurityGroupIds()); + model.setSubnetIds(pb.getSubnetIds()); + model.setVpcEndpoints(pb.getVpcEndpoints()); + model.setVpcId(pb.getVpcId()); + 
model.setVpcStatus(pb.getVpcStatus()); + model.setWarningMessages(pb.getWarningMessages()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class NetworkSerializer extends JsonSerializer { + @Override + public void serialize(Network value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkDeserializer extends JsonDeserializer { + @Override + public Network deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkPb pb = mapper.readValue(p, NetworkPb.class); + return Network.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java index 8d00d7c3e..ebe13f055 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; 
@Generated +@JsonSerialize(using = NetworkHealth.NetworkHealthSerializer.class) +@JsonDeserialize(using = NetworkHealth.NetworkHealthDeserializer.class) public class NetworkHealth { /** Details of the error. */ - @JsonProperty("error_message") private String errorMessage; /** * The AWS resource associated with this error: credentials, VPC, subnet, security group, or * network ACL. */ - @JsonProperty("error_type") private ErrorType errorType; public NetworkHealth setErrorMessage(String errorMessage) { @@ -59,4 +68,39 @@ public String toString() { .add("errorType", errorType) .toString(); } + + NetworkHealthPb toPb() { + NetworkHealthPb pb = new NetworkHealthPb(); + pb.setErrorMessage(errorMessage); + pb.setErrorType(errorType); + + return pb; + } + + static NetworkHealth fromPb(NetworkHealthPb pb) { + NetworkHealth model = new NetworkHealth(); + model.setErrorMessage(pb.getErrorMessage()); + model.setErrorType(pb.getErrorType()); + + return model; + } + + public static class NetworkHealthSerializer extends JsonSerializer { + @Override + public void serialize(NetworkHealth value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkHealthPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkHealthDeserializer extends JsonDeserializer { + @Override + public NetworkHealth deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkHealthPb pb = mapper.readValue(p, NetworkHealthPb.class); + return NetworkHealth.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealthPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealthPb.java new file mode 100755 index 000000000..c9b793acd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealthPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NetworkHealthPb { + @JsonProperty("error_message") + private String errorMessage; + + @JsonProperty("error_type") + private ErrorType errorType; + + public NetworkHealthPb setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public NetworkHealthPb setErrorType(ErrorType errorType) { + this.errorType = errorType; + return this; + } + + public ErrorType getErrorType() { + return errorType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkHealthPb that = (NetworkHealthPb) o; + return Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(errorType, that.errorType); + } + + @Override + public int hashCode() { + return Objects.hash(errorMessage, errorType); + } + + @Override + public String toString() { + return new ToStringer(NetworkHealthPb.class) + .add("errorMessage", errorMessage) + .add("errorType", errorType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkPb.java new file mode 100755 index 000000000..c78874f9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class NetworkPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("error_messages") + private Collection errorMessages; + + @JsonProperty("gcp_network_info") + private GcpNetworkInfo gcpNetworkInfo; + + @JsonProperty("network_id") + private String networkId; + + @JsonProperty("network_name") + private String networkName; + + @JsonProperty("security_group_ids") + private Collection securityGroupIds; + + @JsonProperty("subnet_ids") + private Collection subnetIds; + + @JsonProperty("vpc_endpoints") + private NetworkVpcEndpoints vpcEndpoints; + + @JsonProperty("vpc_id") + private String vpcId; + + @JsonProperty("vpc_status") + private VpcStatus vpcStatus; + + @JsonProperty("warning_messages") + private Collection warningMessages; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public NetworkPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public NetworkPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NetworkPb setErrorMessages(Collection 
errorMessages) { + this.errorMessages = errorMessages; + return this; + } + + public Collection getErrorMessages() { + return errorMessages; + } + + public NetworkPb setGcpNetworkInfo(GcpNetworkInfo gcpNetworkInfo) { + this.gcpNetworkInfo = gcpNetworkInfo; + return this; + } + + public GcpNetworkInfo getGcpNetworkInfo() { + return gcpNetworkInfo; + } + + public NetworkPb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + public NetworkPb setNetworkName(String networkName) { + this.networkName = networkName; + return this; + } + + public String getNetworkName() { + return networkName; + } + + public NetworkPb setSecurityGroupIds(Collection securityGroupIds) { + this.securityGroupIds = securityGroupIds; + return this; + } + + public Collection getSecurityGroupIds() { + return securityGroupIds; + } + + public NetworkPb setSubnetIds(Collection subnetIds) { + this.subnetIds = subnetIds; + return this; + } + + public Collection getSubnetIds() { + return subnetIds; + } + + public NetworkPb setVpcEndpoints(NetworkVpcEndpoints vpcEndpoints) { + this.vpcEndpoints = vpcEndpoints; + return this; + } + + public NetworkVpcEndpoints getVpcEndpoints() { + return vpcEndpoints; + } + + public NetworkPb setVpcId(String vpcId) { + this.vpcId = vpcId; + return this; + } + + public String getVpcId() { + return vpcId; + } + + public NetworkPb setVpcStatus(VpcStatus vpcStatus) { + this.vpcStatus = vpcStatus; + return this; + } + + public VpcStatus getVpcStatus() { + return vpcStatus; + } + + public NetworkPb setWarningMessages(Collection warningMessages) { + this.warningMessages = warningMessages; + return this; + } + + public Collection getWarningMessages() { + return warningMessages; + } + + public NetworkPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkPb that = (NetworkPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(errorMessages, that.errorMessages) + && Objects.equals(gcpNetworkInfo, that.gcpNetworkInfo) + && Objects.equals(networkId, that.networkId) + && Objects.equals(networkName, that.networkName) + && Objects.equals(securityGroupIds, that.securityGroupIds) + && Objects.equals(subnetIds, that.subnetIds) + && Objects.equals(vpcEndpoints, that.vpcEndpoints) + && Objects.equals(vpcId, that.vpcId) + && Objects.equals(vpcStatus, that.vpcStatus) + && Objects.equals(warningMessages, that.warningMessages) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + creationTime, + errorMessages, + gcpNetworkInfo, + networkId, + networkName, + securityGroupIds, + subnetIds, + vpcEndpoints, + vpcId, + vpcStatus, + warningMessages, + workspaceId); + } + + @Override + public String toString() { + return new ToStringer(NetworkPb.class) + .add("accountId", accountId) + .add("creationTime", creationTime) + .add("errorMessages", errorMessages) + .add("gcpNetworkInfo", gcpNetworkInfo) + .add("networkId", networkId) + .add("networkName", networkName) + .add("securityGroupIds", securityGroupIds) + .add("subnetIds", subnetIds) + .add("vpcEndpoints", vpcEndpoints) + .add("vpcId", vpcId) + .add("vpcStatus", vpcStatus) + .add("warningMessages", warningMessages) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java index 962a872a2..22057ac9e 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -15,16 +24,16 @@ *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ */ @Generated +@JsonSerialize(using = NetworkVpcEndpoints.NetworkVpcEndpointsSerializer.class) +@JsonDeserialize(using = NetworkVpcEndpoints.NetworkVpcEndpointsDeserializer.class) public class NetworkVpcEndpoints { /** * The VPC endpoint ID used by this network to access the Databricks secure cluster connectivity * relay. */ - @JsonProperty("dataplane_relay") private Collection dataplaneRelay; /** The VPC endpoint ID used by this network to access the Databricks REST API. */ - @JsonProperty("rest_api") private Collection restApi; public NetworkVpcEndpoints setDataplaneRelay(Collection dataplaneRelay) { @@ -66,4 +75,41 @@ public String toString() { .add("restApi", restApi) .toString(); } + + NetworkVpcEndpointsPb toPb() { + NetworkVpcEndpointsPb pb = new NetworkVpcEndpointsPb(); + pb.setDataplaneRelay(dataplaneRelay); + pb.setRestApi(restApi); + + return pb; + } + + static NetworkVpcEndpoints fromPb(NetworkVpcEndpointsPb pb) { + NetworkVpcEndpoints model = new NetworkVpcEndpoints(); + model.setDataplaneRelay(pb.getDataplaneRelay()); + model.setRestApi(pb.getRestApi()); + + return model; + } + + public static class NetworkVpcEndpointsSerializer extends JsonSerializer { + @Override + public void serialize(NetworkVpcEndpoints value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkVpcEndpointsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkVpcEndpointsDeserializer + extends JsonDeserializer { + @Override + public NetworkVpcEndpoints deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkVpcEndpointsPb pb = mapper.readValue(p, NetworkVpcEndpointsPb.class); + return NetworkVpcEndpoints.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpointsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpointsPb.java new file mode 100755 index 000000000..b9ca6d5ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpointsPb.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * If specified, contains the VPC endpoints used to allow cluster communication from this VPC over + * [AWS PrivateLink]. + * + *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ + */ +@Generated +class NetworkVpcEndpointsPb { + @JsonProperty("dataplane_relay") + private Collection dataplaneRelay; + + @JsonProperty("rest_api") + private Collection restApi; + + public NetworkVpcEndpointsPb setDataplaneRelay(Collection dataplaneRelay) { + this.dataplaneRelay = dataplaneRelay; + return this; + } + + public Collection getDataplaneRelay() { + return dataplaneRelay; + } + + public NetworkVpcEndpointsPb setRestApi(Collection restApi) { + this.restApi = restApi; + return this; + } + + public Collection getRestApi() { + return restApi; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkVpcEndpointsPb that = (NetworkVpcEndpointsPb) o; + return Objects.equals(dataplaneRelay, that.dataplaneRelay) + && Objects.equals(restApi, that.restApi); + } + + @Override + public int hashCode() { + return Objects.hash(dataplaneRelay, restApi); + } + + @Override + public String toString() { + return new ToStringer(NetworkVpcEndpointsPb.class) + .add("dataplaneRelay", dataplaneRelay) + .add("restApi", restApi) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java index 8db624574..bd14b45ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NetworkWarning.NetworkWarningSerializer.class) +@JsonDeserialize(using = NetworkWarning.NetworkWarningDeserializer.class) public class NetworkWarning { /** Details of the warning. */ - @JsonProperty("warning_message") private String warningMessage; /** The AWS resource associated with this warning: a subnet or a security group. */ - @JsonProperty("warning_type") private WarningType warningType; public NetworkWarning setWarningMessage(String warningMessage) { @@ -56,4 +65,40 @@ public String toString() { .add("warningType", warningType) .toString(); } + + NetworkWarningPb toPb() { + NetworkWarningPb pb = new NetworkWarningPb(); + pb.setWarningMessage(warningMessage); + pb.setWarningType(warningType); + + return pb; + } + + static NetworkWarning fromPb(NetworkWarningPb pb) { + NetworkWarning model = new NetworkWarning(); + model.setWarningMessage(pb.getWarningMessage()); + model.setWarningType(pb.getWarningType()); + + return model; + } + + public static class NetworkWarningSerializer extends JsonSerializer { + @Override + public void serialize(NetworkWarning value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkWarningPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkWarningDeserializer extends JsonDeserializer { + @Override + public NetworkWarning deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkWarningPb pb = mapper.readValue(p, NetworkWarningPb.class); + return NetworkWarning.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarningPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarningPb.java new file mode 100755 index 000000000..5945aaa03 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarningPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NetworkWarningPb { + @JsonProperty("warning_message") + private String warningMessage; + + @JsonProperty("warning_type") + private WarningType warningType; + + public NetworkWarningPb setWarningMessage(String warningMessage) { + this.warningMessage = warningMessage; + return this; + } + + public String getWarningMessage() { + return warningMessage; + } + + public NetworkWarningPb setWarningType(WarningType warningType) { + this.warningType = warningType; + return this; + } + + public WarningType getWarningType() { + return warningType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkWarningPb that = (NetworkWarningPb) o; + return Objects.equals(warningMessage, that.warningMessage) + && Objects.equals(warningType, that.warningType); + } + + @Override + public int hashCode() { + return Objects.hash(warningMessage, warningType); + } + + @Override + public String toString() { + return new ToStringer(NetworkWarningPb.class) + .add("warningMessage", warningMessage) + .add("warningType", 
warningType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java index cdd5f594b..5ca6e02b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java @@ -22,7 +22,7 @@ public Network create(CreateNetworkRequest request) { String path = String.format("/api/2.0/accounts/%s/networks", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Network.class); @@ -39,7 +39,7 @@ public void delete(DeleteNetworkRequest request) { apiClient.configuredAccountID(), request.getNetworkId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public Network get(GetNetworkRequest request) { apiClient.configuredAccountID(), request.getNetworkId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Network.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java index 5fd0babfc..8690c4087 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java @@ -24,7 +24,7 @@ public PrivateAccessSettings create(UpsertPrivateAccessSettingsRequest request) "/api/2.0/accounts/%s/private-access-settings", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PrivateAccessSettings.class); @@ -41,7 +41,7 @@ public void delete(DeletePrivateAccesRequest request) { apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public PrivateAccessSettings get(GetPrivateAccesRequest request) { apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PrivateAccessSettings.class); } catch (IOException e) { @@ -83,7 +83,7 @@ public void replace(UpsertPrivateAccessSettingsRequest request) { apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ReplaceResponse.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java index 7be0b8b37..605b6ade2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PrivateAccessSettings.PrivateAccessSettingsSerializer.class) +@JsonDeserialize(using = PrivateAccessSettings.PrivateAccessSettingsDeserializer.class) public class PrivateAccessSettings { /** The Databricks account ID that hosts the credential. */ - @JsonProperty("account_id") private String accountId; /** An array of Databricks VPC endpoint IDs. */ - @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; /** @@ -25,15 +34,12 @@ public class PrivateAccessSettings { * your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your * workspace. For details, see `allowed_vpc_endpoint_ids`. 
*/ - @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; /** Databricks private access settings ID. */ - @JsonProperty("private_access_settings_id") private String privateAccessSettingsId; /** The human-readable name of the private access settings object. */ - @JsonProperty("private_access_settings_name") private String privateAccessSettingsName; /** @@ -42,11 +48,9 @@ public class PrivateAccessSettings { * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is * enabled. */ - @JsonProperty("public_access_enabled") private Boolean publicAccessEnabled; /** The cloud region for workspaces attached to this private access settings object. */ - @JsonProperty("region") private String region; public PrivateAccessSettings setAccountId(String accountId) { @@ -150,4 +154,53 @@ public String toString() { .add("region", region) .toString(); } + + PrivateAccessSettingsPb toPb() { + PrivateAccessSettingsPb pb = new PrivateAccessSettingsPb(); + pb.setAccountId(accountId); + pb.setAllowedVpcEndpointIds(allowedVpcEndpointIds); + pb.setPrivateAccessLevel(privateAccessLevel); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + pb.setPrivateAccessSettingsName(privateAccessSettingsName); + pb.setPublicAccessEnabled(publicAccessEnabled); + pb.setRegion(region); + + return pb; + } + + static PrivateAccessSettings fromPb(PrivateAccessSettingsPb pb) { + PrivateAccessSettings model = new PrivateAccessSettings(); + model.setAccountId(pb.getAccountId()); + model.setAllowedVpcEndpointIds(pb.getAllowedVpcEndpointIds()); + model.setPrivateAccessLevel(pb.getPrivateAccessLevel()); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + model.setPrivateAccessSettingsName(pb.getPrivateAccessSettingsName()); + model.setPublicAccessEnabled(pb.getPublicAccessEnabled()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class PrivateAccessSettingsSerializer + extends JsonSerializer { 
+ @Override + public void serialize( + PrivateAccessSettings value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PrivateAccessSettingsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PrivateAccessSettingsDeserializer + extends JsonDeserializer { + @Override + public PrivateAccessSettings deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PrivateAccessSettingsPb pb = mapper.readValue(p, PrivateAccessSettingsPb.class); + return PrivateAccessSettings.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettingsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettingsPb.java new file mode 100755 index 000000000..36a8effea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettingsPb.java @@ -0,0 +1,136 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PrivateAccessSettingsPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("allowed_vpc_endpoint_ids") + private Collection allowedVpcEndpointIds; + + @JsonProperty("private_access_level") + private PrivateAccessLevel privateAccessLevel; + + @JsonProperty("private_access_settings_id") + private String privateAccessSettingsId; + + @JsonProperty("private_access_settings_name") + private String privateAccessSettingsName; + + @JsonProperty("public_access_enabled") + private Boolean publicAccessEnabled; + + @JsonProperty("region") + private String region; + + public PrivateAccessSettingsPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public PrivateAccessSettingsPb setAllowedVpcEndpointIds( + Collection allowedVpcEndpointIds) { + this.allowedVpcEndpointIds = allowedVpcEndpointIds; + return this; + } + + public Collection getAllowedVpcEndpointIds() { + return allowedVpcEndpointIds; + } + + public PrivateAccessSettingsPb setPrivateAccessLevel(PrivateAccessLevel privateAccessLevel) { + this.privateAccessLevel = privateAccessLevel; + return this; + } + + public PrivateAccessLevel getPrivateAccessLevel() { + return privateAccessLevel; + } + + public PrivateAccessSettingsPb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + public PrivateAccessSettingsPb setPrivateAccessSettingsName(String privateAccessSettingsName) { + this.privateAccessSettingsName = privateAccessSettingsName; + return this; + } + + 
public String getPrivateAccessSettingsName() { + return privateAccessSettingsName; + } + + public PrivateAccessSettingsPb setPublicAccessEnabled(Boolean publicAccessEnabled) { + this.publicAccessEnabled = publicAccessEnabled; + return this; + } + + public Boolean getPublicAccessEnabled() { + return publicAccessEnabled; + } + + public PrivateAccessSettingsPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PrivateAccessSettingsPb that = (PrivateAccessSettingsPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(allowedVpcEndpointIds, that.allowedVpcEndpointIds) + && Objects.equals(privateAccessLevel, that.privateAccessLevel) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) + && Objects.equals(privateAccessSettingsName, that.privateAccessSettingsName) + && Objects.equals(publicAccessEnabled, that.publicAccessEnabled) + && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + allowedVpcEndpointIds, + privateAccessLevel, + privateAccessSettingsId, + privateAccessSettingsName, + publicAccessEnabled, + region); + } + + @Override + public String toString() { + return new ToStringer(PrivateAccessSettingsPb.class) + .add("accountId", accountId) + .add("allowedVpcEndpointIds", allowedVpcEndpointIds) + .add("privateAccessLevel", privateAccessLevel) + .add("privateAccessSettingsId", privateAccessSettingsId) + .add("privateAccessSettingsName", privateAccessSettingsName) + .add("publicAccessEnabled", publicAccessEnabled) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java index de0d3423e..fb81c8a36 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ReplaceResponse.ReplaceResponseSerializer.class) +@JsonDeserialize(using = ReplaceResponse.ReplaceResponseDeserializer.class) public class ReplaceResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(ReplaceResponse.class).toString(); } + + ReplaceResponsePb toPb() { + ReplaceResponsePb pb = new ReplaceResponsePb(); + + return pb; + } + + static ReplaceResponse fromPb(ReplaceResponsePb pb) { + ReplaceResponse model = new ReplaceResponse(); + + return model; + } + + public static class ReplaceResponseSerializer extends JsonSerializer { + @Override + public void serialize(ReplaceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReplaceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReplaceResponseDeserializer extends JsonDeserializer { + @Override + public ReplaceResponse 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReplaceResponsePb pb = mapper.readValue(p, ReplaceResponsePb.class); + return ReplaceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponsePb.java new file mode 100755 index 000000000..da0dfa57f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ReplaceResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ReplaceResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java index 4faf62f43..3e2f744aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Root S3 bucket information. */ @Generated +@JsonSerialize(using = RootBucketInfo.RootBucketInfoSerializer.class) +@JsonDeserialize(using = RootBucketInfo.RootBucketInfoDeserializer.class) public class RootBucketInfo { /** The name of the S3 bucket. */ - @JsonProperty("bucket_name") private String bucketName; public RootBucketInfo setBucketName(String bucketName) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(RootBucketInfo.class).add("bucketName", bucketName).toString(); } + + RootBucketInfoPb toPb() { + RootBucketInfoPb pb = new RootBucketInfoPb(); + pb.setBucketName(bucketName); + + return pb; + } + + static RootBucketInfo fromPb(RootBucketInfoPb pb) { + RootBucketInfo model = new RootBucketInfo(); + model.setBucketName(pb.getBucketName()); + + return model; + } + + public static class RootBucketInfoSerializer extends JsonSerializer { + @Override + public void serialize(RootBucketInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RootBucketInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RootBucketInfoDeserializer extends JsonDeserializer { + @Override + public RootBucketInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RootBucketInfoPb pb = mapper.readValue(p, RootBucketInfoPb.class); + return RootBucketInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfoPb.java new file mode 100755 index 000000000..a0774c85b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfoPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Root S3 bucket information. */ +@Generated +class RootBucketInfoPb { + @JsonProperty("bucket_name") + private String bucketName; + + public RootBucketInfoPb setBucketName(String bucketName) { + this.bucketName = bucketName; + return this; + } + + public String getBucketName() { + return bucketName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RootBucketInfoPb that = (RootBucketInfoPb) o; + return Objects.equals(bucketName, that.bucketName); + } + + @Override + public int hashCode() { + return Objects.hash(bucketName); + } + + @Override + public String toString() { + return new ToStringer(RootBucketInfoPb.class).add("bucketName", bucketName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java index 59ce78074..c8b9ceba8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StorageConfiguration.StorageConfigurationSerializer.class) +@JsonDeserialize(using = StorageConfiguration.StorageConfigurationDeserializer.class) public class StorageConfiguration { /** The Databricks account ID that hosts the credential. */ - @JsonProperty("account_id") private String accountId; /** Time in epoch milliseconds when the storage configuration was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Root S3 bucket information. */ - @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; /** Databricks storage configuration ID. */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** The human-readable name of the storage configuration. 
*/ - @JsonProperty("storage_configuration_name") private String storageConfigurationName; public StorageConfiguration setAccountId(String accountId) { @@ -102,4 +108,48 @@ public String toString() { .add("storageConfigurationName", storageConfigurationName) .toString(); } + + StorageConfigurationPb toPb() { + StorageConfigurationPb pb = new StorageConfigurationPb(); + pb.setAccountId(accountId); + pb.setCreationTime(creationTime); + pb.setRootBucketInfo(rootBucketInfo); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setStorageConfigurationName(storageConfigurationName); + + return pb; + } + + static StorageConfiguration fromPb(StorageConfigurationPb pb) { + StorageConfiguration model = new StorageConfiguration(); + model.setAccountId(pb.getAccountId()); + model.setCreationTime(pb.getCreationTime()); + model.setRootBucketInfo(pb.getRootBucketInfo()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setStorageConfigurationName(pb.getStorageConfigurationName()); + + return model; + } + + public static class StorageConfigurationSerializer extends JsonSerializer { + @Override + public void serialize( + StorageConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StorageConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StorageConfigurationDeserializer + extends JsonDeserializer { + @Override + public StorageConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StorageConfigurationPb pb = mapper.readValue(p, StorageConfigurationPb.class); + return StorageConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfigurationPb.java new file mode 100755 index 000000000..8cdd43633 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfigurationPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StorageConfigurationPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("root_bucket_info") + private RootBucketInfo rootBucketInfo; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("storage_configuration_name") + private String storageConfigurationName; + + public StorageConfigurationPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public StorageConfigurationPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public StorageConfigurationPb setRootBucketInfo(RootBucketInfo rootBucketInfo) { + this.rootBucketInfo = rootBucketInfo; + return this; + } + + public RootBucketInfo getRootBucketInfo() { + return rootBucketInfo; + } + + public StorageConfigurationPb setStorageConfigurationId(String 
storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public StorageConfigurationPb setStorageConfigurationName(String storageConfigurationName) { + this.storageConfigurationName = storageConfigurationName; + return this; + } + + public String getStorageConfigurationName() { + return storageConfigurationName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StorageConfigurationPb that = (StorageConfigurationPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(rootBucketInfo, that.rootBucketInfo) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(storageConfigurationName, that.storageConfigurationName); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, creationTime, rootBucketInfo, storageConfigurationId, storageConfigurationName); + } + + @Override + public String toString() { + return new ToStringer(StorageConfigurationPb.class) + .add("accountId", accountId) + .add("creationTime", creationTime) + .add("rootBucketInfo", rootBucketInfo) + .add("storageConfigurationId", storageConfigurationId) + .add("storageConfigurationName", storageConfigurationName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java index 3489de525..a99f6e511 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java @@ -24,7 +24,7 @@ public StorageConfiguration create(CreateStorageConfigurationRequest request) { 
"/api/2.0/accounts/%s/storage-configurations", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, StorageConfiguration.class); @@ -41,7 +41,7 @@ public void delete(DeleteStorageRequest request) { apiClient.configuredAccountID(), request.getStorageConfigurationId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -57,7 +57,7 @@ public StorageConfiguration get(GetStorageRequest request) { apiClient.configuredAccountID(), request.getStorageConfigurationId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, StorageConfiguration.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java index ee42e7691..515931893 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StsRole.StsRoleSerializer.class) +@JsonDeserialize(using = StsRole.StsRoleDeserializer.class) public class StsRole { /** * The external ID that needs to be trusted by the cross-account role. This is always your * Databricks account ID. */ - @JsonProperty("external_id") private String externalId; /** The Amazon Resource Name (ARN) of the cross account role. */ - @JsonProperty("role_arn") private String roleArn; public StsRole setExternalId(String externalId) { @@ -58,4 +67,39 @@ public String toString() { .add("roleArn", roleArn) .toString(); } + + StsRolePb toPb() { + StsRolePb pb = new StsRolePb(); + pb.setExternalId(externalId); + pb.setRoleArn(roleArn); + + return pb; + } + + static StsRole fromPb(StsRolePb pb) { + StsRole model = new StsRole(); + model.setExternalId(pb.getExternalId()); + model.setRoleArn(pb.getRoleArn()); + + return model; + } + + public static class StsRoleSerializer extends JsonSerializer { + @Override + public void serialize(StsRole value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StsRolePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StsRoleDeserializer extends JsonDeserializer { + @Override + public StsRole deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StsRolePb pb = mapper.readValue(p, StsRolePb.class); + return StsRole.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRolePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRolePb.java new file mode 100755 index 000000000..2b52e7581 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRolePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StsRolePb { + @JsonProperty("external_id") + private String externalId; + + @JsonProperty("role_arn") + private String roleArn; + + public StsRolePb setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public StsRolePb setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StsRolePb that = (StsRolePb) o; + return Objects.equals(externalId, that.externalId) && Objects.equals(roleArn, that.roleArn); + } + + @Override + public int hashCode() { + return Objects.hash(externalId, roleArn); + } + + @Override + public String toString() { + return new ToStringer(StsRolePb.class) + .add("externalId", externalId) + .add("roleArn", roleArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java index 0ffcdf69e..d3f17711a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponsePb.java new file mode 100755 index 000000000..5e4dbd999 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java index a690adac9..3b7648573 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateWorkspaceRequest.UpdateWorkspaceRequestSerializer.class) +@JsonDeserialize(using = UpdateWorkspaceRequest.UpdateWorkspaceRequestDeserializer.class) public class UpdateWorkspaceRequest { /** * The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is * available only for updating failed workspaces. */ - @JsonProperty("aws_region") private String awsRegion; /** * ID of the workspace's credential configuration object. This parameter is available for updating * both failed and running workspaces. */ - @JsonProperty("credentials_id") private String credentialsId; /** @@ -30,18 +38,15 @@ public class UpdateWorkspaceRequest { * string of utf-8 characters. The value can be an empty string, with maximum length of 255 * characters. The key can be of maximum length of 127 characters, and cannot be empty. */ - @JsonProperty("custom_tags") private Map customTags; /** * The ID of the workspace's managed services encryption key configuration object. This parameter * is available only for updating failed workspaces. 
*/ - @JsonProperty("managed_services_customer_managed_key_id") private String managedServicesCustomerManagedKeyId; /** */ - @JsonProperty("network_connectivity_config_id") private String networkConnectivityConfigId; /** @@ -49,32 +54,28 @@ public class UpdateWorkspaceRequest { * customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC * to a customer-managed VPC by updating the workspace to add a network configuration ID. */ - @JsonProperty("network_id") private String networkId; /** * The ID of the workspace's private access settings configuration object. This parameter is * available only for updating failed workspaces. */ - @JsonProperty("private_access_settings_id") private String privateAccessSettingsId; /** * The ID of the workspace's storage configuration object. This parameter is available only for * updating failed workspaces. */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** * The ID of the key configuration object for workspace storage. This parameter is available for * updating both failed and running workspaces. */ - @JsonProperty("storage_customer_managed_key_id") private String storageCustomerManagedKeyId; /** Workspace ID. 
*/ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public UpdateWorkspaceRequest setAwsRegion(String awsRegion) { this.awsRegion = awsRegion; @@ -215,4 +216,59 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + UpdateWorkspaceRequestPb toPb() { + UpdateWorkspaceRequestPb pb = new UpdateWorkspaceRequestPb(); + pb.setAwsRegion(awsRegion); + pb.setCredentialsId(credentialsId); + pb.setCustomTags(customTags); + pb.setManagedServicesCustomerManagedKeyId(managedServicesCustomerManagedKeyId); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setNetworkId(networkId); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setStorageCustomerManagedKeyId(storageCustomerManagedKeyId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static UpdateWorkspaceRequest fromPb(UpdateWorkspaceRequestPb pb) { + UpdateWorkspaceRequest model = new UpdateWorkspaceRequest(); + model.setAwsRegion(pb.getAwsRegion()); + model.setCredentialsId(pb.getCredentialsId()); + model.setCustomTags(pb.getCustomTags()); + model.setManagedServicesCustomerManagedKeyId(pb.getManagedServicesCustomerManagedKeyId()); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setNetworkId(pb.getNetworkId()); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setStorageCustomerManagedKeyId(pb.getStorageCustomerManagedKeyId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class UpdateWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
UpdateWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceRequestPb pb = mapper.readValue(p, UpdateWorkspaceRequestPb.class); + return UpdateWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequestPb.java new file mode 100755 index 000000000..1a6a97ff0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequestPb.java @@ -0,0 +1,184 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class UpdateWorkspaceRequestPb { + @JsonProperty("aws_region") + private String awsRegion; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("managed_services_customer_managed_key_id") + private String managedServicesCustomerManagedKeyId; + + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + @JsonProperty("network_id") + private String networkId; + + @JsonProperty("private_access_settings_id") + private String privateAccessSettingsId; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("storage_customer_managed_key_id") + 
private String storageCustomerManagedKeyId; + + @JsonIgnore private Long workspaceId; + + public UpdateWorkspaceRequestPb setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + return this; + } + + public String getAwsRegion() { + return awsRegion; + } + + public UpdateWorkspaceRequestPb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public UpdateWorkspaceRequestPb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public UpdateWorkspaceRequestPb setManagedServicesCustomerManagedKeyId( + String managedServicesCustomerManagedKeyId) { + this.managedServicesCustomerManagedKeyId = managedServicesCustomerManagedKeyId; + return this; + } + + public String getManagedServicesCustomerManagedKeyId() { + return managedServicesCustomerManagedKeyId; + } + + public UpdateWorkspaceRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public UpdateWorkspaceRequestPb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + public UpdateWorkspaceRequestPb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + public UpdateWorkspaceRequestPb setStorageConfigurationId(String storageConfigurationId) { + this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public UpdateWorkspaceRequestPb 
setStorageCustomerManagedKeyId( + String storageCustomerManagedKeyId) { + this.storageCustomerManagedKeyId = storageCustomerManagedKeyId; + return this; + } + + public String getStorageCustomerManagedKeyId() { + return storageCustomerManagedKeyId; + } + + public UpdateWorkspaceRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceRequestPb that = (UpdateWorkspaceRequestPb) o; + return Objects.equals(awsRegion, that.awsRegion) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(customTags, that.customTags) + && Objects.equals( + managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(networkId, that.networkId) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash( + awsRegion, + credentialsId, + customTags, + managedServicesCustomerManagedKeyId, + networkConnectivityConfigId, + networkId, + privateAccessSettingsId, + storageConfigurationId, + storageCustomerManagedKeyId, + workspaceId); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceRequestPb.class) + .add("awsRegion", awsRegion) + .add("credentialsId", credentialsId) + .add("customTags", customTags) + .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("networkId", networkId) + 
.add("privateAccessSettingsId", privateAccessSettingsId) + .add("storageConfigurationId", storageConfigurationId) + .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java index ed1ea553d..e8249cca5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java @@ -4,12 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpsertPrivateAccessSettingsRequest.UpsertPrivateAccessSettingsRequestSerializer.class) +@JsonDeserialize( + using = UpsertPrivateAccessSettingsRequest.UpsertPrivateAccessSettingsRequestDeserializer.class) public class UpsertPrivateAccessSettingsRequest { /** * An array of Databricks VPC endpoint IDs. 
This is the Databricks ID that is returned when @@ -25,7 +37,6 @@ public class UpsertPrivateAccessSettingsRequest { * *

[IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html */ - @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; /** @@ -35,14 +46,12 @@ public class UpsertPrivateAccessSettingsRequest { * your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your * workspace. For details, see `allowed_vpc_endpoint_ids`. */ - @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; /** Databricks Account API private access settings ID. */ - @JsonIgnore private String privateAccessSettingsId; + private String privateAccessSettingsId; /** The human-readable name of the private access settings object. */ - @JsonProperty("private_access_settings_name") private String privateAccessSettingsName; /** @@ -51,11 +60,9 @@ public class UpsertPrivateAccessSettingsRequest { * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is * enabled. */ - @JsonProperty("public_access_enabled") private Boolean publicAccessEnabled; /** The cloud region for workspaces associated with this private access settings object. 
*/ - @JsonProperty("region") private String region; public UpsertPrivateAccessSettingsRequest setAllowedVpcEndpointIds( @@ -151,4 +158,52 @@ public String toString() { .add("region", region) .toString(); } + + UpsertPrivateAccessSettingsRequestPb toPb() { + UpsertPrivateAccessSettingsRequestPb pb = new UpsertPrivateAccessSettingsRequestPb(); + pb.setAllowedVpcEndpointIds(allowedVpcEndpointIds); + pb.setPrivateAccessLevel(privateAccessLevel); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + pb.setPrivateAccessSettingsName(privateAccessSettingsName); + pb.setPublicAccessEnabled(publicAccessEnabled); + pb.setRegion(region); + + return pb; + } + + static UpsertPrivateAccessSettingsRequest fromPb(UpsertPrivateAccessSettingsRequestPb pb) { + UpsertPrivateAccessSettingsRequest model = new UpsertPrivateAccessSettingsRequest(); + model.setAllowedVpcEndpointIds(pb.getAllowedVpcEndpointIds()); + model.setPrivateAccessLevel(pb.getPrivateAccessLevel()); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + model.setPrivateAccessSettingsName(pb.getPrivateAccessSettingsName()); + model.setPublicAccessEnabled(pb.getPublicAccessEnabled()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class UpsertPrivateAccessSettingsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpsertPrivateAccessSettingsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpsertPrivateAccessSettingsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpsertPrivateAccessSettingsRequestDeserializer + extends JsonDeserializer { + @Override + public UpsertPrivateAccessSettingsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpsertPrivateAccessSettingsRequestPb pb = + mapper.readValue(p, UpsertPrivateAccessSettingsRequestPb.class); + return UpsertPrivateAccessSettingsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequestPb.java new file mode 100755 index 000000000..ee1e479f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequestPb.java @@ -0,0 +1,124 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpsertPrivateAccessSettingsRequestPb { + @JsonProperty("allowed_vpc_endpoint_ids") + private Collection allowedVpcEndpointIds; + + @JsonProperty("private_access_level") + private PrivateAccessLevel privateAccessLevel; + + @JsonIgnore private String privateAccessSettingsId; + + @JsonProperty("private_access_settings_name") + private String privateAccessSettingsName; + + @JsonProperty("public_access_enabled") + private Boolean publicAccessEnabled; + + @JsonProperty("region") + private String region; + + public UpsertPrivateAccessSettingsRequestPb setAllowedVpcEndpointIds( + Collection allowedVpcEndpointIds) { + this.allowedVpcEndpointIds = allowedVpcEndpointIds; + return this; + } + + public Collection getAllowedVpcEndpointIds() { + return allowedVpcEndpointIds; + } + + public UpsertPrivateAccessSettingsRequestPb setPrivateAccessLevel( + PrivateAccessLevel privateAccessLevel) { + 
this.privateAccessLevel = privateAccessLevel; + return this; + } + + public PrivateAccessLevel getPrivateAccessLevel() { + return privateAccessLevel; + } + + public UpsertPrivateAccessSettingsRequestPb setPrivateAccessSettingsId( + String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + public UpsertPrivateAccessSettingsRequestPb setPrivateAccessSettingsName( + String privateAccessSettingsName) { + this.privateAccessSettingsName = privateAccessSettingsName; + return this; + } + + public String getPrivateAccessSettingsName() { + return privateAccessSettingsName; + } + + public UpsertPrivateAccessSettingsRequestPb setPublicAccessEnabled(Boolean publicAccessEnabled) { + this.publicAccessEnabled = publicAccessEnabled; + return this; + } + + public Boolean getPublicAccessEnabled() { + return publicAccessEnabled; + } + + public UpsertPrivateAccessSettingsRequestPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpsertPrivateAccessSettingsRequestPb that = (UpsertPrivateAccessSettingsRequestPb) o; + return Objects.equals(allowedVpcEndpointIds, that.allowedVpcEndpointIds) + && Objects.equals(privateAccessLevel, that.privateAccessLevel) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) + && Objects.equals(privateAccessSettingsName, that.privateAccessSettingsName) + && Objects.equals(publicAccessEnabled, that.publicAccessEnabled) + && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash( + allowedVpcEndpointIds, + privateAccessLevel, + privateAccessSettingsId, + privateAccessSettingsName, + publicAccessEnabled, + region); + } + + @Override + 
public String toString() { + return new ToStringer(UpsertPrivateAccessSettingsRequestPb.class) + .add("allowedVpcEndpointIds", allowedVpcEndpointIds) + .add("privateAccessLevel", privateAccessLevel) + .add("privateAccessSettingsId", privateAccessSettingsId) + .add("privateAccessSettingsName", privateAccessSettingsName) + .add("publicAccessEnabled", publicAccessEnabled) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java index 11e9be7b1..eb7435849 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = VpcEndpoint.VpcEndpointSerializer.class) +@JsonDeserialize(using = VpcEndpoint.VpcEndpointDeserializer.class) public class VpcEndpoint { /** The Databricks account ID that hosts the VPC endpoint configuration. */ - @JsonProperty("account_id") private String accountId; /** The AWS Account in which the VPC endpoint object exists. 
*/ - @JsonProperty("aws_account_id") private String awsAccountId; /** @@ -26,19 +35,15 @@ public class VpcEndpoint { * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html * [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html */ - @JsonProperty("aws_endpoint_service_id") private String awsEndpointServiceId; /** The ID of the VPC endpoint object in AWS. */ - @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; /** The Google Cloud specific information for this Private Service Connect endpoint. */ - @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; /** The AWS region in which this VPC endpoint object exists. */ - @JsonProperty("region") private String region; /** @@ -48,7 +53,6 @@ public class VpcEndpoint { *

[AWS DescribeVpcEndpoint documentation]: * https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html */ - @JsonProperty("state") private String state; /** @@ -57,18 +61,15 @@ public class VpcEndpoint { * *

[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html */ - @JsonProperty("use_case") private EndpointUseCase useCase; /** * Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not * confuse this with the `aws_vpc_endpoint_id`, which is the ID within AWS of the VPC endpoint. */ - @JsonProperty("vpc_endpoint_id") private String vpcEndpointId; /** The human-readable name of the storage configuration. */ - @JsonProperty("vpc_endpoint_name") private String vpcEndpointName; public VpcEndpoint setAccountId(String accountId) { @@ -208,4 +209,55 @@ public String toString() { .add("vpcEndpointName", vpcEndpointName) .toString(); } + + VpcEndpointPb toPb() { + VpcEndpointPb pb = new VpcEndpointPb(); + pb.setAccountId(accountId); + pb.setAwsAccountId(awsAccountId); + pb.setAwsEndpointServiceId(awsEndpointServiceId); + pb.setAwsVpcEndpointId(awsVpcEndpointId); + pb.setGcpVpcEndpointInfo(gcpVpcEndpointInfo); + pb.setRegion(region); + pb.setState(state); + pb.setUseCase(useCase); + pb.setVpcEndpointId(vpcEndpointId); + pb.setVpcEndpointName(vpcEndpointName); + + return pb; + } + + static VpcEndpoint fromPb(VpcEndpointPb pb) { + VpcEndpoint model = new VpcEndpoint(); + model.setAccountId(pb.getAccountId()); + model.setAwsAccountId(pb.getAwsAccountId()); + model.setAwsEndpointServiceId(pb.getAwsEndpointServiceId()); + model.setAwsVpcEndpointId(pb.getAwsVpcEndpointId()); + model.setGcpVpcEndpointInfo(pb.getGcpVpcEndpointInfo()); + model.setRegion(pb.getRegion()); + model.setState(pb.getState()); + model.setUseCase(pb.getUseCase()); + model.setVpcEndpointId(pb.getVpcEndpointId()); + model.setVpcEndpointName(pb.getVpcEndpointName()); + + return model; + } + + public static class VpcEndpointSerializer extends JsonSerializer { + @Override + public void serialize(VpcEndpoint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VpcEndpointPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class VpcEndpointDeserializer extends JsonDeserializer { + @Override + public VpcEndpoint deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VpcEndpointPb pb = mapper.readValue(p, VpcEndpointPb.class); + return VpcEndpoint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointPb.java new file mode 100755 index 000000000..8d30ad6b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class VpcEndpointPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("aws_account_id") + private String awsAccountId; + + @JsonProperty("aws_endpoint_service_id") + private String awsEndpointServiceId; + + @JsonProperty("aws_vpc_endpoint_id") + private String awsVpcEndpointId; + + @JsonProperty("gcp_vpc_endpoint_info") + private GcpVpcEndpointInfo gcpVpcEndpointInfo; + + @JsonProperty("region") + private String region; + + @JsonProperty("state") + private String state; + + @JsonProperty("use_case") + private EndpointUseCase useCase; + + @JsonProperty("vpc_endpoint_id") + private String vpcEndpointId; + + @JsonProperty("vpc_endpoint_name") + private String vpcEndpointName; + + public VpcEndpointPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + 
public String getAccountId() { + return accountId; + } + + public VpcEndpointPb setAwsAccountId(String awsAccountId) { + this.awsAccountId = awsAccountId; + return this; + } + + public String getAwsAccountId() { + return awsAccountId; + } + + public VpcEndpointPb setAwsEndpointServiceId(String awsEndpointServiceId) { + this.awsEndpointServiceId = awsEndpointServiceId; + return this; + } + + public String getAwsEndpointServiceId() { + return awsEndpointServiceId; + } + + public VpcEndpointPb setAwsVpcEndpointId(String awsVpcEndpointId) { + this.awsVpcEndpointId = awsVpcEndpointId; + return this; + } + + public String getAwsVpcEndpointId() { + return awsVpcEndpointId; + } + + public VpcEndpointPb setGcpVpcEndpointInfo(GcpVpcEndpointInfo gcpVpcEndpointInfo) { + this.gcpVpcEndpointInfo = gcpVpcEndpointInfo; + return this; + } + + public GcpVpcEndpointInfo getGcpVpcEndpointInfo() { + return gcpVpcEndpointInfo; + } + + public VpcEndpointPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public VpcEndpointPb setState(String state) { + this.state = state; + return this; + } + + public String getState() { + return state; + } + + public VpcEndpointPb setUseCase(EndpointUseCase useCase) { + this.useCase = useCase; + return this; + } + + public EndpointUseCase getUseCase() { + return useCase; + } + + public VpcEndpointPb setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + public VpcEndpointPb setVpcEndpointName(String vpcEndpointName) { + this.vpcEndpointName = vpcEndpointName; + return this; + } + + public String getVpcEndpointName() { + return vpcEndpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VpcEndpointPb that = (VpcEndpointPb) o; + return Objects.equals(accountId, 
that.accountId) + && Objects.equals(awsAccountId, that.awsAccountId) + && Objects.equals(awsEndpointServiceId, that.awsEndpointServiceId) + && Objects.equals(awsVpcEndpointId, that.awsVpcEndpointId) + && Objects.equals(gcpVpcEndpointInfo, that.gcpVpcEndpointInfo) + && Objects.equals(region, that.region) + && Objects.equals(state, that.state) + && Objects.equals(useCase, that.useCase) + && Objects.equals(vpcEndpointId, that.vpcEndpointId) + && Objects.equals(vpcEndpointName, that.vpcEndpointName); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + awsAccountId, + awsEndpointServiceId, + awsVpcEndpointId, + gcpVpcEndpointInfo, + region, + state, + useCase, + vpcEndpointId, + vpcEndpointName); + } + + @Override + public String toString() { + return new ToStringer(VpcEndpointPb.class) + .add("accountId", accountId) + .add("awsAccountId", awsAccountId) + .add("awsEndpointServiceId", awsEndpointServiceId) + .add("awsVpcEndpointId", awsVpcEndpointId) + .add("gcpVpcEndpointInfo", gcpVpcEndpointInfo) + .add("region", region) + .add("state", state) + .add("useCase", useCase) + .add("vpcEndpointId", vpcEndpointId) + .add("vpcEndpointName", vpcEndpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java index 68a709bc6..837175dc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java @@ -23,7 +23,7 @@ public VpcEndpoint create(CreateVpcEndpointRequest request) { String.format("/api/2.0/accounts/%s/vpc-endpoints", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, VpcEndpoint.class); @@ -40,7 +40,7 @@ public void delete(DeleteVpcEndpointRequest request) { apiClient.configuredAccountID(), request.getVpcEndpointId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public VpcEndpoint get(GetVpcEndpointRequest request) { apiClient.configuredAccountID(), request.getVpcEndpointId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, VpcEndpoint.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java index 4d6b61c9d..eb22746bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java @@ -4,38 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = Workspace.WorkspaceSerializer.class) +@JsonDeserialize(using = Workspace.WorkspaceDeserializer.class) public class Workspace { /** Databricks account ID. */ - @JsonProperty("account_id") private String accountId; /** The AWS region of the workspace data plane (for example, `us-west-2`). */ - @JsonProperty("aws_region") private String awsRegion; /** */ - @JsonProperty("azure_workspace_info") private AzureWorkspaceInfo azureWorkspaceInfo; /** The cloud name. This field always has the value `gcp`. */ - @JsonProperty("cloud") private String cloud; /** The general workspace configurations that are specific to cloud providers. */ - @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; /** Time in epoch milliseconds when the workspace was created. */ - @JsonProperty("creation_time") private Long creationTime; /** ID of the workspace's credential configuration object. */ - @JsonProperty("credentials_id") private String credentialsId; /** @@ -43,7 +47,6 @@ public class Workspace { * string of utf-8 characters. The value can be an empty string, with maximum length of 255 * characters. The key can be of maximum length of 127 characters, and cannot be empty. */ - @JsonProperty("custom_tags") private Map customTags; /** @@ -52,14 +55,12 @@ public class Workspace { * *

This value must be unique across all non-deleted deployments across all AWS regions. */ - @JsonProperty("deployment_name") private String deploymentName; /** * If this workspace is for a external customer, then external_customer_info is populated. If this * workspace is not for a external customer, then external_customer_info is empty. */ - @JsonProperty("external_customer_info") private ExternalCustomerInfo externalCustomerInfo; /** @@ -83,33 +84,27 @@ public class Workspace { *

[calculate subnet sizes for a new workspace]: * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html */ - @JsonProperty("gcp_managed_network_config") private GcpManagedNetworkConfig gcpManagedNetworkConfig; /** The configurations for the GKE cluster of a Databricks workspace. */ - @JsonProperty("gke_config") private GkeConfig gkeConfig; /** Whether no public IP is enabled for the workspace. */ - @JsonProperty("is_no_public_ip_enabled") private Boolean isNoPublicIpEnabled; /** * The Google Cloud region of the workspace data plane in your Google account (for example, * `us-east4`). */ - @JsonProperty("location") private String location; /** ID of the key configuration for encrypting managed services. */ - @JsonProperty("managed_services_customer_managed_key_id") private String managedServicesCustomerManagedKeyId; /** * The network configuration ID that is attached to the workspace. This field is available only if * the network is a customer-managed network. */ - @JsonProperty("network_id") private String networkId; /** @@ -117,7 +112,6 @@ public class Workspace { * *

[AWS Pricing]: https://databricks.com/product/aws-pricing */ - @JsonProperty("pricing_tier") private PricingTier pricingTier; /** @@ -131,34 +125,27 @@ public class Workspace { * PrivateLink]: * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html */ - @JsonProperty("private_access_settings_id") private String privateAccessSettingsId; /** ID of the workspace's storage configuration object. */ - @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** ID of the key configuration for encrypting workspace storage. */ - @JsonProperty("storage_customer_managed_key_id") private String storageCustomerManagedKeyId; /** A unique integer ID for the workspace */ - @JsonProperty("workspace_id") private Long workspaceId; /** The human-readable name of the workspace. */ - @JsonProperty("workspace_name") private String workspaceName; /** * The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` * initially. Continue to check the status until the status is `RUNNING`. */ - @JsonProperty("workspace_status") private WorkspaceStatus workspaceStatus; /** Message describing the current workspace status. 
*/ - @JsonProperty("workspace_status_message") private String workspaceStatusMessage; public Workspace setAccountId(String accountId) { @@ -468,4 +455,83 @@ public String toString() { .add("workspaceStatusMessage", workspaceStatusMessage) .toString(); } + + WorkspacePb toPb() { + WorkspacePb pb = new WorkspacePb(); + pb.setAccountId(accountId); + pb.setAwsRegion(awsRegion); + pb.setAzureWorkspaceInfo(azureWorkspaceInfo); + pb.setCloud(cloud); + pb.setCloudResourceContainer(cloudResourceContainer); + pb.setCreationTime(creationTime); + pb.setCredentialsId(credentialsId); + pb.setCustomTags(customTags); + pb.setDeploymentName(deploymentName); + pb.setExternalCustomerInfo(externalCustomerInfo); + pb.setGcpManagedNetworkConfig(gcpManagedNetworkConfig); + pb.setGkeConfig(gkeConfig); + pb.setIsNoPublicIpEnabled(isNoPublicIpEnabled); + pb.setLocation(location); + pb.setManagedServicesCustomerManagedKeyId(managedServicesCustomerManagedKeyId); + pb.setNetworkId(networkId); + pb.setPricingTier(pricingTier); + pb.setPrivateAccessSettingsId(privateAccessSettingsId); + pb.setStorageConfigurationId(storageConfigurationId); + pb.setStorageCustomerManagedKeyId(storageCustomerManagedKeyId); + pb.setWorkspaceId(workspaceId); + pb.setWorkspaceName(workspaceName); + pb.setWorkspaceStatus(workspaceStatus); + pb.setWorkspaceStatusMessage(workspaceStatusMessage); + + return pb; + } + + static Workspace fromPb(WorkspacePb pb) { + Workspace model = new Workspace(); + model.setAccountId(pb.getAccountId()); + model.setAwsRegion(pb.getAwsRegion()); + model.setAzureWorkspaceInfo(pb.getAzureWorkspaceInfo()); + model.setCloud(pb.getCloud()); + model.setCloudResourceContainer(pb.getCloudResourceContainer()); + model.setCreationTime(pb.getCreationTime()); + model.setCredentialsId(pb.getCredentialsId()); + model.setCustomTags(pb.getCustomTags()); + model.setDeploymentName(pb.getDeploymentName()); + model.setExternalCustomerInfo(pb.getExternalCustomerInfo()); + 
model.setGcpManagedNetworkConfig(pb.getGcpManagedNetworkConfig()); + model.setGkeConfig(pb.getGkeConfig()); + model.setIsNoPublicIpEnabled(pb.getIsNoPublicIpEnabled()); + model.setLocation(pb.getLocation()); + model.setManagedServicesCustomerManagedKeyId(pb.getManagedServicesCustomerManagedKeyId()); + model.setNetworkId(pb.getNetworkId()); + model.setPricingTier(pb.getPricingTier()); + model.setPrivateAccessSettingsId(pb.getPrivateAccessSettingsId()); + model.setStorageConfigurationId(pb.getStorageConfigurationId()); + model.setStorageCustomerManagedKeyId(pb.getStorageCustomerManagedKeyId()); + model.setWorkspaceId(pb.getWorkspaceId()); + model.setWorkspaceName(pb.getWorkspaceName()); + model.setWorkspaceStatus(pb.getWorkspaceStatus()); + model.setWorkspaceStatusMessage(pb.getWorkspaceStatusMessage()); + + return model; + } + + public static class WorkspaceSerializer extends JsonSerializer { + @Override + public void serialize(Workspace value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspacePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceDeserializer extends JsonDeserializer { + @Override + public Workspace deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspacePb pb = mapper.readValue(p, WorkspacePb.class); + return Workspace.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacePb.java new file mode 100755 index 000000000..11c897d7e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacePb.java @@ -0,0 +1,392 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class WorkspacePb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("aws_region") + private String awsRegion; + + @JsonProperty("azure_workspace_info") + private AzureWorkspaceInfo azureWorkspaceInfo; + + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("cloud_resource_container") + private CloudResourceContainer cloudResourceContainer; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("credentials_id") + private String credentialsId; + + @JsonProperty("custom_tags") + private Map customTags; + + @JsonProperty("deployment_name") + private String deploymentName; + + @JsonProperty("external_customer_info") + private ExternalCustomerInfo externalCustomerInfo; + + @JsonProperty("gcp_managed_network_config") + private GcpManagedNetworkConfig gcpManagedNetworkConfig; + + @JsonProperty("gke_config") + private GkeConfig gkeConfig; + + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + + @JsonProperty("location") + private String location; + + @JsonProperty("managed_services_customer_managed_key_id") + private String managedServicesCustomerManagedKeyId; + + @JsonProperty("network_id") + private String networkId; + + @JsonProperty("pricing_tier") + private PricingTier pricingTier; + + @JsonProperty("private_access_settings_id") + private String privateAccessSettingsId; + + @JsonProperty("storage_configuration_id") + private String storageConfigurationId; + + @JsonProperty("storage_customer_managed_key_id") + private String storageCustomerManagedKeyId; + + @JsonProperty("workspace_id") + private Long workspaceId; + + @JsonProperty("workspace_name") + private String workspaceName; + + 
@JsonProperty("workspace_status") + private WorkspaceStatus workspaceStatus; + + @JsonProperty("workspace_status_message") + private String workspaceStatusMessage; + + public WorkspacePb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public WorkspacePb setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + return this; + } + + public String getAwsRegion() { + return awsRegion; + } + + public WorkspacePb setAzureWorkspaceInfo(AzureWorkspaceInfo azureWorkspaceInfo) { + this.azureWorkspaceInfo = azureWorkspaceInfo; + return this; + } + + public AzureWorkspaceInfo getAzureWorkspaceInfo() { + return azureWorkspaceInfo; + } + + public WorkspacePb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public WorkspacePb setCloudResourceContainer(CloudResourceContainer cloudResourceContainer) { + this.cloudResourceContainer = cloudResourceContainer; + return this; + } + + public CloudResourceContainer getCloudResourceContainer() { + return cloudResourceContainer; + } + + public WorkspacePb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public WorkspacePb setCredentialsId(String credentialsId) { + this.credentialsId = credentialsId; + return this; + } + + public String getCredentialsId() { + return credentialsId; + } + + public WorkspacePb setCustomTags(Map customTags) { + this.customTags = customTags; + return this; + } + + public Map getCustomTags() { + return customTags; + } + + public WorkspacePb setDeploymentName(String deploymentName) { + this.deploymentName = deploymentName; + return this; + } + + public String getDeploymentName() { + return deploymentName; + } + + public WorkspacePb setExternalCustomerInfo(ExternalCustomerInfo externalCustomerInfo) { + this.externalCustomerInfo = 
externalCustomerInfo; + return this; + } + + public ExternalCustomerInfo getExternalCustomerInfo() { + return externalCustomerInfo; + } + + public WorkspacePb setGcpManagedNetworkConfig(GcpManagedNetworkConfig gcpManagedNetworkConfig) { + this.gcpManagedNetworkConfig = gcpManagedNetworkConfig; + return this; + } + + public GcpManagedNetworkConfig getGcpManagedNetworkConfig() { + return gcpManagedNetworkConfig; + } + + public WorkspacePb setGkeConfig(GkeConfig gkeConfig) { + this.gkeConfig = gkeConfig; + return this; + } + + public GkeConfig getGkeConfig() { + return gkeConfig; + } + + public WorkspacePb setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + + public WorkspacePb setLocation(String location) { + this.location = location; + return this; + } + + public String getLocation() { + return location; + } + + public WorkspacePb setManagedServicesCustomerManagedKeyId( + String managedServicesCustomerManagedKeyId) { + this.managedServicesCustomerManagedKeyId = managedServicesCustomerManagedKeyId; + return this; + } + + public String getManagedServicesCustomerManagedKeyId() { + return managedServicesCustomerManagedKeyId; + } + + public WorkspacePb setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + public WorkspacePb setPricingTier(PricingTier pricingTier) { + this.pricingTier = pricingTier; + return this; + } + + public PricingTier getPricingTier() { + return pricingTier; + } + + public WorkspacePb setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + + public WorkspacePb setStorageConfigurationId(String storageConfigurationId) { + 
this.storageConfigurationId = storageConfigurationId; + return this; + } + + public String getStorageConfigurationId() { + return storageConfigurationId; + } + + public WorkspacePb setStorageCustomerManagedKeyId(String storageCustomerManagedKeyId) { + this.storageCustomerManagedKeyId = storageCustomerManagedKeyId; + return this; + } + + public String getStorageCustomerManagedKeyId() { + return storageCustomerManagedKeyId; + } + + public WorkspacePb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public WorkspacePb setWorkspaceName(String workspaceName) { + this.workspaceName = workspaceName; + return this; + } + + public String getWorkspaceName() { + return workspaceName; + } + + public WorkspacePb setWorkspaceStatus(WorkspaceStatus workspaceStatus) { + this.workspaceStatus = workspaceStatus; + return this; + } + + public WorkspaceStatus getWorkspaceStatus() { + return workspaceStatus; + } + + public WorkspacePb setWorkspaceStatusMessage(String workspaceStatusMessage) { + this.workspaceStatusMessage = workspaceStatusMessage; + return this; + } + + public String getWorkspaceStatusMessage() { + return workspaceStatusMessage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspacePb that = (WorkspacePb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(awsRegion, that.awsRegion) + && Objects.equals(azureWorkspaceInfo, that.azureWorkspaceInfo) + && Objects.equals(cloud, that.cloud) + && Objects.equals(cloudResourceContainer, that.cloudResourceContainer) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(customTags, that.customTags) + && Objects.equals(deploymentName, that.deploymentName) + && Objects.equals(externalCustomerInfo, that.externalCustomerInfo) + && 
Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) + && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) + && Objects.equals(location, that.location) + && Objects.equals( + managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) + && Objects.equals(networkId, that.networkId) + && Objects.equals(pricingTier, that.pricingTier) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) + && Objects.equals(storageConfigurationId, that.storageConfigurationId) + && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) + && Objects.equals(workspaceId, that.workspaceId) + && Objects.equals(workspaceName, that.workspaceName) + && Objects.equals(workspaceStatus, that.workspaceStatus) + && Objects.equals(workspaceStatusMessage, that.workspaceStatusMessage); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + awsRegion, + azureWorkspaceInfo, + cloud, + cloudResourceContainer, + creationTime, + credentialsId, + customTags, + deploymentName, + externalCustomerInfo, + gcpManagedNetworkConfig, + gkeConfig, + isNoPublicIpEnabled, + location, + managedServicesCustomerManagedKeyId, + networkId, + pricingTier, + privateAccessSettingsId, + storageConfigurationId, + storageCustomerManagedKeyId, + workspaceId, + workspaceName, + workspaceStatus, + workspaceStatusMessage); + } + + @Override + public String toString() { + return new ToStringer(WorkspacePb.class) + .add("accountId", accountId) + .add("awsRegion", awsRegion) + .add("azureWorkspaceInfo", azureWorkspaceInfo) + .add("cloud", cloud) + .add("cloudResourceContainer", cloudResourceContainer) + .add("creationTime", creationTime) + .add("credentialsId", credentialsId) + .add("customTags", customTags) + .add("deploymentName", deploymentName) + .add("externalCustomerInfo", externalCustomerInfo) + .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) + 
.add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) + .add("location", location) + .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) + .add("networkId", networkId) + .add("pricingTier", pricingTier) + .add("privateAccessSettingsId", privateAccessSettingsId) + .add("storageConfigurationId", storageConfigurationId) + .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) + .add("workspaceId", workspaceId) + .add("workspaceName", workspaceName) + .add("workspaceStatus", workspaceStatus) + .add("workspaceStatusMessage", workspaceStatusMessage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java index e700dac93..f1b953fa8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java @@ -22,7 +22,7 @@ public Workspace create(CreateWorkspaceRequest request) { String path = String.format("/api/2.0/accounts/%s/workspaces", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Workspace.class); @@ -39,7 +39,7 @@ public void delete(DeleteWorkspaceRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public Workspace get(GetWorkspaceRequest request) { 
apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Workspace.class); } catch (IOException e) { @@ -79,7 +79,7 @@ public void update(UpdateWorkspaceRequest request) { apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java new file mode 100755 index 000000000..9814cd933 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = AnomalyDetectionConfig.AnomalyDetectionConfigSerializer.class) +@JsonDeserialize(using = AnomalyDetectionConfig.AnomalyDetectionConfigDeserializer.class) +public class AnomalyDetectionConfig { + /** Run id of the last run of the workflow */ + private String lastRunId; + + /** The status of the last run of the workflow. 
*/ + private AnomalyDetectionRunStatus latestRunStatus; + + public AnomalyDetectionConfig setLastRunId(String lastRunId) { + this.lastRunId = lastRunId; + return this; + } + + public String getLastRunId() { + return lastRunId; + } + + public AnomalyDetectionConfig setLatestRunStatus(AnomalyDetectionRunStatus latestRunStatus) { + this.latestRunStatus = latestRunStatus; + return this; + } + + public AnomalyDetectionRunStatus getLatestRunStatus() { + return latestRunStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; + return Objects.equals(lastRunId, that.lastRunId) + && Objects.equals(latestRunStatus, that.latestRunStatus); + } + + @Override + public int hashCode() { + return Objects.hash(lastRunId, latestRunStatus); + } + + @Override + public String toString() { + return new ToStringer(AnomalyDetectionConfig.class) + .add("lastRunId", lastRunId) + .add("latestRunStatus", latestRunStatus) + .toString(); + } + + AnomalyDetectionConfigPb toPb() { + AnomalyDetectionConfigPb pb = new AnomalyDetectionConfigPb(); + pb.setLastRunId(lastRunId); + pb.setLatestRunStatus(latestRunStatus); + + return pb; + } + + static AnomalyDetectionConfig fromPb(AnomalyDetectionConfigPb pb) { + AnomalyDetectionConfig model = new AnomalyDetectionConfig(); + model.setLastRunId(pb.getLastRunId()); + model.setLatestRunStatus(pb.getLatestRunStatus()); + + return model; + } + + public static class AnomalyDetectionConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + AnomalyDetectionConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AnomalyDetectionConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AnomalyDetectionConfigDeserializer + extends JsonDeserializer { + @Override + public AnomalyDetectionConfig 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AnomalyDetectionConfigPb pb = mapper.readValue(p, AnomalyDetectionConfigPb.class); + return AnomalyDetectionConfig.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfigPb.java new file mode 100755 index 000000000..0e8a7bdd3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AnomalyDetectionConfigPb { + @JsonProperty("last_run_id") + private String lastRunId; + + @JsonProperty("latest_run_status") + private AnomalyDetectionRunStatus latestRunStatus; + + public AnomalyDetectionConfigPb setLastRunId(String lastRunId) { + this.lastRunId = lastRunId; + return this; + } + + public String getLastRunId() { + return lastRunId; + } + + public AnomalyDetectionConfigPb setLatestRunStatus(AnomalyDetectionRunStatus latestRunStatus) { + this.latestRunStatus = latestRunStatus; + return this; + } + + public AnomalyDetectionRunStatus getLatestRunStatus() { + return latestRunStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnomalyDetectionConfigPb that = (AnomalyDetectionConfigPb) o; + return Objects.equals(lastRunId, that.lastRunId) + && 
Objects.equals(latestRunStatus, that.latestRunStatus); + } + + @Override + public int hashCode() { + return Objects.hash(lastRunId, latestRunStatus); + } + + @Override + public String toString() { + return new ToStringer(AnomalyDetectionConfigPb.class) + .add("lastRunId", lastRunId) + .add("latestRunStatus", latestRunStatus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java new file mode 100755 index 000000000..8de4b6bb6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java @@ -0,0 +1,18 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; + +/** Status of Anomaly Detection Job Run */ +@Generated +public enum AnomalyDetectionRunStatus { + ANOMALY_DETECTION_RUN_STATUS_CANCELED, + ANOMALY_DETECTION_RUN_STATUS_FAILED, + ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED, + ANOMALY_DETECTION_RUN_STATUS_PENDING, + ANOMALY_DETECTION_RUN_STATUS_RUNNING, + ANOMALY_DETECTION_RUN_STATUS_SUCCESS, + ANOMALY_DETECTION_RUN_STATUS_UNKNOWN, + ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/Converters.java new file mode 100755 index 000000000..320000cb4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; 
+import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that the seconds component is within the range representable by protobuf Duration + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that the seconds component is within the range representable by protobuf Timestamp + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List<String> fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List<String> fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java new file mode 100755 index 000000000..0ce772561 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Create a quality monitor */ +@Generated +@JsonSerialize(using = CreateQualityMonitorRequest.CreateQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = CreateQualityMonitorRequest.CreateQualityMonitorRequestDeserializer.class) +public class CreateQualityMonitorRequest { + /** */ + private QualityMonitor qualityMonitor; + + public CreateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQualityMonitorRequest that = (CreateQualityMonitorRequest) 
o; + return Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(CreateQualityMonitorRequest.class) + .add("qualityMonitor", qualityMonitor) + .toString(); + } + + CreateQualityMonitorRequestPb toPb() { + CreateQualityMonitorRequestPb pb = new CreateQualityMonitorRequestPb(); + pb.setQualityMonitor(qualityMonitor); + + return pb; + } + + static CreateQualityMonitorRequest fromPb(CreateQualityMonitorRequestPb pb) { + CreateQualityMonitorRequest model = new CreateQualityMonitorRequest(); + model.setQualityMonitor(pb.getQualityMonitor()); + + return model; + } + + public static class CreateQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public CreateQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateQualityMonitorRequestPb pb = mapper.readValue(p, CreateQualityMonitorRequestPb.class); + return CreateQualityMonitorRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequestPb.java new file mode 100755 index 000000000..a66e92cfd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a quality monitor */ +@Generated +class CreateQualityMonitorRequestPb { + @JsonProperty("quality_monitor") + private QualityMonitor qualityMonitor; + + public CreateQualityMonitorRequestPb setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQualityMonitorRequestPb that = (CreateQualityMonitorRequestPb) o; + return Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(CreateQualityMonitorRequestPb.class) + .add("qualityMonitor", qualityMonitor) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java new file mode 100755 index 000000000..ae8b523a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Delete a quality monitor */ +@Generated +@JsonSerialize(using = DeleteQualityMonitorRequest.DeleteQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = DeleteQualityMonitorRequest.DeleteQualityMonitorRequestDeserializer.class) +public class DeleteQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + private String objectType; + + public DeleteQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public DeleteQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQualityMonitorRequest that = (DeleteQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(DeleteQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } + + DeleteQualityMonitorRequestPb toPb() { + DeleteQualityMonitorRequestPb pb = new DeleteQualityMonitorRequestPb(); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static DeleteQualityMonitorRequest fromPb(DeleteQualityMonitorRequestPb pb) { + DeleteQualityMonitorRequest model = new DeleteQualityMonitorRequest(); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class DeleteQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteQualityMonitorRequestPb pb = mapper.readValue(p, DeleteQualityMonitorRequestPb.class); + return DeleteQualityMonitorRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequestPb.java new file mode 100755 index 000000000..b2deba4cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a quality monitor */ +@Generated +class DeleteQualityMonitorRequestPb { + @JsonIgnore private String objectId; + + @JsonIgnore private String objectType; + + public DeleteQualityMonitorRequestPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public DeleteQualityMonitorRequestPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQualityMonitorRequestPb that = (DeleteQualityMonitorRequestPb) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String 
toString() { + return new ToStringer(DeleteQualityMonitorRequestPb.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java new file mode 100755 index 000000000..1d8b8ea84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = DeleteQualityMonitorResponse.DeleteQualityMonitorResponseSerializer.class) +@JsonDeserialize( + using = DeleteQualityMonitorResponse.DeleteQualityMonitorResponseDeserializer.class) +public class DeleteQualityMonitorResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteQualityMonitorResponse.class).toString(); + } + + DeleteQualityMonitorResponsePb toPb() { + DeleteQualityMonitorResponsePb pb = new DeleteQualityMonitorResponsePb(); + + return pb; + } + + static DeleteQualityMonitorResponse fromPb(DeleteQualityMonitorResponsePb pb) { + DeleteQualityMonitorResponse model = new DeleteQualityMonitorResponse(); + + return model; + } + + public static class DeleteQualityMonitorResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteQualityMonitorResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteQualityMonitorResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteQualityMonitorResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteQualityMonitorResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteQualityMonitorResponsePb pb = mapper.readValue(p, DeleteQualityMonitorResponsePb.class); + return DeleteQualityMonitorResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponsePb.java new file mode 100755 index 000000000..42b554c63 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteQualityMonitorResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteQualityMonitorResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java new file mode 100755 index 000000000..f02e57c41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Read a quality monitor */ +@Generated +@JsonSerialize(using = GetQualityMonitorRequest.GetQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = GetQualityMonitorRequest.GetQualityMonitorRequestDeserializer.class) +public class GetQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + private String objectType; + + public GetQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQualityMonitorRequest that = (GetQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } + + GetQualityMonitorRequestPb toPb() { + GetQualityMonitorRequestPb pb = new GetQualityMonitorRequestPb(); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static GetQualityMonitorRequest fromPb(GetQualityMonitorRequestPb pb) { + GetQualityMonitorRequest model = new GetQualityMonitorRequest(); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class GetQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public GetQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQualityMonitorRequestPb pb = mapper.readValue(p, GetQualityMonitorRequestPb.class); + return GetQualityMonitorRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequestPb.java new file mode 100755 index 000000000..0df804de6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Read a quality monitor */ +@Generated +class GetQualityMonitorRequestPb { + @JsonIgnore private String objectId; + + @JsonIgnore private String objectType; + + public GetQualityMonitorRequestPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetQualityMonitorRequestPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQualityMonitorRequestPb that = (GetQualityMonitorRequestPb) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetQualityMonitorRequestPb.class) + .add("objectId", 
objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java new file mode 100755 index 000000000..3437169db --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** List quality monitors */ +@Generated +@JsonSerialize(using = ListQualityMonitorRequest.ListQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = ListQualityMonitorRequest.ListQualityMonitorRequestDeserializer.class) +public class ListQualityMonitorRequest { + /** */ + private Long pageSize; + + /** */ + private String pageToken; + + public ListQualityMonitorRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListQualityMonitorRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorRequest that = (ListQualityMonitorRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListQualityMonitorRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } + + ListQualityMonitorRequestPb toPb() { + ListQualityMonitorRequestPb pb = new ListQualityMonitorRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListQualityMonitorRequest fromPb(ListQualityMonitorRequestPb pb) { + ListQualityMonitorRequest model = new ListQualityMonitorRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListQualityMonitorRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public ListQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQualityMonitorRequestPb pb = mapper.readValue(p, ListQualityMonitorRequestPb.class); + return ListQualityMonitorRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequestPb.java new file mode 100755 index 000000000..bae928401 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List quality monitors */ +@Generated +class ListQualityMonitorRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListQualityMonitorRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListQualityMonitorRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorRequestPb that = (ListQualityMonitorRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String 
toString() { + return new ToStringer(ListQualityMonitorRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java new file mode 100755 index 000000000..8f2f553a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +@Generated +@JsonSerialize(using = ListQualityMonitorResponse.ListQualityMonitorResponseSerializer.class) +@JsonDeserialize(using = ListQualityMonitorResponse.ListQualityMonitorResponseDeserializer.class) +public class ListQualityMonitorResponse { + /** */ + private String nextPageToken; + + /** */ + private Collection qualityMonitors; + + public ListQualityMonitorResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQualityMonitorResponse 
setQualityMonitors(Collection qualityMonitors) { + this.qualityMonitors = qualityMonitors; + return this; + } + + public Collection getQualityMonitors() { + return qualityMonitors; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorResponse that = (ListQualityMonitorResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(qualityMonitors, that.qualityMonitors); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, qualityMonitors); + } + + @Override + public String toString() { + return new ToStringer(ListQualityMonitorResponse.class) + .add("nextPageToken", nextPageToken) + .add("qualityMonitors", qualityMonitors) + .toString(); + } + + ListQualityMonitorResponsePb toPb() { + ListQualityMonitorResponsePb pb = new ListQualityMonitorResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setQualityMonitors(qualityMonitors); + + return pb; + } + + static ListQualityMonitorResponse fromPb(ListQualityMonitorResponsePb pb) { + ListQualityMonitorResponse model = new ListQualityMonitorResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setQualityMonitors(pb.getQualityMonitors()); + + return model; + } + + public static class ListQualityMonitorResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListQualityMonitorResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQualityMonitorResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQualityMonitorResponseDeserializer + extends JsonDeserializer { + @Override + public ListQualityMonitorResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQualityMonitorResponsePb pb = mapper.readValue(p, ListQualityMonitorResponsePb.class); + return ListQualityMonitorResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponsePb.java new file mode 100755 index 000000000..f774c6474 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListQualityMonitorResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("quality_monitors") + private Collection qualityMonitors; + + public ListQualityMonitorResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQualityMonitorResponsePb setQualityMonitors( + Collection qualityMonitors) { + this.qualityMonitors = qualityMonitors; + return this; + } + + public Collection getQualityMonitors() { + return qualityMonitors; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorResponsePb that = (ListQualityMonitorResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(qualityMonitors, that.qualityMonitors); + } + + @Override + public int hashCode() { + return 
Objects.hash(nextPageToken, qualityMonitors); + } + + @Override + public String toString() { + return new ToStringer(ListQualityMonitorResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("qualityMonitors", qualityMonitors) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java new file mode 100755 index 000000000..05c84e286 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = QualityMonitor.QualityMonitorSerializer.class) +@JsonDeserialize(using = QualityMonitor.QualityMonitorDeserializer.class) +public class QualityMonitor { + /** */ + private AnomalyDetectionConfig anomalyDetectionConfig; + + /** The uuid of the request object. For example, schema id. */ + private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + private String objectType; + + public QualityMonitor setAnomalyDetectionConfig(AnomalyDetectionConfig anomalyDetectionConfig) { + this.anomalyDetectionConfig = anomalyDetectionConfig; + return this; + } + + public AnomalyDetectionConfig getAnomalyDetectionConfig() { + return anomalyDetectionConfig; + } + + public QualityMonitor setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public QualityMonitor setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QualityMonitor that = (QualityMonitor) o; + return Objects.equals(anomalyDetectionConfig, that.anomalyDetectionConfig) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(anomalyDetectionConfig, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(QualityMonitor.class) + .add("anomalyDetectionConfig", anomalyDetectionConfig) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } + + QualityMonitorPb toPb() { + QualityMonitorPb pb = new QualityMonitorPb(); + pb.setAnomalyDetectionConfig(anomalyDetectionConfig); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static QualityMonitor fromPb(QualityMonitorPb pb) { + QualityMonitor model = new QualityMonitor(); + model.setAnomalyDetectionConfig(pb.getAnomalyDetectionConfig()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class QualityMonitorSerializer extends JsonSerializer { + @Override + public void serialize(QualityMonitor value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + QualityMonitorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QualityMonitorDeserializer extends JsonDeserializer { + @Override + public QualityMonitor deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QualityMonitorPb pb = mapper.readValue(p, QualityMonitorPb.class); + return QualityMonitor.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorPb.java new file mode 100755 index 000000000..042392255 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QualityMonitorPb { + @JsonProperty("anomaly_detection_config") + private AnomalyDetectionConfig anomalyDetectionConfig; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public QualityMonitorPb setAnomalyDetectionConfig(AnomalyDetectionConfig anomalyDetectionConfig) { + this.anomalyDetectionConfig = anomalyDetectionConfig; + return this; + } + + public AnomalyDetectionConfig getAnomalyDetectionConfig() { + return anomalyDetectionConfig; + } + + public QualityMonitorPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public QualityMonitorPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QualityMonitorPb that = (QualityMonitorPb) o; + return Objects.equals(anomalyDetectionConfig, that.anomalyDetectionConfig) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(anomalyDetectionConfig, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(QualityMonitorPb.class) + .add("anomalyDetectionConfig", anomalyDetectionConfig) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java new file mode 100755 index 000000000..b0a02ea48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java @@ -0,0 +1,109 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage data quality of UC objects (currently support `schema`) */ +@Generated +public class QualityMonitorV2API { + private static final Logger LOG = LoggerFactory.getLogger(QualityMonitorV2API.class); + + private final QualityMonitorV2Service impl; + + /** Regular-use constructor */ + public QualityMonitorV2API(ApiClient apiClient) { + impl = new QualityMonitorV2Impl(apiClient); + } + + /** Constructor for mocks */ + public QualityMonitorV2API(QualityMonitorV2Service mock) { + impl = mock; + } + + public QualityMonitor createQualityMonitor(QualityMonitor qualityMonitor) { + return createQualityMonitor( + new CreateQualityMonitorRequest().setQualityMonitor(qualityMonitor)); + } + + /** + * Create a quality monitor. + * + *

Create a quality monitor on UC object + */ + public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) { + return impl.createQualityMonitor(request); + } + + public void deleteQualityMonitor(String objectType, String objectId) { + deleteQualityMonitor( + new DeleteQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Delete a quality monitor. + * + *

Delete a quality monitor on UC object + */ + public void deleteQualityMonitor(DeleteQualityMonitorRequest request) { + impl.deleteQualityMonitor(request); + } + + public QualityMonitor getQualityMonitor(String objectType, String objectId) { + return getQualityMonitor( + new GetQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Read a quality monitor. + * + *

Read a quality monitor on UC object + */ + public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) { + return impl.getQualityMonitor(request); + } + + /** + * List quality monitors. + * + *

(Unimplemented) List quality monitors + */ + public Iterable listQualityMonitor(ListQualityMonitorRequest request) { + return new Paginator<>( + request, + impl::listQualityMonitor, + ListQualityMonitorResponse::getQualityMonitors, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public QualityMonitor updateQualityMonitor( + String objectType, String objectId, QualityMonitor qualityMonitor) { + return updateQualityMonitor( + new UpdateQualityMonitorRequest() + .setObjectType(objectType) + .setObjectId(objectId) + .setQualityMonitor(qualityMonitor)); + } + + /** + * Update a quality monitor. + * + *

(Unimplemented) Update a quality monitor on UC object + */ + public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) { + return impl.updateQualityMonitor(request); + } + + public QualityMonitorV2Service impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java new file mode 100755 index 000000000..974b3340f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of QualityMonitorV2 */ +@Generated +class QualityMonitorV2Impl implements QualityMonitorV2Service { + private final ApiClient apiClient; + + public QualityMonitorV2Impl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) { + String path = "/api/2.0/quality-monitors"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getQualityMonitor())); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteQualityMonitor(DeleteQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", 
request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteQualityMonitorResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListQualityMonitorResponse listQualityMonitor(ListQualityMonitorRequest request) { + String path = "/api/2.0/quality-monitors"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListQualityMonitorResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("PUT", path, apiClient.serialize(request.getQualityMonitor())); + ApiClient.setQuery(req, request.toPb()); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java new file mode 100755 index 000000000..762b01606 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; + +/** + * Manage data quality of UC objects (currently support `schema`) + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface QualityMonitorV2Service { + /** + * Create a quality monitor. + * + *

Create a quality monitor on UC object + */ + QualityMonitor createQualityMonitor(CreateQualityMonitorRequest createQualityMonitorRequest); + + /** + * Delete a quality monitor. + * + *

Delete a quality monitor on UC object + */ + void deleteQualityMonitor(DeleteQualityMonitorRequest deleteQualityMonitorRequest); + + /** + * Read a quality monitor. + * + *

Read a quality monitor on UC object + */ + QualityMonitor getQualityMonitor(GetQualityMonitorRequest getQualityMonitorRequest); + + /** + * List quality monitors. + * + *

(Unimplemented) List quality monitors + */ + ListQualityMonitorResponse listQualityMonitor( + ListQualityMonitorRequest listQualityMonitorRequest); + + /** + * Update a quality monitor. + * + *

(Unimplemented) Update a quality monitor on UC object + */ + QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest updateQualityMonitorRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java new file mode 100755 index 000000000..cb7af169a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java @@ -0,0 +1,124 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Update a quality monitor */ +@Generated +@JsonSerialize(using = UpdateQualityMonitorRequest.UpdateQualityMonitorRequestSerializer.class) +@JsonDeserialize(using = UpdateQualityMonitorRequest.UpdateQualityMonitorRequestDeserializer.class) +public class UpdateQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + private String objectType; + + /** */ + private QualityMonitor qualityMonitor; + + public UpdateQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public UpdateQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public UpdateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateQualityMonitorRequest that = (UpdateQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(UpdateQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("qualityMonitor", qualityMonitor) + .toString(); + } + + UpdateQualityMonitorRequestPb toPb() { + UpdateQualityMonitorRequestPb pb = new UpdateQualityMonitorRequestPb(); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + pb.setQualityMonitor(qualityMonitor); + + return pb; + } + + static UpdateQualityMonitorRequest fromPb(UpdateQualityMonitorRequestPb pb) { + UpdateQualityMonitorRequest model = new UpdateQualityMonitorRequest(); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + model.setQualityMonitor(pb.getQualityMonitor()); + + return model; + } + + public static class UpdateQualityMonitorRequestSerializer + extends 
JsonSerializer { + @Override + public void serialize( + UpdateQualityMonitorRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateQualityMonitorRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateQualityMonitorRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateQualityMonitorRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateQualityMonitorRequestPb pb = mapper.readValue(p, UpdateQualityMonitorRequestPb.class); + return UpdateQualityMonitorRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequestPb.java new file mode 100755 index 000000000..c992cc597 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a quality monitor */ +@Generated +class UpdateQualityMonitorRequestPb { + @JsonIgnore private String objectId; + + @JsonIgnore private String objectType; + + @JsonProperty("quality_monitor") + private QualityMonitor qualityMonitor; + + public UpdateQualityMonitorRequestPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public UpdateQualityMonitorRequestPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public UpdateQualityMonitorRequestPb setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateQualityMonitorRequestPb that = (UpdateQualityMonitorRequestPb) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(UpdateQualityMonitorRequestPb.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("qualityMonitor", qualityMonitor) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java index 074fb8e5c..2973b244e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Ai21LabsConfig.Ai21LabsConfigSerializer.class) +@JsonDeserialize(using = Ai21LabsConfig.Ai21LabsConfigDeserializer.class) public class Ai21LabsConfig { /** * The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API * key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the * following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`. */ - @JsonProperty("ai21labs_api_key") private String ai21labsApiKey; /** @@ -22,7 +32,6 @@ public class Ai21LabsConfig { * Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the * following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`. 
*/ - @JsonProperty("ai21labs_api_key_plaintext") private String ai21labsApiKeyPlaintext; public Ai21LabsConfig setAi21labsApiKey(String ai21labsApiKey) { @@ -64,4 +73,40 @@ public String toString() { .add("ai21labsApiKeyPlaintext", ai21labsApiKeyPlaintext) .toString(); } + + Ai21LabsConfigPb toPb() { + Ai21LabsConfigPb pb = new Ai21LabsConfigPb(); + pb.setAi21labsApiKey(ai21labsApiKey); + pb.setAi21labsApiKeyPlaintext(ai21labsApiKeyPlaintext); + + return pb; + } + + static Ai21LabsConfig fromPb(Ai21LabsConfigPb pb) { + Ai21LabsConfig model = new Ai21LabsConfig(); + model.setAi21labsApiKey(pb.getAi21labsApiKey()); + model.setAi21labsApiKeyPlaintext(pb.getAi21labsApiKeyPlaintext()); + + return model; + } + + public static class Ai21LabsConfigSerializer extends JsonSerializer { + @Override + public void serialize(Ai21LabsConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + Ai21LabsConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class Ai21LabsConfigDeserializer extends JsonDeserializer { + @Override + public Ai21LabsConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + Ai21LabsConfigPb pb = mapper.readValue(p, Ai21LabsConfigPb.class); + return Ai21LabsConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfigPb.java new file mode 100755 index 000000000..77faee74c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class Ai21LabsConfigPb { + @JsonProperty("ai21labs_api_key") + private String ai21labsApiKey; + + @JsonProperty("ai21labs_api_key_plaintext") + private String ai21labsApiKeyPlaintext; + + public Ai21LabsConfigPb setAi21labsApiKey(String ai21labsApiKey) { + this.ai21labsApiKey = ai21labsApiKey; + return this; + } + + public String getAi21labsApiKey() { + return ai21labsApiKey; + } + + public Ai21LabsConfigPb setAi21labsApiKeyPlaintext(String ai21labsApiKeyPlaintext) { + this.ai21labsApiKeyPlaintext = ai21labsApiKeyPlaintext; + return this; + } + + public String getAi21labsApiKeyPlaintext() { + return ai21labsApiKeyPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Ai21LabsConfigPb that = (Ai21LabsConfigPb) o; + return Objects.equals(ai21labsApiKey, that.ai21labsApiKey) + && Objects.equals(ai21labsApiKeyPlaintext, that.ai21labsApiKeyPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(ai21labsApiKey, ai21labsApiKeyPlaintext); + } + + @Override + public String toString() { + return new ToStringer(Ai21LabsConfigPb.class) + .add("ai21labsApiKey", ai21labsApiKey) + .add("ai21labsApiKeyPlaintext", ai21labsApiKeyPlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java index 6c957db0a..533490d20 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java @@ -4,42 +4,48 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayConfig.AiGatewayConfigSerializer.class) +@JsonDeserialize(using = AiGatewayConfig.AiGatewayConfigDeserializer.class) public class AiGatewayConfig { /** * Configuration for traffic fallback which auto fallbacks to other served entities if the request * to a served entity fails with certain error codes, to increase availability. */ - @JsonProperty("fallback_config") private FallbackConfig fallbackConfig; /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. */ - @JsonProperty("guardrails") private AiGatewayGuardrails guardrails; /** * Configuration for payload logging using inference tables. Use these tables to monitor and audit * data being sent to and received from model APIs and to improve model quality. */ - @JsonProperty("inference_table_config") private AiGatewayInferenceTableConfig inferenceTableConfig; /** Configuration for rate limits which can be set to limit endpoint traffic. */ - @JsonProperty("rate_limits") private Collection rateLimits; /** * Configuration to enable usage tracking using system tables. These tables allow you to monitor * operational usage on endpoints and their associated costs. 
*/ - @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; public AiGatewayConfig setFallbackConfig(FallbackConfig fallbackConfig) { @@ -116,4 +122,46 @@ public String toString() { .add("usageTrackingConfig", usageTrackingConfig) .toString(); } + + AiGatewayConfigPb toPb() { + AiGatewayConfigPb pb = new AiGatewayConfigPb(); + pb.setFallbackConfig(fallbackConfig); + pb.setGuardrails(guardrails); + pb.setInferenceTableConfig(inferenceTableConfig); + pb.setRateLimits(rateLimits); + pb.setUsageTrackingConfig(usageTrackingConfig); + + return pb; + } + + static AiGatewayConfig fromPb(AiGatewayConfigPb pb) { + AiGatewayConfig model = new AiGatewayConfig(); + model.setFallbackConfig(pb.getFallbackConfig()); + model.setGuardrails(pb.getGuardrails()); + model.setInferenceTableConfig(pb.getInferenceTableConfig()); + model.setRateLimits(pb.getRateLimits()); + model.setUsageTrackingConfig(pb.getUsageTrackingConfig()); + + return model; + } + + public static class AiGatewayConfigSerializer extends JsonSerializer { + @Override + public void serialize(AiGatewayConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayConfigDeserializer extends JsonDeserializer { + @Override + public AiGatewayConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayConfigPb pb = mapper.readValue(p, AiGatewayConfigPb.class); + return AiGatewayConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfigPb.java new file mode 100755 index 000000000..707d79c30 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfigPb.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AiGatewayConfigPb { + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + + @JsonProperty("guardrails") + private AiGatewayGuardrails guardrails; + + @JsonProperty("inference_table_config") + private AiGatewayInferenceTableConfig inferenceTableConfig; + + @JsonProperty("rate_limits") + private Collection rateLimits; + + @JsonProperty("usage_tracking_config") + private AiGatewayUsageTrackingConfig usageTrackingConfig; + + public AiGatewayConfigPb setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + + public AiGatewayConfigPb setGuardrails(AiGatewayGuardrails guardrails) { + this.guardrails = guardrails; + return this; + } + + public AiGatewayGuardrails getGuardrails() { + return guardrails; + } + + public AiGatewayConfigPb setInferenceTableConfig( + AiGatewayInferenceTableConfig inferenceTableConfig) { + this.inferenceTableConfig = inferenceTableConfig; + return this; + } + + public AiGatewayInferenceTableConfig 
getInferenceTableConfig() { + return inferenceTableConfig; + } + + public AiGatewayConfigPb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + public AiGatewayConfigPb setUsageTrackingConfig( + AiGatewayUsageTrackingConfig usageTrackingConfig) { + this.usageTrackingConfig = usageTrackingConfig; + return this; + } + + public AiGatewayUsageTrackingConfig getUsageTrackingConfig() { + return usageTrackingConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayConfigPb that = (AiGatewayConfigPb) o; + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) + && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) + && Objects.equals(rateLimits, that.rateLimits) + && Objects.equals(usageTrackingConfig, that.usageTrackingConfig); + } + + @Override + public int hashCode() { + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayConfigPb.class) + .add("fallbackConfig", fallbackConfig) + .add("guardrails", guardrails) + .add("inferenceTableConfig", inferenceTableConfig) + .add("rateLimits", rateLimits) + .add("usageTrackingConfig", usageTrackingConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java index 2d7a5a9e1..4919844db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java @@ -4,32 +4,40 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayGuardrailParameters.AiGatewayGuardrailParametersSerializer.class) +@JsonDeserialize( + using = AiGatewayGuardrailParameters.AiGatewayGuardrailParametersDeserializer.class) public class AiGatewayGuardrailParameters { /** * List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword * exists in the request or response content. */ - @JsonProperty("invalid_keywords") private Collection invalidKeywords; /** Configuration for guardrail PII filter. */ - @JsonProperty("pii") private AiGatewayGuardrailPiiBehavior pii; /** Indicates whether the safety filter is enabled. */ - @JsonProperty("safety") private Boolean safety; /** * The list of allowed topics. Given a chat request, this guardrail flags the request if its topic * is not in the allowed topics. 
*/ - @JsonProperty("valid_topics") private Collection validTopics; public AiGatewayGuardrailParameters setInvalidKeywords(Collection invalidKeywords) { @@ -93,4 +101,47 @@ public String toString() { .add("validTopics", validTopics) .toString(); } + + AiGatewayGuardrailParametersPb toPb() { + AiGatewayGuardrailParametersPb pb = new AiGatewayGuardrailParametersPb(); + pb.setInvalidKeywords(invalidKeywords); + pb.setPii(pii); + pb.setSafety(safety); + pb.setValidTopics(validTopics); + + return pb; + } + + static AiGatewayGuardrailParameters fromPb(AiGatewayGuardrailParametersPb pb) { + AiGatewayGuardrailParameters model = new AiGatewayGuardrailParameters(); + model.setInvalidKeywords(pb.getInvalidKeywords()); + model.setPii(pb.getPii()); + model.setSafety(pb.getSafety()); + model.setValidTopics(pb.getValidTopics()); + + return model; + } + + public static class AiGatewayGuardrailParametersSerializer + extends JsonSerializer { + @Override + public void serialize( + AiGatewayGuardrailParameters value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayGuardrailParametersPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayGuardrailParametersDeserializer + extends JsonDeserializer { + @Override + public AiGatewayGuardrailParameters deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayGuardrailParametersPb pb = mapper.readValue(p, AiGatewayGuardrailParametersPb.class); + return AiGatewayGuardrailParameters.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParametersPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParametersPb.java new file mode 100755 index 000000000..65a8219b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParametersPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AiGatewayGuardrailParametersPb { + @JsonProperty("invalid_keywords") + private Collection invalidKeywords; + + @JsonProperty("pii") + private AiGatewayGuardrailPiiBehavior pii; + + @JsonProperty("safety") + private Boolean safety; + + @JsonProperty("valid_topics") + private Collection validTopics; + + public AiGatewayGuardrailParametersPb setInvalidKeywords(Collection invalidKeywords) { + this.invalidKeywords = invalidKeywords; + return this; + } + + public Collection getInvalidKeywords() { + return invalidKeywords; + } + + public AiGatewayGuardrailParametersPb setPii(AiGatewayGuardrailPiiBehavior pii) { + this.pii = pii; + return this; + } + + public AiGatewayGuardrailPiiBehavior getPii() { + return pii; + } + + public AiGatewayGuardrailParametersPb setSafety(Boolean safety) { + this.safety = safety; + return this; + } + + public Boolean getSafety() { + return safety; + } + + public AiGatewayGuardrailParametersPb setValidTopics(Collection validTopics) { + this.validTopics = validTopics; + return this; + 
} + + public Collection getValidTopics() { + return validTopics; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayGuardrailParametersPb that = (AiGatewayGuardrailParametersPb) o; + return Objects.equals(invalidKeywords, that.invalidKeywords) + && Objects.equals(pii, that.pii) + && Objects.equals(safety, that.safety) + && Objects.equals(validTopics, that.validTopics); + } + + @Override + public int hashCode() { + return Objects.hash(invalidKeywords, pii, safety, validTopics); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayGuardrailParametersPb.class) + .add("invalidKeywords", invalidKeywords) + .add("pii", pii) + .add("safety", safety) + .add("validTopics", validTopics) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java index 3407fd4c8..f5096e897 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayGuardrailPiiBehavior.AiGatewayGuardrailPiiBehaviorSerializer.class) +@JsonDeserialize( + using = AiGatewayGuardrailPiiBehavior.AiGatewayGuardrailPiiBehaviorDeserializer.class) public class AiGatewayGuardrailPiiBehavior { /** Configuration for input guardrail filters. */ - @JsonProperty("behavior") private AiGatewayGuardrailPiiBehaviorBehavior behavior; public AiGatewayGuardrailPiiBehavior setBehavior(AiGatewayGuardrailPiiBehaviorBehavior behavior) { @@ -39,4 +50,42 @@ public int hashCode() { public String toString() { return new ToStringer(AiGatewayGuardrailPiiBehavior.class).add("behavior", behavior).toString(); } + + AiGatewayGuardrailPiiBehaviorPb toPb() { + AiGatewayGuardrailPiiBehaviorPb pb = new AiGatewayGuardrailPiiBehaviorPb(); + pb.setBehavior(behavior); + + return pb; + } + + static AiGatewayGuardrailPiiBehavior fromPb(AiGatewayGuardrailPiiBehaviorPb pb) { + AiGatewayGuardrailPiiBehavior model = new AiGatewayGuardrailPiiBehavior(); + model.setBehavior(pb.getBehavior()); + + return model; + } + + public static class AiGatewayGuardrailPiiBehaviorSerializer + extends JsonSerializer { + @Override + public void serialize( + AiGatewayGuardrailPiiBehavior value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayGuardrailPiiBehaviorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayGuardrailPiiBehaviorDeserializer + extends JsonDeserializer { + @Override + public AiGatewayGuardrailPiiBehavior deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayGuardrailPiiBehaviorPb pb = + mapper.readValue(p, AiGatewayGuardrailPiiBehaviorPb.class); + return AiGatewayGuardrailPiiBehavior.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorPb.java new file mode 100755 index 000000000..94332dcb2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AiGatewayGuardrailPiiBehaviorPb { + @JsonProperty("behavior") + private AiGatewayGuardrailPiiBehaviorBehavior behavior; + + public AiGatewayGuardrailPiiBehaviorPb setBehavior( + AiGatewayGuardrailPiiBehaviorBehavior behavior) { + this.behavior = behavior; + return this; + } + + public AiGatewayGuardrailPiiBehaviorBehavior getBehavior() { + return behavior; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayGuardrailPiiBehaviorPb that = (AiGatewayGuardrailPiiBehaviorPb) o; + return Objects.equals(behavior, that.behavior); + } + + @Override + public int hashCode() { + return Objects.hash(behavior); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayGuardrailPiiBehaviorPb.class) + .add("behavior", behavior) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrails.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrails.java index 51c414358..1926356bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrails.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayGuardrails.AiGatewayGuardrailsSerializer.class) +@JsonDeserialize(using = AiGatewayGuardrails.AiGatewayGuardrailsDeserializer.class) public class AiGatewayGuardrails { /** Configuration for input guardrail filters. */ - @JsonProperty("input") private AiGatewayGuardrailParameters input; /** Configuration for output guardrail filters. 
*/ - @JsonProperty("output") private AiGatewayGuardrailParameters output; public AiGatewayGuardrails setInput(AiGatewayGuardrailParameters input) { @@ -55,4 +64,41 @@ public String toString() { .add("output", output) .toString(); } + + AiGatewayGuardrailsPb toPb() { + AiGatewayGuardrailsPb pb = new AiGatewayGuardrailsPb(); + pb.setInput(input); + pb.setOutput(output); + + return pb; + } + + static AiGatewayGuardrails fromPb(AiGatewayGuardrailsPb pb) { + AiGatewayGuardrails model = new AiGatewayGuardrails(); + model.setInput(pb.getInput()); + model.setOutput(pb.getOutput()); + + return model; + } + + public static class AiGatewayGuardrailsSerializer extends JsonSerializer { + @Override + public void serialize(AiGatewayGuardrails value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayGuardrailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayGuardrailsDeserializer + extends JsonDeserializer { + @Override + public AiGatewayGuardrails deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayGuardrailsPb pb = mapper.readValue(p, AiGatewayGuardrailsPb.class); + return AiGatewayGuardrails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailsPb.java new file mode 100755 index 000000000..e41147ed2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailsPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AiGatewayGuardrailsPb { + @JsonProperty("input") + private AiGatewayGuardrailParameters input; + + @JsonProperty("output") + private AiGatewayGuardrailParameters output; + + public AiGatewayGuardrailsPb setInput(AiGatewayGuardrailParameters input) { + this.input = input; + return this; + } + + public AiGatewayGuardrailParameters getInput() { + return input; + } + + public AiGatewayGuardrailsPb setOutput(AiGatewayGuardrailParameters output) { + this.output = output; + return this; + } + + public AiGatewayGuardrailParameters getOutput() { + return output; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayGuardrailsPb that = (AiGatewayGuardrailsPb) o; + return Objects.equals(input, that.input) && Objects.equals(output, that.output); + } + + @Override + public int hashCode() { + return Objects.hash(input, output); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayGuardrailsPb.class) + .add("input", input) + .add("output", output) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfig.java index 42475d18b..f25b1fd05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfig.java @@ -4,34 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayInferenceTableConfig.AiGatewayInferenceTableConfigSerializer.class) +@JsonDeserialize( + using = AiGatewayInferenceTableConfig.AiGatewayInferenceTableConfigDeserializer.class) public class AiGatewayInferenceTableConfig { /** * The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On * update, you have to disable inference table first in order to change the catalog name. */ - @JsonProperty("catalog_name") private String catalogName; /** Indicates whether the inference table is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** * The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On * update, you have to disable inference table first in order to change the schema name. */ - @JsonProperty("schema_name") private String schemaName; /** * The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table * first in order to change the prefix name. 
*/ - @JsonProperty("table_name_prefix") private String tableNamePrefix; public AiGatewayInferenceTableConfig setCatalogName(String catalogName) { @@ -95,4 +103,48 @@ public String toString() { .add("tableNamePrefix", tableNamePrefix) .toString(); } + + AiGatewayInferenceTableConfigPb toPb() { + AiGatewayInferenceTableConfigPb pb = new AiGatewayInferenceTableConfigPb(); + pb.setCatalogName(catalogName); + pb.setEnabled(enabled); + pb.setSchemaName(schemaName); + pb.setTableNamePrefix(tableNamePrefix); + + return pb; + } + + static AiGatewayInferenceTableConfig fromPb(AiGatewayInferenceTableConfigPb pb) { + AiGatewayInferenceTableConfig model = new AiGatewayInferenceTableConfig(); + model.setCatalogName(pb.getCatalogName()); + model.setEnabled(pb.getEnabled()); + model.setSchemaName(pb.getSchemaName()); + model.setTableNamePrefix(pb.getTableNamePrefix()); + + return model; + } + + public static class AiGatewayInferenceTableConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + AiGatewayInferenceTableConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayInferenceTableConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayInferenceTableConfigDeserializer + extends JsonDeserializer { + @Override + public AiGatewayInferenceTableConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayInferenceTableConfigPb pb = + mapper.readValue(p, AiGatewayInferenceTableConfigPb.class); + return AiGatewayInferenceTableConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfigPb.java new file mode 100755 index 000000000..c3ed5b6c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfigPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AiGatewayInferenceTableConfigPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("table_name_prefix") + private String tableNamePrefix; + + public AiGatewayInferenceTableConfigPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public AiGatewayInferenceTableConfigPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public AiGatewayInferenceTableConfigPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public AiGatewayInferenceTableConfigPb setTableNamePrefix(String tableNamePrefix) { + this.tableNamePrefix = tableNamePrefix; + return this; + } + + public String getTableNamePrefix() { + 
return tableNamePrefix; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayInferenceTableConfigPb that = (AiGatewayInferenceTableConfigPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(enabled, that.enabled) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(tableNamePrefix, that.tableNamePrefix); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, enabled, schemaName, tableNamePrefix); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayInferenceTableConfigPb.class) + .add("catalogName", catalogName) + .add("enabled", enabled) + .add("schemaName", schemaName) + .add("tableNamePrefix", tableNamePrefix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java index 5c1066f20..995383c79 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayRateLimit.AiGatewayRateLimitSerializer.class) +@JsonDeserialize(using = AiGatewayRateLimit.AiGatewayRateLimitDeserializer.class) public class AiGatewayRateLimit { /** Used to specify how many calls are allowed for a key within the renewal_period. */ - @JsonProperty("calls") private Long calls; /** * Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with * 'endpoint' being the default if not specified. */ - @JsonProperty("key") private AiGatewayRateLimitKey key; /** Renewal period field for a rate limit. Currently, only 'minute' is supported. */ - @JsonProperty("renewal_period") private AiGatewayRateLimitRenewalPeriod renewalPeriod; public AiGatewayRateLimit setCalls(Long calls) { @@ -74,4 +82,42 @@ public String toString() { .add("renewalPeriod", renewalPeriod) .toString(); } + + AiGatewayRateLimitPb toPb() { + AiGatewayRateLimitPb pb = new AiGatewayRateLimitPb(); + pb.setCalls(calls); + pb.setKey(key); + pb.setRenewalPeriod(renewalPeriod); + + return pb; + } + + static AiGatewayRateLimit fromPb(AiGatewayRateLimitPb pb) { + AiGatewayRateLimit model = new AiGatewayRateLimit(); + model.setCalls(pb.getCalls()); + model.setKey(pb.getKey()); + model.setRenewalPeriod(pb.getRenewalPeriod()); + + return model; + } + + public static class AiGatewayRateLimitSerializer extends JsonSerializer { + @Override + public void serialize(AiGatewayRateLimit value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayRateLimitPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayRateLimitDeserializer extends JsonDeserializer { + @Override + public AiGatewayRateLimit deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayRateLimitPb pb = mapper.readValue(p, AiGatewayRateLimitPb.class); + return AiGatewayRateLimit.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitPb.java new file mode 100755 index 000000000..00bced255 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AiGatewayRateLimitPb { + @JsonProperty("calls") + private Long calls; + + @JsonProperty("key") + private AiGatewayRateLimitKey key; + + @JsonProperty("renewal_period") + private AiGatewayRateLimitRenewalPeriod renewalPeriod; + + public AiGatewayRateLimitPb setCalls(Long calls) { + this.calls = calls; + return this; + } + + public Long getCalls() { + return calls; + } + + public AiGatewayRateLimitPb setKey(AiGatewayRateLimitKey key) { + this.key = key; + return this; + } + + public AiGatewayRateLimitKey getKey() { + return key; + } + + public AiGatewayRateLimitPb setRenewalPeriod(AiGatewayRateLimitRenewalPeriod renewalPeriod) { + this.renewalPeriod = renewalPeriod; + return this; + } + + public AiGatewayRateLimitRenewalPeriod getRenewalPeriod() { + return renewalPeriod; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayRateLimitPb that = (AiGatewayRateLimitPb) o; + return Objects.equals(calls, that.calls) + && Objects.equals(key, that.key) + && Objects.equals(renewalPeriod, 
that.renewalPeriod); + } + + @Override + public int hashCode() { + return Objects.hash(calls, key, renewalPeriod); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayRateLimitPb.class) + .add("calls", calls) + .add("key", key) + .add("renewalPeriod", renewalPeriod) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfig.java index e002231d4..e3ceb8c2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfig.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AiGatewayUsageTrackingConfig.AiGatewayUsageTrackingConfigSerializer.class) +@JsonDeserialize( + using = AiGatewayUsageTrackingConfig.AiGatewayUsageTrackingConfigDeserializer.class) public class AiGatewayUsageTrackingConfig { /** Whether to enable usage tracking. 
*/ - @JsonProperty("enabled") private Boolean enabled; public AiGatewayUsageTrackingConfig setEnabled(Boolean enabled) { @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(AiGatewayUsageTrackingConfig.class).add("enabled", enabled).toString(); } + + AiGatewayUsageTrackingConfigPb toPb() { + AiGatewayUsageTrackingConfigPb pb = new AiGatewayUsageTrackingConfigPb(); + pb.setEnabled(enabled); + + return pb; + } + + static AiGatewayUsageTrackingConfig fromPb(AiGatewayUsageTrackingConfigPb pb) { + AiGatewayUsageTrackingConfig model = new AiGatewayUsageTrackingConfig(); + model.setEnabled(pb.getEnabled()); + + return model; + } + + public static class AiGatewayUsageTrackingConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + AiGatewayUsageTrackingConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AiGatewayUsageTrackingConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AiGatewayUsageTrackingConfigDeserializer + extends JsonDeserializer { + @Override + public AiGatewayUsageTrackingConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AiGatewayUsageTrackingConfigPb pb = mapper.readValue(p, AiGatewayUsageTrackingConfigPb.class); + return AiGatewayUsageTrackingConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfigPb.java new file mode 100755 index 000000000..35ebdcf19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfigPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AiGatewayUsageTrackingConfigPb { + @JsonProperty("enabled") + private Boolean enabled; + + public AiGatewayUsageTrackingConfigPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AiGatewayUsageTrackingConfigPb that = (AiGatewayUsageTrackingConfigPb) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(AiGatewayUsageTrackingConfigPb.class).add("enabled", enabled).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java index 430410354..a9ee2ea56 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AmazonBedrockConfig.AmazonBedrockConfigSerializer.class) +@JsonDeserialize(using = AmazonBedrockConfig.AmazonBedrockConfigDeserializer.class) public class AmazonBedrockConfig { /** * The Databricks secret key reference for an AWS access key ID with permissions to interact with @@ -15,7 +26,6 @@ public class AmazonBedrockConfig { * `aws_access_key_id_plaintext`. You must provide an API key using one of the following fields: * `aws_access_key_id` or `aws_access_key_id_plaintext`. */ - @JsonProperty("aws_access_key_id") private String awsAccessKeyId; /** @@ -24,11 +34,9 @@ public class AmazonBedrockConfig { * You must provide an API key using one of the following fields: `aws_access_key_id` or * `aws_access_key_id_plaintext`. */ - @JsonProperty("aws_access_key_id_plaintext") private String awsAccessKeyIdPlaintext; /** The AWS region to use. Bedrock has to be enabled there. 
*/ - @JsonProperty("aws_region") private String awsRegion; /** @@ -37,7 +45,6 @@ public class AmazonBedrockConfig { * directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the * following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`. */ - @JsonProperty("aws_secret_access_key") private String awsSecretAccessKey; /** @@ -46,14 +53,12 @@ public class AmazonBedrockConfig { * Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the * following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`. */ - @JsonProperty("aws_secret_access_key_plaintext") private String awsSecretAccessKeyPlaintext; /** * The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: * Anthropic, Cohere, AI21Labs, Amazon. */ - @JsonProperty("bedrock_provider") private AmazonBedrockConfigBedrockProvider bedrockProvider; /** @@ -62,7 +67,6 @@ public class AmazonBedrockConfig { * access keys, see `aws_access_key_id`, `aws_access_key_id_plaintext`, `aws_secret_access_key` * and `aws_secret_access_key_plaintext`. 
*/ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; public AmazonBedrockConfig setAwsAccessKeyId(String awsAccessKeyId) { @@ -167,4 +171,51 @@ public String toString() { .add("instanceProfileArn", instanceProfileArn) .toString(); } + + AmazonBedrockConfigPb toPb() { + AmazonBedrockConfigPb pb = new AmazonBedrockConfigPb(); + pb.setAwsAccessKeyId(awsAccessKeyId); + pb.setAwsAccessKeyIdPlaintext(awsAccessKeyIdPlaintext); + pb.setAwsRegion(awsRegion); + pb.setAwsSecretAccessKey(awsSecretAccessKey); + pb.setAwsSecretAccessKeyPlaintext(awsSecretAccessKeyPlaintext); + pb.setBedrockProvider(bedrockProvider); + pb.setInstanceProfileArn(instanceProfileArn); + + return pb; + } + + static AmazonBedrockConfig fromPb(AmazonBedrockConfigPb pb) { + AmazonBedrockConfig model = new AmazonBedrockConfig(); + model.setAwsAccessKeyId(pb.getAwsAccessKeyId()); + model.setAwsAccessKeyIdPlaintext(pb.getAwsAccessKeyIdPlaintext()); + model.setAwsRegion(pb.getAwsRegion()); + model.setAwsSecretAccessKey(pb.getAwsSecretAccessKey()); + model.setAwsSecretAccessKeyPlaintext(pb.getAwsSecretAccessKeyPlaintext()); + model.setBedrockProvider(pb.getBedrockProvider()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + + return model; + } + + public static class AmazonBedrockConfigSerializer extends JsonSerializer { + @Override + public void serialize(AmazonBedrockConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AmazonBedrockConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AmazonBedrockConfigDeserializer + extends JsonDeserializer { + @Override + public AmazonBedrockConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AmazonBedrockConfigPb pb = mapper.readValue(p, AmazonBedrockConfigPb.class); + return AmazonBedrockConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigPb.java new file mode 100755 index 000000000..404ed8abf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigPb.java @@ -0,0 +1,135 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AmazonBedrockConfigPb { + @JsonProperty("aws_access_key_id") + private String awsAccessKeyId; + + @JsonProperty("aws_access_key_id_plaintext") + private String awsAccessKeyIdPlaintext; + + @JsonProperty("aws_region") + private String awsRegion; + + @JsonProperty("aws_secret_access_key") + private String awsSecretAccessKey; + + @JsonProperty("aws_secret_access_key_plaintext") + private String awsSecretAccessKeyPlaintext; + + @JsonProperty("bedrock_provider") + private AmazonBedrockConfigBedrockProvider bedrockProvider; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + public AmazonBedrockConfigPb setAwsAccessKeyId(String awsAccessKeyId) { + this.awsAccessKeyId = awsAccessKeyId; + return this; + } + + public String getAwsAccessKeyId() { + return awsAccessKeyId; + } + + public AmazonBedrockConfigPb setAwsAccessKeyIdPlaintext(String awsAccessKeyIdPlaintext) { + this.awsAccessKeyIdPlaintext = awsAccessKeyIdPlaintext; + return this; + } + + public String getAwsAccessKeyIdPlaintext() { + return awsAccessKeyIdPlaintext; + } + + public AmazonBedrockConfigPb 
setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + return this; + } + + public String getAwsRegion() { + return awsRegion; + } + + public AmazonBedrockConfigPb setAwsSecretAccessKey(String awsSecretAccessKey) { + this.awsSecretAccessKey = awsSecretAccessKey; + return this; + } + + public String getAwsSecretAccessKey() { + return awsSecretAccessKey; + } + + public AmazonBedrockConfigPb setAwsSecretAccessKeyPlaintext(String awsSecretAccessKeyPlaintext) { + this.awsSecretAccessKeyPlaintext = awsSecretAccessKeyPlaintext; + return this; + } + + public String getAwsSecretAccessKeyPlaintext() { + return awsSecretAccessKeyPlaintext; + } + + public AmazonBedrockConfigPb setBedrockProvider( + AmazonBedrockConfigBedrockProvider bedrockProvider) { + this.bedrockProvider = bedrockProvider; + return this; + } + + public AmazonBedrockConfigBedrockProvider getBedrockProvider() { + return bedrockProvider; + } + + public AmazonBedrockConfigPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AmazonBedrockConfigPb that = (AmazonBedrockConfigPb) o; + return Objects.equals(awsAccessKeyId, that.awsAccessKeyId) + && Objects.equals(awsAccessKeyIdPlaintext, that.awsAccessKeyIdPlaintext) + && Objects.equals(awsRegion, that.awsRegion) + && Objects.equals(awsSecretAccessKey, that.awsSecretAccessKey) + && Objects.equals(awsSecretAccessKeyPlaintext, that.awsSecretAccessKeyPlaintext) + && Objects.equals(bedrockProvider, that.bedrockProvider) + && Objects.equals(instanceProfileArn, that.instanceProfileArn); + } + + @Override + public int hashCode() { + return Objects.hash( + awsAccessKeyId, + awsAccessKeyIdPlaintext, + awsRegion, + awsSecretAccessKey, + awsSecretAccessKeyPlaintext, + bedrockProvider, 
+ instanceProfileArn); + } + + @Override + public String toString() { + return new ToStringer(AmazonBedrockConfigPb.class) + .add("awsAccessKeyId", awsAccessKeyId) + .add("awsAccessKeyIdPlaintext", awsAccessKeyIdPlaintext) + .add("awsRegion", awsRegion) + .add("awsSecretAccessKey", awsSecretAccessKey) + .add("awsSecretAccessKeyPlaintext", awsSecretAccessKeyPlaintext) + .add("bedrockProvider", bedrockProvider) + .add("instanceProfileArn", instanceProfileArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java index 021c070e3..18f35a829 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AnthropicConfig.AnthropicConfigSerializer.class) +@JsonDeserialize(using = AnthropicConfig.AnthropicConfigDeserializer.class) public class AnthropicConfig { /** * The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API * key directly, see `anthropic_api_key_plaintext`. 
You must provide an API key using one of the * following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`. */ - @JsonProperty("anthropic_api_key") private String anthropicApiKey; /** @@ -22,7 +32,6 @@ public class AnthropicConfig { * Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the * following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`. */ - @JsonProperty("anthropic_api_key_plaintext") private String anthropicApiKeyPlaintext; public AnthropicConfig setAnthropicApiKey(String anthropicApiKey) { @@ -64,4 +73,40 @@ public String toString() { .add("anthropicApiKeyPlaintext", anthropicApiKeyPlaintext) .toString(); } + + AnthropicConfigPb toPb() { + AnthropicConfigPb pb = new AnthropicConfigPb(); + pb.setAnthropicApiKey(anthropicApiKey); + pb.setAnthropicApiKeyPlaintext(anthropicApiKeyPlaintext); + + return pb; + } + + static AnthropicConfig fromPb(AnthropicConfigPb pb) { + AnthropicConfig model = new AnthropicConfig(); + model.setAnthropicApiKey(pb.getAnthropicApiKey()); + model.setAnthropicApiKeyPlaintext(pb.getAnthropicApiKeyPlaintext()); + + return model; + } + + public static class AnthropicConfigSerializer extends JsonSerializer { + @Override + public void serialize(AnthropicConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AnthropicConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AnthropicConfigDeserializer extends JsonDeserializer { + @Override + public AnthropicConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AnthropicConfigPb pb = mapper.readValue(p, AnthropicConfigPb.class); + return AnthropicConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfigPb.java new file mode 100755 index 000000000..6fca85309 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AnthropicConfigPb { + @JsonProperty("anthropic_api_key") + private String anthropicApiKey; + + @JsonProperty("anthropic_api_key_plaintext") + private String anthropicApiKeyPlaintext; + + public AnthropicConfigPb setAnthropicApiKey(String anthropicApiKey) { + this.anthropicApiKey = anthropicApiKey; + return this; + } + + public String getAnthropicApiKey() { + return anthropicApiKey; + } + + public AnthropicConfigPb setAnthropicApiKeyPlaintext(String anthropicApiKeyPlaintext) { + this.anthropicApiKeyPlaintext = anthropicApiKeyPlaintext; + return this; + } + + public String getAnthropicApiKeyPlaintext() { + return anthropicApiKeyPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnthropicConfigPb that = (AnthropicConfigPb) o; + return Objects.equals(anthropicApiKey, that.anthropicApiKey) + && Objects.equals(anthropicApiKeyPlaintext, that.anthropicApiKeyPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(anthropicApiKey, anthropicApiKeyPlaintext); + } + + @Override + public String 
toString() { + return new ToStringer(AnthropicConfigPb.class) + .add("anthropicApiKey", anthropicApiKey) + .add("anthropicApiKeyPlaintext", anthropicApiKeyPlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java index 0a87654f7..872ea4392 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java @@ -4,27 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ApiKeyAuth.ApiKeyAuthSerializer.class) +@JsonDeserialize(using = ApiKeyAuth.ApiKeyAuthDeserializer.class) public class ApiKeyAuth { /** The name of the API key parameter used for authentication. */ - @JsonProperty("key") private String key; /** * The Databricks secret key reference for an API Key. If you prefer to paste your token directly, * see `value_plaintext`. */ - @JsonProperty("value") private String value; /** * The API Key provided as a plaintext string. If you prefer to reference your token using * Databricks Secrets, see `value`. 
*/ - @JsonProperty("value_plaintext") private String valuePlaintext; public ApiKeyAuth setKey(String key) { @@ -77,4 +85,41 @@ public String toString() { .add("valuePlaintext", valuePlaintext) .toString(); } + + ApiKeyAuthPb toPb() { + ApiKeyAuthPb pb = new ApiKeyAuthPb(); + pb.setKey(key); + pb.setValue(value); + pb.setValuePlaintext(valuePlaintext); + + return pb; + } + + static ApiKeyAuth fromPb(ApiKeyAuthPb pb) { + ApiKeyAuth model = new ApiKeyAuth(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + model.setValuePlaintext(pb.getValuePlaintext()); + + return model; + } + + public static class ApiKeyAuthSerializer extends JsonSerializer { + @Override + public void serialize(ApiKeyAuth value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ApiKeyAuthPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ApiKeyAuthDeserializer extends JsonDeserializer { + @Override + public ApiKeyAuth deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ApiKeyAuthPb pb = mapper.readValue(p, ApiKeyAuthPb.class); + return ApiKeyAuth.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuthPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuthPb.java new file mode 100755 index 000000000..d64ff76ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuthPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ApiKeyAuthPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + @JsonProperty("value_plaintext") + private String valuePlaintext; + + public ApiKeyAuthPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ApiKeyAuthPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + public ApiKeyAuthPb setValuePlaintext(String valuePlaintext) { + this.valuePlaintext = valuePlaintext; + return this; + } + + public String getValuePlaintext() { + return valuePlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApiKeyAuthPb that = (ApiKeyAuthPb) o; + return Objects.equals(key, that.key) + && Objects.equals(value, that.value) + && Objects.equals(valuePlaintext, that.valuePlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(key, value, valuePlaintext); + } + + @Override + public String toString() { + return new ToStringer(ApiKeyAuthPb.class) + .add("key", key) + .add("value", value) + .add("valuePlaintext", valuePlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java index e34add98e..771ed03d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java @@ -4,34 +4,41 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AutoCaptureConfigInput.AutoCaptureConfigInputSerializer.class) +@JsonDeserialize(using = AutoCaptureConfigInput.AutoCaptureConfigInputDeserializer.class) public class AutoCaptureConfigInput { /** * The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name * if the inference table is already enabled. */ - @JsonProperty("catalog_name") private String catalogName; /** Indicates whether the inference table is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** * The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if * the inference table is already enabled. */ - @JsonProperty("schema_name") private String schemaName; /** * The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if * the inference table is already enabled. 
*/ - @JsonProperty("table_name_prefix") private String tableNamePrefix; public AutoCaptureConfigInput setCatalogName(String catalogName) { @@ -95,4 +102,47 @@ public String toString() { .add("tableNamePrefix", tableNamePrefix) .toString(); } + + AutoCaptureConfigInputPb toPb() { + AutoCaptureConfigInputPb pb = new AutoCaptureConfigInputPb(); + pb.setCatalogName(catalogName); + pb.setEnabled(enabled); + pb.setSchemaName(schemaName); + pb.setTableNamePrefix(tableNamePrefix); + + return pb; + } + + static AutoCaptureConfigInput fromPb(AutoCaptureConfigInputPb pb) { + AutoCaptureConfigInput model = new AutoCaptureConfigInput(); + model.setCatalogName(pb.getCatalogName()); + model.setEnabled(pb.getEnabled()); + model.setSchemaName(pb.getSchemaName()); + model.setTableNamePrefix(pb.getTableNamePrefix()); + + return model; + } + + public static class AutoCaptureConfigInputSerializer + extends JsonSerializer { + @Override + public void serialize( + AutoCaptureConfigInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AutoCaptureConfigInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AutoCaptureConfigInputDeserializer + extends JsonDeserializer { + @Override + public AutoCaptureConfigInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AutoCaptureConfigInputPb pb = mapper.readValue(p, AutoCaptureConfigInputPb.class); + return AutoCaptureConfigInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInputPb.java new file mode 100755 index 000000000..ecb92832e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInputPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AutoCaptureConfigInputPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("table_name_prefix") + private String tableNamePrefix; + + public AutoCaptureConfigInputPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public AutoCaptureConfigInputPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public AutoCaptureConfigInputPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public AutoCaptureConfigInputPb setTableNamePrefix(String tableNamePrefix) { + this.tableNamePrefix = tableNamePrefix; + return this; + } + + public String getTableNamePrefix() { + return tableNamePrefix; + } + + @Override + public boolean equals(Object o) { + 
if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoCaptureConfigInputPb that = (AutoCaptureConfigInputPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(enabled, that.enabled) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(tableNamePrefix, that.tableNamePrefix); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, enabled, schemaName, tableNamePrefix); + } + + @Override + public String toString() { + return new ToStringer(AutoCaptureConfigInputPb.class) + .add("catalogName", catalogName) + .add("enabled", enabled) + .add("schemaName", schemaName) + .add("tableNamePrefix", tableNamePrefix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java index cb1665074..0c633618e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java @@ -4,38 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
AutoCaptureConfigOutput.AutoCaptureConfigOutputSerializer.class) +@JsonDeserialize(using = AutoCaptureConfigOutput.AutoCaptureConfigOutputDeserializer.class) public class AutoCaptureConfigOutput { /** * The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name * if the inference table is already enabled. */ - @JsonProperty("catalog_name") private String catalogName; /** Indicates whether the inference table is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** * The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if * the inference table is already enabled. */ - @JsonProperty("schema_name") private String schemaName; /** */ - @JsonProperty("state") private AutoCaptureState state; /** * The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if * the inference table is already enabled. */ - @JsonProperty("table_name_prefix") private String tableNamePrefix; public AutoCaptureConfigOutput setCatalogName(String catalogName) { @@ -110,4 +116,49 @@ public String toString() { .add("tableNamePrefix", tableNamePrefix) .toString(); } + + AutoCaptureConfigOutputPb toPb() { + AutoCaptureConfigOutputPb pb = new AutoCaptureConfigOutputPb(); + pb.setCatalogName(catalogName); + pb.setEnabled(enabled); + pb.setSchemaName(schemaName); + pb.setState(state); + pb.setTableNamePrefix(tableNamePrefix); + + return pb; + } + + static AutoCaptureConfigOutput fromPb(AutoCaptureConfigOutputPb pb) { + AutoCaptureConfigOutput model = new AutoCaptureConfigOutput(); + model.setCatalogName(pb.getCatalogName()); + model.setEnabled(pb.getEnabled()); + model.setSchemaName(pb.getSchemaName()); + model.setState(pb.getState()); + model.setTableNamePrefix(pb.getTableNamePrefix()); + + return model; + } + + public static class AutoCaptureConfigOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + AutoCaptureConfigOutput value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + AutoCaptureConfigOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AutoCaptureConfigOutputDeserializer + extends JsonDeserializer { + @Override + public AutoCaptureConfigOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AutoCaptureConfigOutputPb pb = mapper.readValue(p, AutoCaptureConfigOutputPb.class); + return AutoCaptureConfigOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutputPb.java new file mode 100755 index 000000000..1d11e6462 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutputPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AutoCaptureConfigOutputPb { + @JsonProperty("catalog_name") + private String catalogName; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("state") + private AutoCaptureState state; + + @JsonProperty("table_name_prefix") + private String tableNamePrefix; + + public AutoCaptureConfigOutputPb setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public AutoCaptureConfigOutputPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public AutoCaptureConfigOutputPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public AutoCaptureConfigOutputPb setState(AutoCaptureState state) { + this.state = state; + return this; + } + + public AutoCaptureState getState() { + return state; + } + + public AutoCaptureConfigOutputPb setTableNamePrefix(String tableNamePrefix) { + this.tableNamePrefix = tableNamePrefix; + return this; + } + + public String getTableNamePrefix() { + return tableNamePrefix; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoCaptureConfigOutputPb that = (AutoCaptureConfigOutputPb) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(enabled, that.enabled) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(state, that.state) + && Objects.equals(tableNamePrefix, that.tableNamePrefix); + } + + @Override + public 
int hashCode() { + return Objects.hash(catalogName, enabled, schemaName, state, tableNamePrefix); + } + + @Override + public String toString() { + return new ToStringer(AutoCaptureConfigOutputPb.class) + .add("catalogName", catalogName) + .add("enabled", enabled) + .add("schemaName", schemaName) + .add("state", state) + .add("tableNamePrefix", tableNamePrefix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java index d08c23e6d..3b944974f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AutoCaptureState.AutoCaptureStateSerializer.class) +@JsonDeserialize(using = AutoCaptureState.AutoCaptureStateDeserializer.class) public class AutoCaptureState { /** */ - @JsonProperty("payload_table") private PayloadTable payloadTable; public AutoCaptureState setPayloadTable(PayloadTable payloadTable) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new 
ToStringer(AutoCaptureState.class).add("payloadTable", payloadTable).toString(); } + + AutoCaptureStatePb toPb() { + AutoCaptureStatePb pb = new AutoCaptureStatePb(); + pb.setPayloadTable(payloadTable); + + return pb; + } + + static AutoCaptureState fromPb(AutoCaptureStatePb pb) { + AutoCaptureState model = new AutoCaptureState(); + model.setPayloadTable(pb.getPayloadTable()); + + return model; + } + + public static class AutoCaptureStateSerializer extends JsonSerializer { + @Override + public void serialize(AutoCaptureState value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AutoCaptureStatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AutoCaptureStateDeserializer extends JsonDeserializer { + @Override + public AutoCaptureState deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AutoCaptureStatePb pb = mapper.readValue(p, AutoCaptureStatePb.class); + return AutoCaptureState.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureStatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureStatePb.java new file mode 100755 index 000000000..1fc35258e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureStatePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AutoCaptureStatePb { + @JsonProperty("payload_table") + private PayloadTable payloadTable; + + public AutoCaptureStatePb setPayloadTable(PayloadTable payloadTable) { + this.payloadTable = payloadTable; + return this; + } + + public PayloadTable getPayloadTable() { + return payloadTable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoCaptureStatePb that = (AutoCaptureStatePb) o; + return Objects.equals(payloadTable, that.payloadTable); + } + + @Override + public int hashCode() { + return Objects.hash(payloadTable); + } + + @Override + public String toString() { + return new ToStringer(AutoCaptureStatePb.class).add("payloadTable", payloadTable).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java index 8f1db2a3b..e9528e91f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java @@ -4,23 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = BearerTokenAuth.BearerTokenAuthSerializer.class) +@JsonDeserialize(using = BearerTokenAuth.BearerTokenAuthDeserializer.class) public class BearerTokenAuth { /** * The Databricks secret key reference for a token. If you prefer to paste your token directly, * see `token_plaintext`. */ - @JsonProperty("token") private String token; /** * The token provided as a plaintext string. If you prefer to reference your token using * Databricks Secrets, see `token`. */ - @JsonProperty("token_plaintext") private String tokenPlaintext; public BearerTokenAuth setToken(String token) { @@ -61,4 +70,40 @@ public String toString() { .add("tokenPlaintext", tokenPlaintext) .toString(); } + + BearerTokenAuthPb toPb() { + BearerTokenAuthPb pb = new BearerTokenAuthPb(); + pb.setToken(token); + pb.setTokenPlaintext(tokenPlaintext); + + return pb; + } + + static BearerTokenAuth fromPb(BearerTokenAuthPb pb) { + BearerTokenAuth model = new BearerTokenAuth(); + model.setToken(pb.getToken()); + model.setTokenPlaintext(pb.getTokenPlaintext()); + + return model; + } + + public static class BearerTokenAuthSerializer extends JsonSerializer { + @Override + public void serialize(BearerTokenAuth value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BearerTokenAuthPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BearerTokenAuthDeserializer extends JsonDeserializer { + @Override + public BearerTokenAuth deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BearerTokenAuthPb pb = mapper.readValue(p, BearerTokenAuthPb.class); + return BearerTokenAuth.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuthPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuthPb.java new file mode 100755 index 000000000..083cf922e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuthPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class BearerTokenAuthPb { + @JsonProperty("token") + private String token; + + @JsonProperty("token_plaintext") + private String tokenPlaintext; + + public BearerTokenAuthPb setToken(String token) { + this.token = token; + return this; + } + + public String getToken() { + return token; + } + + public BearerTokenAuthPb setTokenPlaintext(String tokenPlaintext) { + this.tokenPlaintext = tokenPlaintext; + return this; + } + + public String getTokenPlaintext() { + return tokenPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BearerTokenAuthPb that = (BearerTokenAuthPb) o; + return Objects.equals(token, that.token) && Objects.equals(tokenPlaintext, that.tokenPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(token, tokenPlaintext); + } + + @Override + public String toString() { + return new ToStringer(BearerTokenAuthPb.class) + .add("token", token) + .add("tokenPlaintext", tokenPlaintext) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java index 0beec7085..094e51876 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get build logs for a served model */ @Generated +@JsonSerialize(using = BuildLogsRequest.BuildLogsRequestSerializer.class) +@JsonDeserialize(using = BuildLogsRequest.BuildLogsRequestDeserializer.class) public class BuildLogsRequest { /** The name of the serving endpoint that the served model belongs to. This field is required. */ - @JsonIgnore private String name; + private String name; /** The name of the served model that build logs will be retrieved for. This field is required. 
*/ - @JsonIgnore private String servedModelName; + private String servedModelName; public BuildLogsRequest setName(String name) { this.name = name; @@ -54,4 +65,40 @@ public String toString() { .add("servedModelName", servedModelName) .toString(); } + + BuildLogsRequestPb toPb() { + BuildLogsRequestPb pb = new BuildLogsRequestPb(); + pb.setName(name); + pb.setServedModelName(servedModelName); + + return pb; + } + + static BuildLogsRequest fromPb(BuildLogsRequestPb pb) { + BuildLogsRequest model = new BuildLogsRequest(); + model.setName(pb.getName()); + model.setServedModelName(pb.getServedModelName()); + + return model; + } + + public static class BuildLogsRequestSerializer extends JsonSerializer { + @Override + public void serialize(BuildLogsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BuildLogsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BuildLogsRequestDeserializer extends JsonDeserializer { + @Override + public BuildLogsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BuildLogsRequestPb pb = mapper.readValue(p, BuildLogsRequestPb.class); + return BuildLogsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequestPb.java new file mode 100755 index 000000000..8e1148cdd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get build logs for a served model */ +@Generated +class BuildLogsRequestPb { + @JsonIgnore private String name; + + @JsonIgnore private String servedModelName; + + public BuildLogsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public BuildLogsRequestPb setServedModelName(String servedModelName) { + this.servedModelName = servedModelName; + return this; + } + + public String getServedModelName() { + return servedModelName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BuildLogsRequestPb that = (BuildLogsRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(servedModelName, that.servedModelName); + } + + @Override + public int hashCode() { + return Objects.hash(name, servedModelName); + } + + @Override + public String toString() { + return new ToStringer(BuildLogsRequestPb.class) + .add("name", name) + .add("servedModelName", servedModelName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java index 334b157b0..915fef118 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = BuildLogsResponse.BuildLogsResponseSerializer.class) +@JsonDeserialize(using = BuildLogsResponse.BuildLogsResponseDeserializer.class) public class BuildLogsResponse { /** The logs associated with building the served entity's environment. */ - @JsonProperty("logs") private String logs; public BuildLogsResponse setLogs(String logs) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(BuildLogsResponse.class).add("logs", logs).toString(); } + + BuildLogsResponsePb toPb() { + BuildLogsResponsePb pb = new BuildLogsResponsePb(); + pb.setLogs(logs); + + return pb; + } + + static BuildLogsResponse fromPb(BuildLogsResponsePb pb) { + BuildLogsResponse model = new BuildLogsResponse(); + model.setLogs(pb.getLogs()); + + return model; + } + + public static class BuildLogsResponseSerializer extends JsonSerializer { + @Override + public void serialize(BuildLogsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BuildLogsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BuildLogsResponseDeserializer extends JsonDeserializer { + @Override + public BuildLogsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BuildLogsResponsePb pb = mapper.readValue(p, BuildLogsResponsePb.class); + return BuildLogsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponsePb.java new file mode 100755 index 000000000..d5fce73bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class BuildLogsResponsePb { + @JsonProperty("logs") + private String logs; + + public BuildLogsResponsePb setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BuildLogsResponsePb that = (BuildLogsResponsePb) o; + return Objects.equals(logs, that.logs); + } + + @Override + public int hashCode() { + return Objects.hash(logs); + } + + @Override + public String toString() { + return new ToStringer(BuildLogsResponsePb.class).add("logs", logs).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java index f4551e036..978d84704 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java @@ -4,17 +4,26 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ChatMessage.ChatMessageSerializer.class) +@JsonDeserialize(using = ChatMessage.ChatMessageDeserializer.class) public class ChatMessage { /** The content of the message. */ - @JsonProperty("content") private String content; /** The role of the message. One of [system, user, assistant]. 
*/ - @JsonProperty("role") private ChatMessageRole role; public ChatMessage setContent(String content) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(ChatMessage.class).add("content", content).add("role", role).toString(); } + + ChatMessagePb toPb() { + ChatMessagePb pb = new ChatMessagePb(); + pb.setContent(content); + pb.setRole(role); + + return pb; + } + + static ChatMessage fromPb(ChatMessagePb pb) { + ChatMessage model = new ChatMessage(); + model.setContent(pb.getContent()); + model.setRole(pb.getRole()); + + return model; + } + + public static class ChatMessageSerializer extends JsonSerializer { + @Override + public void serialize(ChatMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ChatMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ChatMessageDeserializer extends JsonDeserializer { + @Override + public ChatMessage deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ChatMessagePb pb = mapper.readValue(p, ChatMessagePb.class); + return ChatMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessagePb.java new file mode 100755 index 000000000..49fb601d2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessagePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ChatMessagePb { + @JsonProperty("content") + private String content; + + @JsonProperty("role") + private ChatMessageRole role; + + public ChatMessagePb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public ChatMessagePb setRole(ChatMessageRole role) { + this.role = role; + return this; + } + + public ChatMessageRole getRole() { + return role; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ChatMessagePb that = (ChatMessagePb) o; + return Objects.equals(content, that.content) && Objects.equals(role, that.role); + } + + @Override + public int hashCode() { + return Objects.hash(content, role); + } + + @Override + public String toString() { + return new ToStringer(ChatMessagePb.class).add("content", content).add("role", role).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java index 1b7e310aa..c4409c421 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CohereConfig.CohereConfigSerializer.class) +@JsonDeserialize(using = CohereConfig.CohereConfigDeserializer.class) public class CohereConfig { /** * This is an optional field to provide a customized base URL for the Cohere API. If left * unspecified, the standard Cohere base URL is used. */ - @JsonProperty("cohere_api_base") private String cohereApiBase; /** @@ -21,7 +31,6 @@ public class CohereConfig { * directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the * following fields: `cohere_api_key` or `cohere_api_key_plaintext`. */ - @JsonProperty("cohere_api_key") private String cohereApiKey; /** @@ -29,7 +38,6 @@ public class CohereConfig { * Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the * following fields: `cohere_api_key` or `cohere_api_key_plaintext`. 
*/ - @JsonProperty("cohere_api_key_plaintext") private String cohereApiKeyPlaintext; public CohereConfig setCohereApiBase(String cohereApiBase) { @@ -82,4 +90,41 @@ public String toString() { .add("cohereApiKeyPlaintext", cohereApiKeyPlaintext) .toString(); } + + CohereConfigPb toPb() { + CohereConfigPb pb = new CohereConfigPb(); + pb.setCohereApiBase(cohereApiBase); + pb.setCohereApiKey(cohereApiKey); + pb.setCohereApiKeyPlaintext(cohereApiKeyPlaintext); + + return pb; + } + + static CohereConfig fromPb(CohereConfigPb pb) { + CohereConfig model = new CohereConfig(); + model.setCohereApiBase(pb.getCohereApiBase()); + model.setCohereApiKey(pb.getCohereApiKey()); + model.setCohereApiKeyPlaintext(pb.getCohereApiKeyPlaintext()); + + return model; + } + + public static class CohereConfigSerializer extends JsonSerializer { + @Override + public void serialize(CohereConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CohereConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CohereConfigDeserializer extends JsonDeserializer { + @Override + public CohereConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CohereConfigPb pb = mapper.readValue(p, CohereConfigPb.class); + return CohereConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfigPb.java new file mode 100755 index 000000000..800301735 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfigPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CohereConfigPb { + @JsonProperty("cohere_api_base") + private String cohereApiBase; + + @JsonProperty("cohere_api_key") + private String cohereApiKey; + + @JsonProperty("cohere_api_key_plaintext") + private String cohereApiKeyPlaintext; + + public CohereConfigPb setCohereApiBase(String cohereApiBase) { + this.cohereApiBase = cohereApiBase; + return this; + } + + public String getCohereApiBase() { + return cohereApiBase; + } + + public CohereConfigPb setCohereApiKey(String cohereApiKey) { + this.cohereApiKey = cohereApiKey; + return this; + } + + public String getCohereApiKey() { + return cohereApiKey; + } + + public CohereConfigPb setCohereApiKeyPlaintext(String cohereApiKeyPlaintext) { + this.cohereApiKeyPlaintext = cohereApiKeyPlaintext; + return this; + } + + public String getCohereApiKeyPlaintext() { + return cohereApiKeyPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereConfigPb that = (CohereConfigPb) o; + return Objects.equals(cohereApiBase, that.cohereApiBase) + && Objects.equals(cohereApiKey, that.cohereApiKey) + && Objects.equals(cohereApiKeyPlaintext, that.cohereApiKeyPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(cohereApiBase, cohereApiKey, cohereApiKeyPlaintext); + } + + @Override + public String toString() { + return new ToStringer(CohereConfigPb.class) + .add("cohereApiBase", cohereApiBase) + .add("cohereApiKey", cohereApiKey) + .add("cohereApiKeyPlaintext", cohereApiKeyPlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Converters.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Converters.java new file mode 100755 index 000000000..e4c487f17 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.serving; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), 
timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java index e7ae83feb..e3099ebf1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java @@ -4,33 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreatePtEndpointRequest.CreatePtEndpointRequestSerializer.class) +@JsonDeserialize(using = CreatePtEndpointRequest.CreatePtEndpointRequestDeserializer.class) public class CreatePtEndpointRequest { /** The AI Gateway configuration for the serving endpoint. 
*/ - @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; /** The budget policy associated with the endpoint. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** The core config of the serving endpoint. */ - @JsonProperty("config") private PtEndpointCoreConfig config; /** * The name of the serving endpoint. This field is required and must be unique across a Databricks * workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. */ - @JsonProperty("name") private String name; /** Tags to be attached to the serving endpoint and automatically propagated to billing logs. */ - @JsonProperty("tags") private Collection tags; public CreatePtEndpointRequest setAiGateway(AiGatewayConfig aiGateway) { @@ -105,4 +111,49 @@ public String toString() { .add("tags", tags) .toString(); } + + CreatePtEndpointRequestPb toPb() { + CreatePtEndpointRequestPb pb = new CreatePtEndpointRequestPb(); + pb.setAiGateway(aiGateway); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setConfig(config); + pb.setName(name); + pb.setTags(tags); + + return pb; + } + + static CreatePtEndpointRequest fromPb(CreatePtEndpointRequestPb pb) { + CreatePtEndpointRequest model = new CreatePtEndpointRequest(); + model.setAiGateway(pb.getAiGateway()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setConfig(pb.getConfig()); + model.setName(pb.getName()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreatePtEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePtEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePtEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePtEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public CreatePtEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The 
Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePtEndpointRequestPb pb = mapper.readValue(p, CreatePtEndpointRequestPb.class); + return CreatePtEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequestPb.java new file mode 100755 index 000000000..012b04785 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequestPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreatePtEndpointRequestPb { + @JsonProperty("ai_gateway") + private AiGatewayConfig aiGateway; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("config") + private PtEndpointCoreConfig config; + + @JsonProperty("name") + private String name; + + @JsonProperty("tags") + private Collection tags; + + public CreatePtEndpointRequestPb setAiGateway(AiGatewayConfig aiGateway) { + this.aiGateway = aiGateway; + return this; + } + + public AiGatewayConfig getAiGateway() { + return aiGateway; + } + + public CreatePtEndpointRequestPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreatePtEndpointRequestPb setConfig(PtEndpointCoreConfig config) { + this.config = config; + return this; + } + + public PtEndpointCoreConfig getConfig() { + return config; + } + + public CreatePtEndpointRequestPb setName(String 
name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreatePtEndpointRequestPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePtEndpointRequestPb that = (CreatePtEndpointRequestPb) o; + return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(config, that.config) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(aiGateway, budgetPolicyId, config, name, tags); + } + + @Override + public String toString() { + return new ToStringer(CreatePtEndpointRequestPb.class) + .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) + .add("config", config) + .add("name", name) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java index b02ca6ab8..07e00446b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java @@ -4,48 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateServingEndpoint.CreateServingEndpointSerializer.class) +@JsonDeserialize(using = CreateServingEndpoint.CreateServingEndpointDeserializer.class) public class CreateServingEndpoint { /** * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only * support inference tables. */ - @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; /** The budget policy to be applied to the serving endpoint. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** The core config of the serving endpoint. */ - @JsonProperty("config") private EndpointCoreConfigInput config; /** * The name of the serving endpoint. This field is required and must be unique across a Databricks * workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. */ - @JsonProperty("name") private String name; /** * Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use * AI Gateway to manage rate limits. */ - @JsonProperty("rate_limits") private Collection rateLimits; /** Enable route optimization for the serving endpoint. */ - @JsonProperty("route_optimized") private Boolean routeOptimized; /** Tags to be attached to the serving endpoint and automatically propagated to billing logs. 
*/ - @JsonProperty("tags") private Collection tags; public CreateServingEndpoint setAiGateway(AiGatewayConfig aiGateway) { @@ -142,4 +146,53 @@ public String toString() { .add("tags", tags) .toString(); } + + CreateServingEndpointPb toPb() { + CreateServingEndpointPb pb = new CreateServingEndpointPb(); + pb.setAiGateway(aiGateway); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setConfig(config); + pb.setName(name); + pb.setRateLimits(rateLimits); + pb.setRouteOptimized(routeOptimized); + pb.setTags(tags); + + return pb; + } + + static CreateServingEndpoint fromPb(CreateServingEndpointPb pb) { + CreateServingEndpoint model = new CreateServingEndpoint(); + model.setAiGateway(pb.getAiGateway()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setConfig(pb.getConfig()); + model.setName(pb.getName()); + model.setRateLimits(pb.getRateLimits()); + model.setRouteOptimized(pb.getRouteOptimized()); + model.setTags(pb.getTags()); + + return model; + } + + public static class CreateServingEndpointSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateServingEndpoint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateServingEndpointPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateServingEndpointDeserializer + extends JsonDeserializer { + @Override + public CreateServingEndpoint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateServingEndpointPb pb = mapper.readValue(p, CreateServingEndpointPb.class); + return CreateServingEndpoint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpointPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpointPb.java new file mode 100755 index 000000000..6c0e228b4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpointPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateServingEndpointPb { + @JsonProperty("ai_gateway") + private AiGatewayConfig aiGateway; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("config") + private EndpointCoreConfigInput config; + + @JsonProperty("name") + private String name; + + @JsonProperty("rate_limits") + private Collection rateLimits; + + @JsonProperty("route_optimized") + private Boolean routeOptimized; + + @JsonProperty("tags") + private Collection tags; + + public CreateServingEndpointPb setAiGateway(AiGatewayConfig aiGateway) { + this.aiGateway = aiGateway; + return this; + } + + public AiGatewayConfig getAiGateway() { + return aiGateway; + } + + public CreateServingEndpointPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreateServingEndpointPb setConfig(EndpointCoreConfigInput config) { + this.config = config; + return this; + } + + public EndpointCoreConfigInput getConfig() { + return config; 
+ } + + public CreateServingEndpointPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateServingEndpointPb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + public CreateServingEndpointPb setRouteOptimized(Boolean routeOptimized) { + this.routeOptimized = routeOptimized; + return this; + } + + public Boolean getRouteOptimized() { + return routeOptimized; + } + + public CreateServingEndpointPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateServingEndpointPb that = (CreateServingEndpointPb) o; + return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(config, that.config) + && Objects.equals(name, that.name) + && Objects.equals(rateLimits, that.rateLimits) + && Objects.equals(routeOptimized, that.routeOptimized) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(aiGateway, budgetPolicyId, config, name, rateLimits, routeOptimized, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateServingEndpointPb.class) + .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) + .add("config", config) + .add("name", name) + .add("rateLimits", rateLimits) + .add("routeOptimized", routeOptimized) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java index 715bf402a..73bb1a87a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java @@ -4,28 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Configs needed to create a custom provider model route. */ @Generated +@JsonSerialize(using = CustomProviderConfig.CustomProviderConfigSerializer.class) +@JsonDeserialize(using = CustomProviderConfig.CustomProviderConfigDeserializer.class) public class CustomProviderConfig { /** * This is a field to provide API key authentication for the custom provider API. You can only * specify one authentication method. */ - @JsonProperty("api_key_auth") private ApiKeyAuth apiKeyAuth; /** * This is a field to provide bearer token authentication for the custom provider API. You can * only specify one authentication method. */ - @JsonProperty("bearer_token_auth") private BearerTokenAuth bearerTokenAuth; /** This is a field to provide the URL of the custom provider API. 
*/ - @JsonProperty("custom_provider_url") private String customProviderUrl; public CustomProviderConfig setApiKeyAuth(ApiKeyAuth apiKeyAuth) { @@ -78,4 +86,44 @@ public String toString() { .add("customProviderUrl", customProviderUrl) .toString(); } + + CustomProviderConfigPb toPb() { + CustomProviderConfigPb pb = new CustomProviderConfigPb(); + pb.setApiKeyAuth(apiKeyAuth); + pb.setBearerTokenAuth(bearerTokenAuth); + pb.setCustomProviderUrl(customProviderUrl); + + return pb; + } + + static CustomProviderConfig fromPb(CustomProviderConfigPb pb) { + CustomProviderConfig model = new CustomProviderConfig(); + model.setApiKeyAuth(pb.getApiKeyAuth()); + model.setBearerTokenAuth(pb.getBearerTokenAuth()); + model.setCustomProviderUrl(pb.getCustomProviderUrl()); + + return model; + } + + public static class CustomProviderConfigSerializer extends JsonSerializer { + @Override + public void serialize( + CustomProviderConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CustomProviderConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CustomProviderConfigDeserializer + extends JsonDeserializer { + @Override + public CustomProviderConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomProviderConfigPb pb = mapper.readValue(p, CustomProviderConfigPb.class); + return CustomProviderConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfigPb.java new file mode 100755 index 000000000..f3e7bd372 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfigPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configs needed to create a custom provider model route. */ +@Generated +class CustomProviderConfigPb { + @JsonProperty("api_key_auth") + private ApiKeyAuth apiKeyAuth; + + @JsonProperty("bearer_token_auth") + private BearerTokenAuth bearerTokenAuth; + + @JsonProperty("custom_provider_url") + private String customProviderUrl; + + public CustomProviderConfigPb setApiKeyAuth(ApiKeyAuth apiKeyAuth) { + this.apiKeyAuth = apiKeyAuth; + return this; + } + + public ApiKeyAuth getApiKeyAuth() { + return apiKeyAuth; + } + + public CustomProviderConfigPb setBearerTokenAuth(BearerTokenAuth bearerTokenAuth) { + this.bearerTokenAuth = bearerTokenAuth; + return this; + } + + public BearerTokenAuth getBearerTokenAuth() { + return bearerTokenAuth; + } + + public CustomProviderConfigPb setCustomProviderUrl(String customProviderUrl) { + this.customProviderUrl = customProviderUrl; + return this; + } + + public String getCustomProviderUrl() { + return customProviderUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CustomProviderConfigPb that = (CustomProviderConfigPb) o; + return Objects.equals(apiKeyAuth, that.apiKeyAuth) + && Objects.equals(bearerTokenAuth, that.bearerTokenAuth) + && Objects.equals(customProviderUrl, that.customProviderUrl); + } + + @Override + public int hashCode() { + return Objects.hash(apiKeyAuth, bearerTokenAuth, customProviderUrl); + } + + @Override + public String toString() { + return new ToStringer(CustomProviderConfigPb.class) + .add("apiKeyAuth", apiKeyAuth) + .add("bearerTokenAuth", bearerTokenAuth) + .add("customProviderUrl", customProviderUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java index ee13ccec8..57d163a73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details necessary to query this object's API through the DataPlane APIs. 
*/ @Generated +@JsonSerialize(using = DataPlaneInfo.DataPlaneInfoSerializer.class) +@JsonDeserialize(using = DataPlaneInfo.DataPlaneInfoDeserializer.class) public class DataPlaneInfo { /** Authorization details as a string. */ - @JsonProperty("authorization_details") private String authorizationDetails; /** The URL of the endpoint for this operation in the dataplane. */ - @JsonProperty("endpoint_url") private String endpointUrl; public DataPlaneInfo setAuthorizationDetails(String authorizationDetails) { @@ -57,4 +66,39 @@ public String toString() { .add("endpointUrl", endpointUrl) .toString(); } + + DataPlaneInfoPb toPb() { + DataPlaneInfoPb pb = new DataPlaneInfoPb(); + pb.setAuthorizationDetails(authorizationDetails); + pb.setEndpointUrl(endpointUrl); + + return pb; + } + + static DataPlaneInfo fromPb(DataPlaneInfoPb pb) { + DataPlaneInfo model = new DataPlaneInfo(); + model.setAuthorizationDetails(pb.getAuthorizationDetails()); + model.setEndpointUrl(pb.getEndpointUrl()); + + return model; + } + + public static class DataPlaneInfoSerializer extends JsonSerializer { + @Override + public void serialize(DataPlaneInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataPlaneInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataPlaneInfoDeserializer extends JsonDeserializer { + @Override + public DataPlaneInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataPlaneInfoPb pb = mapper.readValue(p, DataPlaneInfoPb.class); + return DataPlaneInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfoPb.java new file mode 100755 index 000000000..0df6189c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfoPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details necessary to query this object's API through the DataPlane APIs. */ +@Generated +class DataPlaneInfoPb { + @JsonProperty("authorization_details") + private String authorizationDetails; + + @JsonProperty("endpoint_url") + private String endpointUrl; + + public DataPlaneInfoPb setAuthorizationDetails(String authorizationDetails) { + this.authorizationDetails = authorizationDetails; + return this; + } + + public String getAuthorizationDetails() { + return authorizationDetails; + } + + public DataPlaneInfoPb setEndpointUrl(String endpointUrl) { + this.endpointUrl = endpointUrl; + return this; + } + + public String getEndpointUrl() { + return endpointUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataPlaneInfoPb that = (DataPlaneInfoPb) o; + return Objects.equals(authorizationDetails, that.authorizationDetails) + && Objects.equals(endpointUrl, that.endpointUrl); + } + + @Override + public int hashCode() { + return Objects.hash(authorizationDetails, endpointUrl); + } + + @Override + public String toString() { + return new 
ToStringer(DataPlaneInfoPb.class) + .add("authorizationDetails", authorizationDetails) + .add("endpointUrl", endpointUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java index b1ede715c..43f5edc6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java @@ -4,10 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DatabricksModelServingConfig.DatabricksModelServingConfigSerializer.class) +@JsonDeserialize( + using = DatabricksModelServingConfig.DatabricksModelServingConfigDeserializer.class) public class DatabricksModelServingConfig { /** * The Databricks secret key reference for a Databricks API token that corresponds to a user or @@ -16,7 +28,6 @@ public class DatabricksModelServingConfig { * `databricks_api_token_plaintext`. You must provide an API key using one of the following * fields: `databricks_api_token` or `databricks_api_token_plaintext`. 
*/ - @JsonProperty("databricks_api_token") private String databricksApiToken; /** @@ -26,14 +37,12 @@ public class DatabricksModelServingConfig { * must provide an API key using one of the following fields: `databricks_api_token` or * `databricks_api_token_plaintext`. */ - @JsonProperty("databricks_api_token_plaintext") private String databricksApiTokenPlaintext; /** * The URL of the Databricks workspace containing the model serving endpoint pointed to by this * external model. */ - @JsonProperty("databricks_workspace_url") private String databricksWorkspaceUrl; public DatabricksModelServingConfig setDatabricksApiToken(String databricksApiToken) { @@ -87,4 +96,45 @@ public String toString() { .add("databricksWorkspaceUrl", databricksWorkspaceUrl) .toString(); } + + DatabricksModelServingConfigPb toPb() { + DatabricksModelServingConfigPb pb = new DatabricksModelServingConfigPb(); + pb.setDatabricksApiToken(databricksApiToken); + pb.setDatabricksApiTokenPlaintext(databricksApiTokenPlaintext); + pb.setDatabricksWorkspaceUrl(databricksWorkspaceUrl); + + return pb; + } + + static DatabricksModelServingConfig fromPb(DatabricksModelServingConfigPb pb) { + DatabricksModelServingConfig model = new DatabricksModelServingConfig(); + model.setDatabricksApiToken(pb.getDatabricksApiToken()); + model.setDatabricksApiTokenPlaintext(pb.getDatabricksApiTokenPlaintext()); + model.setDatabricksWorkspaceUrl(pb.getDatabricksWorkspaceUrl()); + + return model; + } + + public static class DatabricksModelServingConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + DatabricksModelServingConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DatabricksModelServingConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DatabricksModelServingConfigDeserializer + extends JsonDeserializer { + @Override + public DatabricksModelServingConfig deserialize(JsonParser p, DeserializationContext 
ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DatabricksModelServingConfigPb pb = mapper.readValue(p, DatabricksModelServingConfigPb.class); + return DatabricksModelServingConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfigPb.java new file mode 100755 index 000000000..e1093ae9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfigPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DatabricksModelServingConfigPb { + @JsonProperty("databricks_api_token") + private String databricksApiToken; + + @JsonProperty("databricks_api_token_plaintext") + private String databricksApiTokenPlaintext; + + @JsonProperty("databricks_workspace_url") + private String databricksWorkspaceUrl; + + public DatabricksModelServingConfigPb setDatabricksApiToken(String databricksApiToken) { + this.databricksApiToken = databricksApiToken; + return this; + } + + public String getDatabricksApiToken() { + return databricksApiToken; + } + + public DatabricksModelServingConfigPb setDatabricksApiTokenPlaintext( + String databricksApiTokenPlaintext) { + this.databricksApiTokenPlaintext = databricksApiTokenPlaintext; + return this; + } + + public String getDatabricksApiTokenPlaintext() { + return databricksApiTokenPlaintext; + } + + public DatabricksModelServingConfigPb setDatabricksWorkspaceUrl(String databricksWorkspaceUrl) { + 
this.databricksWorkspaceUrl = databricksWorkspaceUrl; + return this; + } + + public String getDatabricksWorkspaceUrl() { + return databricksWorkspaceUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabricksModelServingConfigPb that = (DatabricksModelServingConfigPb) o; + return Objects.equals(databricksApiToken, that.databricksApiToken) + && Objects.equals(databricksApiTokenPlaintext, that.databricksApiTokenPlaintext) + && Objects.equals(databricksWorkspaceUrl, that.databricksWorkspaceUrl); + } + + @Override + public int hashCode() { + return Objects.hash(databricksApiToken, databricksApiTokenPlaintext, databricksWorkspaceUrl); + } + + @Override + public String toString() { + return new ToStringer(DatabricksModelServingConfigPb.class) + .add("databricksApiToken", databricksApiToken) + .add("databricksApiTokenPlaintext", databricksApiTokenPlaintext) + .add("databricksWorkspaceUrl", databricksWorkspaceUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java index d146ddf42..f10f6ed91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DataframeSplitInput.DataframeSplitInputSerializer.class) +@JsonDeserialize(using = DataframeSplitInput.DataframeSplitInputDeserializer.class) public class DataframeSplitInput { /** */ - @JsonProperty("columns") private Collection columns; /** */ - @JsonProperty("data") private Collection data; /** */ - @JsonProperty("index") private Collection index; public DataframeSplitInput setColumns(Collection columns) { @@ -72,4 +80,43 @@ public String toString() { .add("index", index) .toString(); } + + DataframeSplitInputPb toPb() { + DataframeSplitInputPb pb = new DataframeSplitInputPb(); + pb.setColumns(columns); + pb.setData(data); + pb.setIndex(index); + + return pb; + } + + static DataframeSplitInput fromPb(DataframeSplitInputPb pb) { + DataframeSplitInput model = new DataframeSplitInput(); + model.setColumns(pb.getColumns()); + model.setData(pb.getData()); + model.setIndex(pb.getIndex()); + + return model; + } + + public static class DataframeSplitInputSerializer extends JsonSerializer { + @Override + public void serialize(DataframeSplitInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataframeSplitInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataframeSplitInputDeserializer + extends JsonDeserializer { + @Override + public DataframeSplitInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataframeSplitInputPb pb = mapper.readValue(p, DataframeSplitInputPb.class); + return DataframeSplitInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInputPb.java new file mode 100755 index 000000000..2a6753f1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInputPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DataframeSplitInputPb { + @JsonProperty("columns") + private Collection columns; + + @JsonProperty("data") + private Collection data; + + @JsonProperty("index") + private Collection index; + + public DataframeSplitInputPb setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public DataframeSplitInputPb setData(Collection data) { + this.data = data; + return this; + } + + public Collection getData() { + return data; + } + + public DataframeSplitInputPb setIndex(Collection index) { + this.index = index; + return this; + } + + public Collection getIndex() { + return index; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataframeSplitInputPb that = (DataframeSplitInputPb) o; + return Objects.equals(columns, that.columns) + && Objects.equals(data, that.data) + && Objects.equals(index, that.index); + } + + @Override + public int hashCode() { + return Objects.hash(columns, data, 
index); + } + + @Override + public String toString() { + return new ToStringer(DataframeSplitInputPb.class) + .add("columns", columns) + .add("data", data) + .add("index", index) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java index 94d8eb1f5..a1fe7e6bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponsePb.java new file mode 100755 index 000000000..892d79b78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java index ba579de1b..732e3c7f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a serving endpoint */ @Generated +@JsonSerialize(using = DeleteServingEndpointRequest.DeleteServingEndpointRequestSerializer.class) +@JsonDeserialize( + using = 
DeleteServingEndpointRequest.DeleteServingEndpointRequestDeserializer.class) public class DeleteServingEndpointRequest { /** */ - @JsonIgnore private String name; + private String name; public DeleteServingEndpointRequest setName(String name) { this.name = name; @@ -39,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteServingEndpointRequest.class).add("name", name).toString(); } + + DeleteServingEndpointRequestPb toPb() { + DeleteServingEndpointRequestPb pb = new DeleteServingEndpointRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteServingEndpointRequest fromPb(DeleteServingEndpointRequestPb pb) { + DeleteServingEndpointRequest model = new DeleteServingEndpointRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteServingEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteServingEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteServingEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteServingEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteServingEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteServingEndpointRequestPb pb = mapper.readValue(p, DeleteServingEndpointRequestPb.class); + return DeleteServingEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequestPb.java new file mode 100755 index 000000000..8db5c1302 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a serving endpoint */ +@Generated +class DeleteServingEndpointRequestPb { + @JsonIgnore private String name; + + public DeleteServingEndpointRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServingEndpointRequestPb that = (DeleteServingEndpointRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteServingEndpointRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java index 90b426e0a..d9c0b66db 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java @@ -4,22 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + EmbeddingsV1ResponseEmbeddingElement.EmbeddingsV1ResponseEmbeddingElementSerializer.class) +@JsonDeserialize( + using = + EmbeddingsV1ResponseEmbeddingElement.EmbeddingsV1ResponseEmbeddingElementDeserializer.class) public class EmbeddingsV1ResponseEmbeddingElement { /** */ - @JsonProperty("embedding") private Collection embedding; /** The index of the embedding in the response. */ - @JsonProperty("index") private Long index; /** This will always be 'embedding'. 
*/ - @JsonProperty("object") private EmbeddingsV1ResponseEmbeddingElementObject object; public EmbeddingsV1ResponseEmbeddingElement setEmbedding(Collection embedding) { @@ -73,4 +85,46 @@ public String toString() { .add("object", object) .toString(); } + + EmbeddingsV1ResponseEmbeddingElementPb toPb() { + EmbeddingsV1ResponseEmbeddingElementPb pb = new EmbeddingsV1ResponseEmbeddingElementPb(); + pb.setEmbedding(embedding); + pb.setIndex(index); + pb.setObject(object); + + return pb; + } + + static EmbeddingsV1ResponseEmbeddingElement fromPb(EmbeddingsV1ResponseEmbeddingElementPb pb) { + EmbeddingsV1ResponseEmbeddingElement model = new EmbeddingsV1ResponseEmbeddingElement(); + model.setEmbedding(pb.getEmbedding()); + model.setIndex(pb.getIndex()); + model.setObject(pb.getObject()); + + return model; + } + + public static class EmbeddingsV1ResponseEmbeddingElementSerializer + extends JsonSerializer { + @Override + public void serialize( + EmbeddingsV1ResponseEmbeddingElement value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmbeddingsV1ResponseEmbeddingElementPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmbeddingsV1ResponseEmbeddingElementDeserializer + extends JsonDeserializer { + @Override + public EmbeddingsV1ResponseEmbeddingElement deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmbeddingsV1ResponseEmbeddingElementPb pb = + mapper.readValue(p, EmbeddingsV1ResponseEmbeddingElementPb.class); + return EmbeddingsV1ResponseEmbeddingElement.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElementPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElementPb.java new file mode 100755 index 000000000..f6cb5dfc0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElementPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EmbeddingsV1ResponseEmbeddingElementPb { + @JsonProperty("embedding") + private Collection embedding; + + @JsonProperty("index") + private Long index; + + @JsonProperty("object") + private EmbeddingsV1ResponseEmbeddingElementObject object; + + public EmbeddingsV1ResponseEmbeddingElementPb setEmbedding(Collection embedding) { + this.embedding = embedding; + return this; + } + + public Collection getEmbedding() { + return embedding; + } + + public EmbeddingsV1ResponseEmbeddingElementPb setIndex(Long index) { + this.index = index; + return this; + } + + public Long getIndex() { + return index; + } + + public EmbeddingsV1ResponseEmbeddingElementPb setObject( + EmbeddingsV1ResponseEmbeddingElementObject object) { + this.object = object; + return this; + } + + public EmbeddingsV1ResponseEmbeddingElementObject getObject() { + return object; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + EmbeddingsV1ResponseEmbeddingElementPb that = (EmbeddingsV1ResponseEmbeddingElementPb) o; + return Objects.equals(embedding, that.embedding) + && Objects.equals(index, that.index) + && Objects.equals(object, that.object); + } + + @Override + public int hashCode() { + return Objects.hash(embedding, index, object); + } + + @Override + public String toString() { + return new ToStringer(EmbeddingsV1ResponseEmbeddingElementPb.class) + .add("embedding", embedding) + .add("index", index) + .add("object", object) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java index 5e0034ef9..9598a967e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java @@ -4,12 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointCoreConfigInput.EndpointCoreConfigInputSerializer.class) +@JsonDeserialize(using = 
EndpointCoreConfigInput.EndpointCoreConfigInputDeserializer.class) public class EndpointCoreConfigInput { /** * Configuration for Inference Tables which automatically logs requests and responses to Unity @@ -17,25 +27,21 @@ public class EndpointCoreConfigInput { * updating existing provisioned throughput endpoints that never have inference table configured; * in these cases please use AI Gateway to manage inference tables. */ - @JsonProperty("auto_capture_config") private AutoCaptureConfigInput autoCaptureConfig; /** The name of the serving endpoint to update. This field is required. */ - @JsonIgnore private String name; + private String name; /** The list of served entities under the serving endpoint config. */ - @JsonProperty("served_entities") private Collection servedEntities; /** * (Deprecated, use served_entities instead) The list of served models under the serving endpoint * config. */ - @JsonProperty("served_models") private Collection servedModels; /** The traffic configuration associated with the serving endpoint config. 
*/ - @JsonProperty("traffic_config") private TrafficConfig trafficConfig; public EndpointCoreConfigInput setAutoCaptureConfig(AutoCaptureConfigInput autoCaptureConfig) { @@ -110,4 +116,49 @@ public String toString() { .add("trafficConfig", trafficConfig) .toString(); } + + EndpointCoreConfigInputPb toPb() { + EndpointCoreConfigInputPb pb = new EndpointCoreConfigInputPb(); + pb.setAutoCaptureConfig(autoCaptureConfig); + pb.setName(name); + pb.setServedEntities(servedEntities); + pb.setServedModels(servedModels); + pb.setTrafficConfig(trafficConfig); + + return pb; + } + + static EndpointCoreConfigInput fromPb(EndpointCoreConfigInputPb pb) { + EndpointCoreConfigInput model = new EndpointCoreConfigInput(); + model.setAutoCaptureConfig(pb.getAutoCaptureConfig()); + model.setName(pb.getName()); + model.setServedEntities(pb.getServedEntities()); + model.setServedModels(pb.getServedModels()); + model.setTrafficConfig(pb.getTrafficConfig()); + + return model; + } + + public static class EndpointCoreConfigInputSerializer + extends JsonSerializer { + @Override + public void serialize( + EndpointCoreConfigInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointCoreConfigInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointCoreConfigInputDeserializer + extends JsonDeserializer { + @Override + public EndpointCoreConfigInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointCoreConfigInputPb pb = mapper.readValue(p, EndpointCoreConfigInputPb.class); + return EndpointCoreConfigInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInputPb.java new file mode 100755 index 000000000..f55895509 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInputPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointCoreConfigInputPb { + @JsonProperty("auto_capture_config") + private AutoCaptureConfigInput autoCaptureConfig; + + @JsonIgnore private String name; + + @JsonProperty("served_entities") + private Collection servedEntities; + + @JsonProperty("served_models") + private Collection servedModels; + + @JsonProperty("traffic_config") + private TrafficConfig trafficConfig; + + public EndpointCoreConfigInputPb setAutoCaptureConfig(AutoCaptureConfigInput autoCaptureConfig) { + this.autoCaptureConfig = autoCaptureConfig; + return this; + } + + public AutoCaptureConfigInput getAutoCaptureConfig() { + return autoCaptureConfig; + } + + public EndpointCoreConfigInputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EndpointCoreConfigInputPb setServedEntities(Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection getServedEntities() { + return servedEntities; + 
} + + public EndpointCoreConfigInputPb setServedModels(Collection servedModels) { + this.servedModels = servedModels; + return this; + } + + public Collection getServedModels() { + return servedModels; + } + + public EndpointCoreConfigInputPb setTrafficConfig(TrafficConfig trafficConfig) { + this.trafficConfig = trafficConfig; + return this; + } + + public TrafficConfig getTrafficConfig() { + return trafficConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointCoreConfigInputPb that = (EndpointCoreConfigInputPb) o; + return Objects.equals(autoCaptureConfig, that.autoCaptureConfig) + && Objects.equals(name, that.name) + && Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(servedModels, that.servedModels) + && Objects.equals(trafficConfig, that.trafficConfig); + } + + @Override + public int hashCode() { + return Objects.hash(autoCaptureConfig, name, servedEntities, servedModels, trafficConfig); + } + + @Override + public String toString() { + return new ToStringer(EndpointCoreConfigInputPb.class) + .add("autoCaptureConfig", autoCaptureConfig) + .add("name", name) + .add("servedEntities", servedEntities) + .add("servedModels", servedModels) + .add("trafficConfig", trafficConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java index 253eaba34..734cbebbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointCoreConfigOutput.EndpointCoreConfigOutputSerializer.class) +@JsonDeserialize(using = EndpointCoreConfigOutput.EndpointCoreConfigOutputDeserializer.class) public class EndpointCoreConfigOutput { /** * Configuration for Inference Tables which automatically logs requests and responses to Unity @@ -16,26 +27,21 @@ public class EndpointCoreConfigOutput { * updating existing provisioned throughput endpoints that never have inference table configured; * in these cases please use AI Gateway to manage inference tables. */ - @JsonProperty("auto_capture_config") private AutoCaptureConfigOutput autoCaptureConfig; /** The config version that the serving endpoint is currently serving. */ - @JsonProperty("config_version") private Long configVersion; /** The list of served entities under the serving endpoint config. */ - @JsonProperty("served_entities") private Collection servedEntities; /** * (Deprecated, use served_entities instead) The list of served models under the serving endpoint * config. */ - @JsonProperty("served_models") private Collection servedModels; /** The traffic configuration associated with the serving endpoint config. 
*/ - @JsonProperty("traffic_config") private TrafficConfig trafficConfig; public EndpointCoreConfigOutput setAutoCaptureConfig(AutoCaptureConfigOutput autoCaptureConfig) { @@ -111,4 +117,49 @@ public String toString() { .add("trafficConfig", trafficConfig) .toString(); } + + EndpointCoreConfigOutputPb toPb() { + EndpointCoreConfigOutputPb pb = new EndpointCoreConfigOutputPb(); + pb.setAutoCaptureConfig(autoCaptureConfig); + pb.setConfigVersion(configVersion); + pb.setServedEntities(servedEntities); + pb.setServedModels(servedModels); + pb.setTrafficConfig(trafficConfig); + + return pb; + } + + static EndpointCoreConfigOutput fromPb(EndpointCoreConfigOutputPb pb) { + EndpointCoreConfigOutput model = new EndpointCoreConfigOutput(); + model.setAutoCaptureConfig(pb.getAutoCaptureConfig()); + model.setConfigVersion(pb.getConfigVersion()); + model.setServedEntities(pb.getServedEntities()); + model.setServedModels(pb.getServedModels()); + model.setTrafficConfig(pb.getTrafficConfig()); + + return model; + } + + public static class EndpointCoreConfigOutputSerializer + extends JsonSerializer { + @Override + public void serialize( + EndpointCoreConfigOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointCoreConfigOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointCoreConfigOutputDeserializer + extends JsonDeserializer { + @Override + public EndpointCoreConfigOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointCoreConfigOutputPb pb = mapper.readValue(p, EndpointCoreConfigOutputPb.class); + return EndpointCoreConfigOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutputPb.java new file mode 100755 index 000000000..bc9d53fe7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutputPb.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointCoreConfigOutputPb { + @JsonProperty("auto_capture_config") + private AutoCaptureConfigOutput autoCaptureConfig; + + @JsonProperty("config_version") + private Long configVersion; + + @JsonProperty("served_entities") + private Collection servedEntities; + + @JsonProperty("served_models") + private Collection servedModels; + + @JsonProperty("traffic_config") + private TrafficConfig trafficConfig; + + public EndpointCoreConfigOutputPb setAutoCaptureConfig( + AutoCaptureConfigOutput autoCaptureConfig) { + this.autoCaptureConfig = autoCaptureConfig; + return this; + } + + public AutoCaptureConfigOutput getAutoCaptureConfig() { + return autoCaptureConfig; + } + + public EndpointCoreConfigOutputPb setConfigVersion(Long configVersion) { + this.configVersion = configVersion; + return this; + } + + public Long getConfigVersion() { + return configVersion; + } + + public EndpointCoreConfigOutputPb setServedEntities( + Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection 
getServedEntities() { + return servedEntities; + } + + public EndpointCoreConfigOutputPb setServedModels(Collection servedModels) { + this.servedModels = servedModels; + return this; + } + + public Collection getServedModels() { + return servedModels; + } + + public EndpointCoreConfigOutputPb setTrafficConfig(TrafficConfig trafficConfig) { + this.trafficConfig = trafficConfig; + return this; + } + + public TrafficConfig getTrafficConfig() { + return trafficConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointCoreConfigOutputPb that = (EndpointCoreConfigOutputPb) o; + return Objects.equals(autoCaptureConfig, that.autoCaptureConfig) + && Objects.equals(configVersion, that.configVersion) + && Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(servedModels, that.servedModels) + && Objects.equals(trafficConfig, that.trafficConfig); + } + + @Override + public int hashCode() { + return Objects.hash( + autoCaptureConfig, configVersion, servedEntities, servedModels, trafficConfig); + } + + @Override + public String toString() { + return new ToStringer(EndpointCoreConfigOutputPb.class) + .add("autoCaptureConfig", autoCaptureConfig) + .add("configVersion", configVersion) + .add("servedEntities", servedEntities) + .add("servedModels", servedModels) + .add("trafficConfig", trafficConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummary.java index dd714e100..d7c887bd9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummary.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointCoreConfigSummary.EndpointCoreConfigSummarySerializer.class) +@JsonDeserialize(using = EndpointCoreConfigSummary.EndpointCoreConfigSummaryDeserializer.class) public class EndpointCoreConfigSummary { /** The list of served entities under the serving endpoint config. */ - @JsonProperty("served_entities") private Collection servedEntities; /** * (Deprecated, use served_entities instead) The list of served models under the serving endpoint * config. 
*/ - @JsonProperty("served_models") private Collection servedModels; public EndpointCoreConfigSummary setServedEntities(Collection servedEntities) { @@ -60,4 +69,43 @@ public String toString() { .add("servedModels", servedModels) .toString(); } + + EndpointCoreConfigSummaryPb toPb() { + EndpointCoreConfigSummaryPb pb = new EndpointCoreConfigSummaryPb(); + pb.setServedEntities(servedEntities); + pb.setServedModels(servedModels); + + return pb; + } + + static EndpointCoreConfigSummary fromPb(EndpointCoreConfigSummaryPb pb) { + EndpointCoreConfigSummary model = new EndpointCoreConfigSummary(); + model.setServedEntities(pb.getServedEntities()); + model.setServedModels(pb.getServedModels()); + + return model; + } + + public static class EndpointCoreConfigSummarySerializer + extends JsonSerializer { + @Override + public void serialize( + EndpointCoreConfigSummary value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointCoreConfigSummaryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointCoreConfigSummaryDeserializer + extends JsonDeserializer { + @Override + public EndpointCoreConfigSummary deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointCoreConfigSummaryPb pb = mapper.readValue(p, EndpointCoreConfigSummaryPb.class); + return EndpointCoreConfigSummary.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummaryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummaryPb.java new file mode 100755 index 000000000..ac201694b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigSummaryPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointCoreConfigSummaryPb { + @JsonProperty("served_entities") + private Collection servedEntities; + + @JsonProperty("served_models") + private Collection servedModels; + + public EndpointCoreConfigSummaryPb setServedEntities( + Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection getServedEntities() { + return servedEntities; + } + + public EndpointCoreConfigSummaryPb setServedModels(Collection servedModels) { + this.servedModels = servedModels; + return this; + } + + public Collection getServedModels() { + return servedModels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointCoreConfigSummaryPb that = (EndpointCoreConfigSummaryPb) o; + return Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(servedModels, that.servedModels); + } + + @Override + public int hashCode() { + return Objects.hash(servedEntities, servedModels); + } + + 
@Override + public String toString() { + return new ToStringer(EndpointCoreConfigSummaryPb.class) + .add("servedEntities", servedEntities) + .add("servedModels", servedModels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java index b25e58be5..a88d8d4bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointPendingConfig.EndpointPendingConfigSerializer.class) +@JsonDeserialize(using = EndpointPendingConfig.EndpointPendingConfigDeserializer.class) public class EndpointPendingConfig { /** * Configuration for Inference Tables which automatically logs requests and responses to Unity @@ -16,30 +27,24 @@ public class EndpointPendingConfig { * updating existing provisioned throughput endpoints that never have inference table configured; * in these cases please use AI Gateway to manage inference tables. 
*/ - @JsonProperty("auto_capture_config") private AutoCaptureConfigOutput autoCaptureConfig; /** The config version that the serving endpoint is currently serving. */ - @JsonProperty("config_version") private Long configVersion; /** The list of served entities belonging to the last issued update to the serving endpoint. */ - @JsonProperty("served_entities") private Collection servedEntities; /** * (Deprecated, use served_entities instead) The list of served models belonging to the last * issued update to the serving endpoint. */ - @JsonProperty("served_models") private Collection servedModels; /** The timestamp when the update to the pending config started. */ - @JsonProperty("start_time") private Long startTime; /** The traffic config defining how invocations to the serving endpoint should be routed. */ - @JsonProperty("traffic_config") private TrafficConfig trafficConfig; public EndpointPendingConfig setAutoCaptureConfig(AutoCaptureConfigOutput autoCaptureConfig) { @@ -126,4 +131,51 @@ public String toString() { .add("trafficConfig", trafficConfig) .toString(); } + + EndpointPendingConfigPb toPb() { + EndpointPendingConfigPb pb = new EndpointPendingConfigPb(); + pb.setAutoCaptureConfig(autoCaptureConfig); + pb.setConfigVersion(configVersion); + pb.setServedEntities(servedEntities); + pb.setServedModels(servedModels); + pb.setStartTime(startTime); + pb.setTrafficConfig(trafficConfig); + + return pb; + } + + static EndpointPendingConfig fromPb(EndpointPendingConfigPb pb) { + EndpointPendingConfig model = new EndpointPendingConfig(); + model.setAutoCaptureConfig(pb.getAutoCaptureConfig()); + model.setConfigVersion(pb.getConfigVersion()); + model.setServedEntities(pb.getServedEntities()); + model.setServedModels(pb.getServedModels()); + model.setStartTime(pb.getStartTime()); + model.setTrafficConfig(pb.getTrafficConfig()); + + return model; + } + + public static class EndpointPendingConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + 
EndpointPendingConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointPendingConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointPendingConfigDeserializer + extends JsonDeserializer { + @Override + public EndpointPendingConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointPendingConfigPb pb = mapper.readValue(p, EndpointPendingConfigPb.class); + return EndpointPendingConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfigPb.java new file mode 100755 index 000000000..3bbb5bcae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfigPb.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointPendingConfigPb { + @JsonProperty("auto_capture_config") + private AutoCaptureConfigOutput autoCaptureConfig; + + @JsonProperty("config_version") + private Long configVersion; + + @JsonProperty("served_entities") + private Collection servedEntities; + + @JsonProperty("served_models") + private Collection servedModels; + + @JsonProperty("start_time") + private Long startTime; + + @JsonProperty("traffic_config") + private TrafficConfig trafficConfig; + + public EndpointPendingConfigPb setAutoCaptureConfig(AutoCaptureConfigOutput autoCaptureConfig) { + this.autoCaptureConfig = autoCaptureConfig; + return this; + } + + public AutoCaptureConfigOutput getAutoCaptureConfig() { + return autoCaptureConfig; + } + + public EndpointPendingConfigPb setConfigVersion(Long configVersion) { + this.configVersion = configVersion; + return this; + } + + public Long getConfigVersion() { + return configVersion; + } + + public EndpointPendingConfigPb setServedEntities(Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection getServedEntities() { + return servedEntities; + } + + public EndpointPendingConfigPb setServedModels(Collection servedModels) { + this.servedModels = servedModels; + return this; + } + + public Collection getServedModels() { + return servedModels; + } + + public EndpointPendingConfigPb setStartTime(Long startTime) { + this.startTime = startTime; + return this; + } + + public Long getStartTime() { + return startTime; + } + + public EndpointPendingConfigPb setTrafficConfig(TrafficConfig trafficConfig) { + this.trafficConfig = trafficConfig; + return this; + } + + public TrafficConfig getTrafficConfig() { + return trafficConfig; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointPendingConfigPb that = (EndpointPendingConfigPb) o; + return Objects.equals(autoCaptureConfig, that.autoCaptureConfig) + && Objects.equals(configVersion, that.configVersion) + && Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(servedModels, that.servedModels) + && Objects.equals(startTime, that.startTime) + && Objects.equals(trafficConfig, that.trafficConfig); + } + + @Override + public int hashCode() { + return Objects.hash( + autoCaptureConfig, configVersion, servedEntities, servedModels, startTime, trafficConfig); + } + + @Override + public String toString() { + return new ToStringer(EndpointPendingConfigPb.class) + .add("autoCaptureConfig", autoCaptureConfig) + .add("configVersion", configVersion) + .add("servedEntities", servedEntities) + .add("servedModels", servedModels) + .add("startTime", startTime) + .add("trafficConfig", trafficConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java index 16b6905f6..dc80f5556 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointState.EndpointStateSerializer.class) +@JsonDeserialize(using = EndpointState.EndpointStateDeserializer.class) public class EndpointState { /** * The state of an endpoint's config update. This informs the user if the pending_config is in @@ -15,7 +26,6 @@ public class EndpointState { * endpoint's config_update state value is IN_PROGRESS, another update can not be made until the * update completes or fails. */ - @JsonProperty("config_update") private EndpointStateConfigUpdate configUpdate; /** @@ -23,7 +33,6 @@ public class EndpointState { * READY if all of the served entities in its active configuration are ready. If any of the * actively served entities are in a non-ready state, the endpoint state will be NOT_READY. 
*/ - @JsonProperty("ready") private EndpointStateReady ready; public EndpointState setConfigUpdate(EndpointStateConfigUpdate configUpdate) { @@ -64,4 +73,39 @@ public String toString() { .add("ready", ready) .toString(); } + + EndpointStatePb toPb() { + EndpointStatePb pb = new EndpointStatePb(); + pb.setConfigUpdate(configUpdate); + pb.setReady(ready); + + return pb; + } + + static EndpointState fromPb(EndpointStatePb pb) { + EndpointState model = new EndpointState(); + model.setConfigUpdate(pb.getConfigUpdate()); + model.setReady(pb.getReady()); + + return model; + } + + public static class EndpointStateSerializer extends JsonSerializer { + @Override + public void serialize(EndpointState value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointStatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointStateDeserializer extends JsonDeserializer { + @Override + public EndpointState deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointStatePb pb = mapper.readValue(p, EndpointStatePb.class); + return EndpointState.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStatePb.java new file mode 100755 index 000000000..df75a65f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStatePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointStatePb { + @JsonProperty("config_update") + private EndpointStateConfigUpdate configUpdate; + + @JsonProperty("ready") + private EndpointStateReady ready; + + public EndpointStatePb setConfigUpdate(EndpointStateConfigUpdate configUpdate) { + this.configUpdate = configUpdate; + return this; + } + + public EndpointStateConfigUpdate getConfigUpdate() { + return configUpdate; + } + + public EndpointStatePb setReady(EndpointStateReady ready) { + this.ready = ready; + return this; + } + + public EndpointStateReady getReady() { + return ready; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointStatePb that = (EndpointStatePb) o; + return Objects.equals(configUpdate, that.configUpdate) && Objects.equals(ready, that.ready); + } + + @Override + public int hashCode() { + return Objects.hash(configUpdate, ready); + } + + @Override + public String toString() { + return new ToStringer(EndpointStatePb.class) + .add("configUpdate", configUpdate) + .add("ready", ready) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTag.java index 51786d284..0943add18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTag.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointTag.EndpointTagSerializer.class) +@JsonDeserialize(using = EndpointTag.EndpointTagDeserializer.class) public class EndpointTag { /** Key field for a serving endpoint tag. */ - @JsonProperty("key") private String key; /** Optional value field for a serving endpoint tag. */ - @JsonProperty("value") private String value; public EndpointTag setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(EndpointTag.class).add("key", key).add("value", value).toString(); } + + EndpointTagPb toPb() { + EndpointTagPb pb = new EndpointTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static EndpointTag fromPb(EndpointTagPb pb) { + EndpointTag model = new EndpointTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class EndpointTagSerializer extends JsonSerializer { + @Override + public void serialize(EndpointTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointTagDeserializer extends JsonDeserializer { + @Override + public EndpointTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointTagPb pb = mapper.readValue(p, EndpointTagPb.class); + return EndpointTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagPb.java new file mode 100755 index 000000000..33bab1bae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public EndpointTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public EndpointTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointTagPb that = (EndpointTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(EndpointTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java index 1fb13a9b2..68c6c6737 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointTags.EndpointTagsSerializer.class) +@JsonDeserialize(using = EndpointTags.EndpointTagsDeserializer.class) public class EndpointTags { /** */ - @JsonProperty("tags") private Collection tags; public EndpointTags setTags(Collection tags) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(EndpointTags.class).add("tags", tags).toString(); } + + EndpointTagsPb toPb() { + EndpointTagsPb pb = new EndpointTagsPb(); + pb.setTags(tags); + + return pb; + } + + static EndpointTags fromPb(EndpointTagsPb pb) { + EndpointTags model = new EndpointTags(); + model.setTags(pb.getTags()); + + return model; + } + + public static class EndpointTagsSerializer extends JsonSerializer { + @Override + public void serialize(EndpointTags value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointTagsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointTagsDeserializer extends JsonDeserializer { + 
@Override + public EndpointTags deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointTagsPb pb = mapper.readValue(p, EndpointTagsPb.class); + return EndpointTags.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagsPb.java similarity index 50% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagsPb.java index 778e1d961..2f5c625e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTagsPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.serving; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,35 +9,34 @@ import java.util.Objects; @Generated -public class PollQueryStatusResponse { - /** */ - @JsonProperty("data") - private Collection data; +class EndpointTagsPb { + @JsonProperty("tags") + private Collection tags; - public PollQueryStatusResponse setData(Collection data) { - this.data = data; + public EndpointTagsPb setTags(Collection tags) { + this.tags = tags; return this; } - public Collection getData() { - return data; + public Collection getTags() { + return tags; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - PollQueryStatusResponse that = (PollQueryStatusResponse) o; - return Objects.equals(data, that.data); + EndpointTagsPb that = (EndpointTagsPb) o; + return Objects.equals(tags, that.tags); } @Override public int hashCode() { - return Objects.hash(data); + return Objects.hash(tags); } @Override public String toString() { - return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString(); + return new ToStringer(EndpointTagsPb.class).add("tags", tags).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java index 02e5a881b..172179aef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get metrics of a serving endpoint */ @Generated +@JsonSerialize(using = ExportMetricsRequest.ExportMetricsRequestSerializer.class) +@JsonDeserialize(using = ExportMetricsRequest.ExportMetricsRequestDeserializer.class) public class ExportMetricsRequest { /** The name of the serving endpoint to retrieve metrics for. This field is required. */ - @JsonIgnore private String name; + private String name; public ExportMetricsRequest setName(String name) { this.name = name; @@ -39,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(ExportMetricsRequest.class).add("name", name).toString(); } + + ExportMetricsRequestPb toPb() { + ExportMetricsRequestPb pb = new ExportMetricsRequestPb(); + pb.setName(name); + + return pb; + } + + static ExportMetricsRequest fromPb(ExportMetricsRequestPb pb) { + ExportMetricsRequest model = new ExportMetricsRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class ExportMetricsRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ExportMetricsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExportMetricsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportMetricsRequestDeserializer + extends JsonDeserializer { + @Override + public ExportMetricsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportMetricsRequestPb pb = mapper.readValue(p, ExportMetricsRequestPb.class); + return ExportMetricsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequestPb.java new file mode 100755 index 000000000..05164ba87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get metrics of a serving endpoint */ +@Generated +class ExportMetricsRequestPb { + @JsonIgnore private String name; + + public ExportMetricsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportMetricsRequestPb that = (ExportMetricsRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(ExportMetricsRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponse.java index 922727b44..a0cee1925 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; @Generated +@JsonSerialize(using = ExportMetricsResponse.ExportMetricsResponseSerializer.class) +@JsonDeserialize(using = ExportMetricsResponse.ExportMetricsResponseDeserializer.class) public class ExportMetricsResponse { /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; public ExportMetricsResponse setContents(InputStream contents) { this.contents = contents; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ExportMetricsResponse.class).add("contents", contents).toString(); } + + ExportMetricsResponsePb toPb() { + ExportMetricsResponsePb pb = new ExportMetricsResponsePb(); + pb.setContents(contents); + + return pb; + } + + static ExportMetricsResponse fromPb(ExportMetricsResponsePb pb) { + ExportMetricsResponse model = new ExportMetricsResponse(); + model.setContents(pb.getContents()); + + return model; + } + + public static class ExportMetricsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ExportMetricsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
ExportMetricsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportMetricsResponseDeserializer + extends JsonDeserializer { + @Override + public ExportMetricsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportMetricsResponsePb pb = mapper.readValue(p, ExportMetricsResponsePb.class); + return ExportMetricsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponsePb.java new file mode 100755 index 000000000..8736b8bf8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +@Generated +class ExportMetricsResponsePb { + @JsonIgnore private InputStream contents; + + public ExportMetricsResponsePb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportMetricsResponsePb that = (ExportMetricsResponsePb) o; + return Objects.equals(contents, that.contents); + } + + @Override + public int hashCode() { + return Objects.hash(contents); + } + + @Override + public String toString() { + return new ToStringer(ExportMetricsResponsePb.class).add("contents", contents).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java index ab122974a..9a25382cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java @@ -4,37 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Simple Proto message for testing */ @Generated +@JsonSerialize(using = ExternalFunctionRequest.ExternalFunctionRequestSerializer.class) +@JsonDeserialize(using = ExternalFunctionRequest.ExternalFunctionRequestDeserializer.class) public class ExternalFunctionRequest { /** The connection name to use. This is required to identify the external connection. */ - @JsonProperty("connection_name") private String connectionName; /** * Additional headers for the request. If not provided, only auth headers from connections would * be passed. */ - @JsonProperty("headers") private String headers; /** The JSON payload to send in the request body. */ - @JsonProperty("json") private String json; /** The HTTP method to use (e.g., 'GET', 'POST'). */ - @JsonProperty("method") private ExternalFunctionRequestHttpMethod method; /** Query parameters for the request. */ - @JsonProperty("params") private String params; /** The relative path for the API endpoint. This is required. 
*/ - @JsonProperty("path") private String path; public ExternalFunctionRequest setConnectionName(String connectionName) { @@ -120,4 +125,51 @@ public String toString() { .add("path", path) .toString(); } + + ExternalFunctionRequestPb toPb() { + ExternalFunctionRequestPb pb = new ExternalFunctionRequestPb(); + pb.setConnectionName(connectionName); + pb.setHeaders(headers); + pb.setJson(json); + pb.setMethod(method); + pb.setParams(params); + pb.setPath(path); + + return pb; + } + + static ExternalFunctionRequest fromPb(ExternalFunctionRequestPb pb) { + ExternalFunctionRequest model = new ExternalFunctionRequest(); + model.setConnectionName(pb.getConnectionName()); + model.setHeaders(pb.getHeaders()); + model.setJson(pb.getJson()); + model.setMethod(pb.getMethod()); + model.setParams(pb.getParams()); + model.setPath(pb.getPath()); + + return model; + } + + public static class ExternalFunctionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ExternalFunctionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalFunctionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalFunctionRequestDeserializer + extends JsonDeserializer { + @Override + public ExternalFunctionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalFunctionRequestPb pb = mapper.readValue(p, ExternalFunctionRequestPb.class); + return ExternalFunctionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestPb.java new file mode 100755 index 000000000..d37d8ed58 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Simple Proto message for testing */ +@Generated +class ExternalFunctionRequestPb { + @JsonProperty("connection_name") + private String connectionName; + + @JsonProperty("headers") + private String headers; + + @JsonProperty("json") + private String json; + + @JsonProperty("method") + private ExternalFunctionRequestHttpMethod method; + + @JsonProperty("params") + private String params; + + @JsonProperty("path") + private String path; + + public ExternalFunctionRequestPb setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + + public ExternalFunctionRequestPb setHeaders(String headers) { + this.headers = headers; + return this; + } + + public String getHeaders() { + return headers; + } + + public ExternalFunctionRequestPb setJson(String json) { + this.json = json; + return this; + } + + public String getJson() { + return json; + } + + public ExternalFunctionRequestPb setMethod(ExternalFunctionRequestHttpMethod method) { + this.method = method; + return this; + } 
+ + public ExternalFunctionRequestHttpMethod getMethod() { + return method; + } + + public ExternalFunctionRequestPb setParams(String params) { + this.params = params; + return this; + } + + public String getParams() { + return params; + } + + public ExternalFunctionRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalFunctionRequestPb that = (ExternalFunctionRequestPb) o; + return Objects.equals(connectionName, that.connectionName) + && Objects.equals(headers, that.headers) + && Objects.equals(json, that.json) + && Objects.equals(method, that.method) + && Objects.equals(params, that.params) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(connectionName, headers, json, method, params, path); + } + + @Override + public String toString() { + return new ToStringer(ExternalFunctionRequestPb.class) + .add("connectionName", connectionName) + .add("headers", headers) + .add("json", json) + .add("method", method) + .add("params", params) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java index 4c013e026..451d35ee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java @@ -4,51 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalModel.ExternalModelSerializer.class) +@JsonDeserialize(using = ExternalModel.ExternalModelDeserializer.class) public class ExternalModel { /** AI21Labs Config. Only required if the provider is 'ai21labs'. */ - @JsonProperty("ai21labs_config") private Ai21LabsConfig ai21labsConfig; /** Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'. */ - @JsonProperty("amazon_bedrock_config") private AmazonBedrockConfig amazonBedrockConfig; /** Anthropic Config. Only required if the provider is 'anthropic'. */ - @JsonProperty("anthropic_config") private AnthropicConfig anthropicConfig; /** Cohere Config. Only required if the provider is 'cohere'. */ - @JsonProperty("cohere_config") private CohereConfig cohereConfig; /** Custom Provider Config. Only required if the provider is 'custom'. */ - @JsonProperty("custom_provider_config") private CustomProviderConfig customProviderConfig; /** * Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'. */ - @JsonProperty("databricks_model_serving_config") private DatabricksModelServingConfig databricksModelServingConfig; /** Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'. */ - @JsonProperty("google_cloud_vertex_ai_config") private GoogleCloudVertexAiConfig googleCloudVertexAiConfig; /** The name of the external model. */ - @JsonProperty("name") private String name; /** OpenAI Config. Only required if the provider is 'openai'. 
*/ - @JsonProperty("openai_config") private OpenAiConfig openaiConfig; /** PaLM Config. Only required if the provider is 'palm'. */ - @JsonProperty("palm_config") private PaLmConfig palmConfig; /** @@ -56,11 +57,9 @@ public class ExternalModel { * 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', * 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'. */ - @JsonProperty("provider") private ExternalModelProvider provider; /** The task type of the external model. */ - @JsonProperty("task") private String task; public ExternalModel setAi21labsConfig(Ai21LabsConfig ai21labsConfig) { @@ -226,4 +225,59 @@ public String toString() { .add("task", task) .toString(); } + + ExternalModelPb toPb() { + ExternalModelPb pb = new ExternalModelPb(); + pb.setAi21labsConfig(ai21labsConfig); + pb.setAmazonBedrockConfig(amazonBedrockConfig); + pb.setAnthropicConfig(anthropicConfig); + pb.setCohereConfig(cohereConfig); + pb.setCustomProviderConfig(customProviderConfig); + pb.setDatabricksModelServingConfig(databricksModelServingConfig); + pb.setGoogleCloudVertexAiConfig(googleCloudVertexAiConfig); + pb.setName(name); + pb.setOpenaiConfig(openaiConfig); + pb.setPalmConfig(palmConfig); + pb.setProvider(provider); + pb.setTask(task); + + return pb; + } + + static ExternalModel fromPb(ExternalModelPb pb) { + ExternalModel model = new ExternalModel(); + model.setAi21labsConfig(pb.getAi21labsConfig()); + model.setAmazonBedrockConfig(pb.getAmazonBedrockConfig()); + model.setAnthropicConfig(pb.getAnthropicConfig()); + model.setCohereConfig(pb.getCohereConfig()); + model.setCustomProviderConfig(pb.getCustomProviderConfig()); + model.setDatabricksModelServingConfig(pb.getDatabricksModelServingConfig()); + model.setGoogleCloudVertexAiConfig(pb.getGoogleCloudVertexAiConfig()); + model.setName(pb.getName()); + model.setOpenaiConfig(pb.getOpenaiConfig()); + model.setPalmConfig(pb.getPalmConfig()); + model.setProvider(pb.getProvider()); + model.setTask(pb.getTask()); 
+ + return model; + } + + public static class ExternalModelSerializer extends JsonSerializer { + @Override + public void serialize(ExternalModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalModelDeserializer extends JsonDeserializer { + @Override + public ExternalModel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalModelPb pb = mapper.readValue(p, ExternalModelPb.class); + return ExternalModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelPb.java new file mode 100755 index 000000000..2570cf67a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelPb.java @@ -0,0 +1,211 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalModelPb { + @JsonProperty("ai21labs_config") + private Ai21LabsConfig ai21labsConfig; + + @JsonProperty("amazon_bedrock_config") + private AmazonBedrockConfig amazonBedrockConfig; + + @JsonProperty("anthropic_config") + private AnthropicConfig anthropicConfig; + + @JsonProperty("cohere_config") + private CohereConfig cohereConfig; + + @JsonProperty("custom_provider_config") + private CustomProviderConfig customProviderConfig; + + @JsonProperty("databricks_model_serving_config") + private DatabricksModelServingConfig databricksModelServingConfig; + + @JsonProperty("google_cloud_vertex_ai_config") + private GoogleCloudVertexAiConfig googleCloudVertexAiConfig; + + @JsonProperty("name") + private String name; + + @JsonProperty("openai_config") + private OpenAiConfig openaiConfig; + + @JsonProperty("palm_config") + private PaLmConfig palmConfig; + + @JsonProperty("provider") + private ExternalModelProvider provider; + + @JsonProperty("task") + private String task; + + public ExternalModelPb setAi21labsConfig(Ai21LabsConfig ai21labsConfig) { + this.ai21labsConfig = ai21labsConfig; + return this; + } + + public Ai21LabsConfig getAi21labsConfig() { + return ai21labsConfig; + } + + public ExternalModelPb setAmazonBedrockConfig(AmazonBedrockConfig amazonBedrockConfig) { + this.amazonBedrockConfig = amazonBedrockConfig; + return this; + } + + public AmazonBedrockConfig getAmazonBedrockConfig() { + return amazonBedrockConfig; + } + + public ExternalModelPb setAnthropicConfig(AnthropicConfig anthropicConfig) { + this.anthropicConfig = anthropicConfig; + return this; + } + + public AnthropicConfig getAnthropicConfig() { + return anthropicConfig; + } + + public ExternalModelPb setCohereConfig(CohereConfig cohereConfig) { + 
this.cohereConfig = cohereConfig; + return this; + } + + public CohereConfig getCohereConfig() { + return cohereConfig; + } + + public ExternalModelPb setCustomProviderConfig(CustomProviderConfig customProviderConfig) { + this.customProviderConfig = customProviderConfig; + return this; + } + + public CustomProviderConfig getCustomProviderConfig() { + return customProviderConfig; + } + + public ExternalModelPb setDatabricksModelServingConfig( + DatabricksModelServingConfig databricksModelServingConfig) { + this.databricksModelServingConfig = databricksModelServingConfig; + return this; + } + + public DatabricksModelServingConfig getDatabricksModelServingConfig() { + return databricksModelServingConfig; + } + + public ExternalModelPb setGoogleCloudVertexAiConfig( + GoogleCloudVertexAiConfig googleCloudVertexAiConfig) { + this.googleCloudVertexAiConfig = googleCloudVertexAiConfig; + return this; + } + + public GoogleCloudVertexAiConfig getGoogleCloudVertexAiConfig() { + return googleCloudVertexAiConfig; + } + + public ExternalModelPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalModelPb setOpenaiConfig(OpenAiConfig openaiConfig) { + this.openaiConfig = openaiConfig; + return this; + } + + public OpenAiConfig getOpenaiConfig() { + return openaiConfig; + } + + public ExternalModelPb setPalmConfig(PaLmConfig palmConfig) { + this.palmConfig = palmConfig; + return this; + } + + public PaLmConfig getPalmConfig() { + return palmConfig; + } + + public ExternalModelPb setProvider(ExternalModelProvider provider) { + this.provider = provider; + return this; + } + + public ExternalModelProvider getProvider() { + return provider; + } + + public ExternalModelPb setTask(String task) { + this.task = task; + return this; + } + + public String getTask() { + return task; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + ExternalModelPb that = (ExternalModelPb) o; + return Objects.equals(ai21labsConfig, that.ai21labsConfig) + && Objects.equals(amazonBedrockConfig, that.amazonBedrockConfig) + && Objects.equals(anthropicConfig, that.anthropicConfig) + && Objects.equals(cohereConfig, that.cohereConfig) + && Objects.equals(customProviderConfig, that.customProviderConfig) + && Objects.equals(databricksModelServingConfig, that.databricksModelServingConfig) + && Objects.equals(googleCloudVertexAiConfig, that.googleCloudVertexAiConfig) + && Objects.equals(name, that.name) + && Objects.equals(openaiConfig, that.openaiConfig) + && Objects.equals(palmConfig, that.palmConfig) + && Objects.equals(provider, that.provider) + && Objects.equals(task, that.task); + } + + @Override + public int hashCode() { + return Objects.hash( + ai21labsConfig, + amazonBedrockConfig, + anthropicConfig, + cohereConfig, + customProviderConfig, + databricksModelServingConfig, + googleCloudVertexAiConfig, + name, + openaiConfig, + palmConfig, + provider, + task); + } + + @Override + public String toString() { + return new ToStringer(ExternalModelPb.class) + .add("ai21labsConfig", ai21labsConfig) + .add("amazonBedrockConfig", amazonBedrockConfig) + .add("anthropicConfig", anthropicConfig) + .add("cohereConfig", cohereConfig) + .add("customProviderConfig", customProviderConfig) + .add("databricksModelServingConfig", databricksModelServingConfig) + .add("googleCloudVertexAiConfig", googleCloudVertexAiConfig) + .add("name", name) + .add("openaiConfig", openaiConfig) + .add("palmConfig", palmConfig) + .add("provider", provider) + .add("task", task) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElement.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElement.java index b977d3da4..933e605ff 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElement.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElement.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalModelUsageElement.ExternalModelUsageElementSerializer.class) +@JsonDeserialize(using = ExternalModelUsageElement.ExternalModelUsageElementDeserializer.class) public class ExternalModelUsageElement { /** The number of tokens in the chat/completions response. */ - @JsonProperty("completion_tokens") private Long completionTokens; /** The number of tokens in the prompt. */ - @JsonProperty("prompt_tokens") private Long promptTokens; /** The total number of tokens in the prompt and response. 
*/ - @JsonProperty("total_tokens") private Long totalTokens; public ExternalModelUsageElement setCompletionTokens(Long completionTokens) { @@ -71,4 +79,45 @@ public String toString() { .add("totalTokens", totalTokens) .toString(); } + + ExternalModelUsageElementPb toPb() { + ExternalModelUsageElementPb pb = new ExternalModelUsageElementPb(); + pb.setCompletionTokens(completionTokens); + pb.setPromptTokens(promptTokens); + pb.setTotalTokens(totalTokens); + + return pb; + } + + static ExternalModelUsageElement fromPb(ExternalModelUsageElementPb pb) { + ExternalModelUsageElement model = new ExternalModelUsageElement(); + model.setCompletionTokens(pb.getCompletionTokens()); + model.setPromptTokens(pb.getPromptTokens()); + model.setTotalTokens(pb.getTotalTokens()); + + return model; + } + + public static class ExternalModelUsageElementSerializer + extends JsonSerializer { + @Override + public void serialize( + ExternalModelUsageElement value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalModelUsageElementPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalModelUsageElementDeserializer + extends JsonDeserializer { + @Override + public ExternalModelUsageElement deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalModelUsageElementPb pb = mapper.readValue(p, ExternalModelUsageElementPb.class); + return ExternalModelUsageElement.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElementPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElementPb.java new file mode 100755 index 000000000..131c0b561 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElementPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalModelUsageElementPb { + @JsonProperty("completion_tokens") + private Long completionTokens; + + @JsonProperty("prompt_tokens") + private Long promptTokens; + + @JsonProperty("total_tokens") + private Long totalTokens; + + public ExternalModelUsageElementPb setCompletionTokens(Long completionTokens) { + this.completionTokens = completionTokens; + return this; + } + + public Long getCompletionTokens() { + return completionTokens; + } + + public ExternalModelUsageElementPb setPromptTokens(Long promptTokens) { + this.promptTokens = promptTokens; + return this; + } + + public Long getPromptTokens() { + return promptTokens; + } + + public ExternalModelUsageElementPb setTotalTokens(Long totalTokens) { + this.totalTokens = totalTokens; + return this; + } + + public Long getTotalTokens() { + return totalTokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalModelUsageElementPb that = (ExternalModelUsageElementPb) o; + return 
Objects.equals(completionTokens, that.completionTokens) + && Objects.equals(promptTokens, that.promptTokens) + && Objects.equals(totalTokens, that.totalTokens); + } + + @Override + public int hashCode() { + return Objects.hash(completionTokens, promptTokens, totalTokens); + } + + @Override + public String toString() { + return new ToStringer(ExternalModelUsageElementPb.class) + .add("completionTokens", completionTokens) + .add("promptTokens", promptTokens) + .add("totalTokens", totalTokens) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java index 2d521100a..f0158cd0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FallbackConfig.FallbackConfigSerializer.class) +@JsonDeserialize(using = FallbackConfig.FallbackConfigDeserializer.class) public class FallbackConfig { /** * Whether to enable traffic fallback. 
When a served entity in the serving endpoint returns @@ -16,7 +27,6 @@ public class FallbackConfig { * successful response is returned. If all attempts fail, return the last response with the error * code. */ - @JsonProperty("enabled") private Boolean enabled; public FallbackConfig setEnabled(Boolean enabled) { @@ -45,4 +55,38 @@ public int hashCode() { public String toString() { return new ToStringer(FallbackConfig.class).add("enabled", enabled).toString(); } + + FallbackConfigPb toPb() { + FallbackConfigPb pb = new FallbackConfigPb(); + pb.setEnabled(enabled); + + return pb; + } + + static FallbackConfig fromPb(FallbackConfigPb pb) { + FallbackConfig model = new FallbackConfig(); + model.setEnabled(pb.getEnabled()); + + return model; + } + + public static class FallbackConfigSerializer extends JsonSerializer { + @Override + public void serialize(FallbackConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FallbackConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FallbackConfigDeserializer extends JsonDeserializer { + @Override + public FallbackConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FallbackConfigPb pb = mapper.readValue(p, FallbackConfigPb.class); + return FallbackConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfigPb.java new file mode 100755 index 000000000..9cb291a8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfigPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FallbackConfigPb { + @JsonProperty("enabled") + private Boolean enabled; + + public FallbackConfigPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FallbackConfigPb that = (FallbackConfigPb) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(FallbackConfigPb.class).add("enabled", enabled).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java index c5a818bcd..6a72c4c42 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,21 +21,19 @@ * customers. */ @Generated +@JsonSerialize(using = FoundationModel.FoundationModelSerializer.class) +@JsonDeserialize(using = FoundationModel.FoundationModelDeserializer.class) public class FoundationModel { /** */ - @JsonProperty("description") private String description; /** */ - @JsonProperty("display_name") private String displayName; /** */ - @JsonProperty("docs") private String docs; /** */ - @JsonProperty("name") private String name; public FoundationModel setDescription(String description) { @@ -90,4 +97,44 @@ public String toString() { .add("name", name) .toString(); } + + FoundationModelPb toPb() { + FoundationModelPb pb = new FoundationModelPb(); + pb.setDescription(description); + pb.setDisplayName(displayName); + pb.setDocs(docs); + pb.setName(name); + + return pb; + } + + static FoundationModel fromPb(FoundationModelPb pb) { + FoundationModel model = new FoundationModel(); + model.setDescription(pb.getDescription()); + model.setDisplayName(pb.getDisplayName()); + model.setDocs(pb.getDocs()); + model.setName(pb.getName()); + + return model; + } + + public static class FoundationModelSerializer extends JsonSerializer { + @Override + public void serialize(FoundationModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FoundationModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FoundationModelDeserializer extends JsonDeserializer { + @Override + public FoundationModel deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FoundationModelPb pb = mapper.readValue(p, FoundationModelPb.class); + return FoundationModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModelPb.java new file mode 100755 index 000000000..e80ab38d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModelPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * All fields are not sensitive as they are hard-coded in the system and made available to + * customers. + */ +@Generated +class FoundationModelPb { + @JsonProperty("description") + private String description; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("docs") + private String docs; + + @JsonProperty("name") + private String name; + + public FoundationModelPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public FoundationModelPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public FoundationModelPb setDocs(String docs) { + this.docs = docs; + return this; + } + + public String getDocs() { + return docs; + } + + public FoundationModelPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + FoundationModelPb that = (FoundationModelPb) o; + return Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(docs, that.docs) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(description, displayName, docs, name); + } + + @Override + public String toString() { + return new ToStringer(FoundationModelPb.class) + .add("description", description) + .add("displayName", displayName) + .add("docs", docs) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequest.java index ad05234d3..88b9c411e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the schema for a serving endpoint */ @Generated +@JsonSerialize(using = GetOpenApiRequest.GetOpenApiRequestSerializer.class) +@JsonDeserialize(using = GetOpenApiRequest.GetOpenApiRequestDeserializer.class) public class 
GetOpenApiRequest { /** The name of the serving endpoint that the served model belongs to. This field is required. */ - @JsonIgnore private String name; + private String name; public GetOpenApiRequest setName(String name) { this.name = name; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetOpenApiRequest.class).add("name", name).toString(); } + + GetOpenApiRequestPb toPb() { + GetOpenApiRequestPb pb = new GetOpenApiRequestPb(); + pb.setName(name); + + return pb; + } + + static GetOpenApiRequest fromPb(GetOpenApiRequestPb pb) { + GetOpenApiRequest model = new GetOpenApiRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetOpenApiRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetOpenApiRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetOpenApiRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetOpenApiRequestDeserializer extends JsonDeserializer { + @Override + public GetOpenApiRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetOpenApiRequestPb pb = mapper.readValue(p, GetOpenApiRequestPb.class); + return GetOpenApiRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequestPb.java new file mode 100755 index 000000000..1f0de6402 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the schema for a serving endpoint */ +@Generated +class GetOpenApiRequestPb { + @JsonIgnore private String name; + + public GetOpenApiRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOpenApiRequestPb that = (GetOpenApiRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetOpenApiRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java index 6512b0530..a3ec9acf3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; @Generated +@JsonSerialize(using = GetOpenApiResponse.GetOpenApiResponseSerializer.class) +@JsonDeserialize(using = GetOpenApiResponse.GetOpenApiResponseDeserializer.class) public class GetOpenApiResponse { /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; public GetOpenApiResponse setContents(InputStream contents) { this.contents = contents; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetOpenApiResponse.class).add("contents", contents).toString(); } + + GetOpenApiResponsePb toPb() { + GetOpenApiResponsePb pb = new GetOpenApiResponsePb(); + pb.setContents(contents); + + return pb; + } + + static GetOpenApiResponse fromPb(GetOpenApiResponsePb pb) { + GetOpenApiResponse model = new GetOpenApiResponse(); + model.setContents(pb.getContents()); + + return model; + } + + public static class GetOpenApiResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetOpenApiResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetOpenApiResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetOpenApiResponseDeserializer extends JsonDeserializer { + @Override + public GetOpenApiResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetOpenApiResponsePb pb = mapper.readValue(p, GetOpenApiResponsePb.class); + return GetOpenApiResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponsePb.java new file mode 100755 index 000000000..558592cf2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +@Generated +class GetOpenApiResponsePb { + @JsonIgnore private InputStream contents; + + public GetOpenApiResponsePb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOpenApiResponsePb that = (GetOpenApiResponsePb) o; + return Objects.equals(contents, that.contents); + } + + @Override + public int hashCode() { + return Objects.hash(contents); + } + + @Override + public String toString() { + return new ToStringer(GetOpenApiResponsePb.class).add("contents", contents).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequest.java index df34d1f50..03b11db2a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get serving endpoint permission levels */ @Generated +@JsonSerialize( + using = + GetServingEndpointPermissionLevelsRequest + .GetServingEndpointPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = + GetServingEndpointPermissionLevelsRequest + .GetServingEndpointPermissionLevelsRequestDeserializer.class) public class GetServingEndpointPermissionLevelsRequest { /** The serving endpoint for which to get or manage permissions. 
*/ - @JsonIgnore private String servingEndpointId; + private String servingEndpointId; public GetServingEndpointPermissionLevelsRequest setServingEndpointId(String servingEndpointId) { this.servingEndpointId = servingEndpointId; @@ -41,4 +58,47 @@ public String toString() { .add("servingEndpointId", servingEndpointId) .toString(); } + + GetServingEndpointPermissionLevelsRequestPb toPb() { + GetServingEndpointPermissionLevelsRequestPb pb = + new GetServingEndpointPermissionLevelsRequestPb(); + pb.setServingEndpointId(servingEndpointId); + + return pb; + } + + static GetServingEndpointPermissionLevelsRequest fromPb( + GetServingEndpointPermissionLevelsRequestPb pb) { + GetServingEndpointPermissionLevelsRequest model = + new GetServingEndpointPermissionLevelsRequest(); + model.setServingEndpointId(pb.getServingEndpointId()); + + return model; + } + + public static class GetServingEndpointPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServingEndpointPermissionLevelsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetServingEndpointPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServingEndpointPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetServingEndpointPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServingEndpointPermissionLevelsRequestPb pb = + mapper.readValue(p, GetServingEndpointPermissionLevelsRequestPb.class); + return GetServingEndpointPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequestPb.java new file mode 100755 index 000000000..21daa6cad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get serving endpoint permission levels */ +@Generated +class GetServingEndpointPermissionLevelsRequestPb { + @JsonIgnore private String servingEndpointId; + + public GetServingEndpointPermissionLevelsRequestPb setServingEndpointId( + String servingEndpointId) { + this.servingEndpointId = servingEndpointId; + return this; + } + + public String getServingEndpointId() { + return servingEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServingEndpointPermissionLevelsRequestPb that = + (GetServingEndpointPermissionLevelsRequestPb) o; + return Objects.equals(servingEndpointId, that.servingEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash(servingEndpointId); + } + + @Override + public String toString() { + return new ToStringer(GetServingEndpointPermissionLevelsRequestPb.class) + .add("servingEndpointId", servingEndpointId) + .toString(); 
+ } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponse.java index 631a38dc5..3ec71e30c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetServingEndpointPermissionLevelsResponse + .GetServingEndpointPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetServingEndpointPermissionLevelsResponse + .GetServingEndpointPermissionLevelsResponseDeserializer.class) public class GetServingEndpointPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetServingEndpointPermissionLevelsResponse setPermissionLevels( @@ -44,4 +60,47 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetServingEndpointPermissionLevelsResponsePb toPb() { + 
GetServingEndpointPermissionLevelsResponsePb pb = + new GetServingEndpointPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetServingEndpointPermissionLevelsResponse fromPb( + GetServingEndpointPermissionLevelsResponsePb pb) { + GetServingEndpointPermissionLevelsResponse model = + new GetServingEndpointPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetServingEndpointPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServingEndpointPermissionLevelsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetServingEndpointPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServingEndpointPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetServingEndpointPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServingEndpointPermissionLevelsResponsePb pb = + mapper.readValue(p, GetServingEndpointPermissionLevelsResponsePb.class); + return GetServingEndpointPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponsePb.java new file mode 100755 index 000000000..edffaa473 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionLevelsResponsePb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetServingEndpointPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetServingEndpointPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServingEndpointPermissionLevelsResponsePb that = + (GetServingEndpointPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetServingEndpointPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequest.java index 3e0d5cdd5..7f934ac8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get serving endpoint permissions */ @Generated +@JsonSerialize( + using = + GetServingEndpointPermissionsRequest.GetServingEndpointPermissionsRequestSerializer.class) +@JsonDeserialize( + using = + GetServingEndpointPermissionsRequest.GetServingEndpointPermissionsRequestDeserializer.class) public class GetServingEndpointPermissionsRequest { /** The serving endpoint for which to get or manage permissions. */ - @JsonIgnore private String servingEndpointId; + private String servingEndpointId; public GetServingEndpointPermissionsRequest setServingEndpointId(String servingEndpointId) { this.servingEndpointId = servingEndpointId; @@ -41,4 +56,42 @@ public String toString() { .add("servingEndpointId", servingEndpointId) .toString(); } + + GetServingEndpointPermissionsRequestPb toPb() { + GetServingEndpointPermissionsRequestPb pb = new GetServingEndpointPermissionsRequestPb(); + pb.setServingEndpointId(servingEndpointId); + + return pb; + } + + static GetServingEndpointPermissionsRequest fromPb(GetServingEndpointPermissionsRequestPb pb) { + GetServingEndpointPermissionsRequest model = new GetServingEndpointPermissionsRequest(); + model.setServingEndpointId(pb.getServingEndpointId()); + + return model; + } + + public static class GetServingEndpointPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServingEndpointPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetServingEndpointPermissionsRequestPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServingEndpointPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetServingEndpointPermissionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServingEndpointPermissionsRequestPb pb = + mapper.readValue(p, GetServingEndpointPermissionsRequestPb.class); + return GetServingEndpointPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequestPb.java new file mode 100755 index 000000000..2fb518781 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointPermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get serving endpoint permissions */ +@Generated +class GetServingEndpointPermissionsRequestPb { + @JsonIgnore private String servingEndpointId; + + public GetServingEndpointPermissionsRequestPb setServingEndpointId(String servingEndpointId) { + this.servingEndpointId = servingEndpointId; + return this; + } + + public String getServingEndpointId() { + return servingEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServingEndpointPermissionsRequestPb that = (GetServingEndpointPermissionsRequestPb) o; + return Objects.equals(servingEndpointId, that.servingEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash(servingEndpointId); + } + + @Override + public String toString() { + return new ToStringer(GetServingEndpointPermissionsRequestPb.class) + .add("servingEndpointId", servingEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequest.java index d31230a06..d6d9a3fa8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a single serving endpoint */ @Generated +@JsonSerialize(using = GetServingEndpointRequest.GetServingEndpointRequestSerializer.class) +@JsonDeserialize(using = GetServingEndpointRequest.GetServingEndpointRequestDeserializer.class) public class GetServingEndpointRequest { /** The name of the serving endpoint. This field is required. */ - @JsonIgnore private String name; + private String name; public GetServingEndpointRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetServingEndpointRequest.class).add("name", name).toString(); } + + GetServingEndpointRequestPb toPb() { + GetServingEndpointRequestPb pb = new GetServingEndpointRequestPb(); + pb.setName(name); + + return pb; + } + + static GetServingEndpointRequest fromPb(GetServingEndpointRequestPb pb) { + GetServingEndpointRequest model = new GetServingEndpointRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetServingEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetServingEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetServingEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetServingEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public GetServingEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetServingEndpointRequestPb pb = mapper.readValue(p, GetServingEndpointRequestPb.class); + return GetServingEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequestPb.java new file mode 100755 index 000000000..6a01a210a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetServingEndpointRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a single serving endpoint */ +@Generated +class GetServingEndpointRequestPb { + @JsonIgnore private String name; + + public GetServingEndpointRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServingEndpointRequestPb that = (GetServingEndpointRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetServingEndpointRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfig.java index dd930b78e..34f717659 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfig.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GoogleCloudVertexAiConfig.GoogleCloudVertexAiConfigSerializer.class) +@JsonDeserialize(using = GoogleCloudVertexAiConfig.GoogleCloudVertexAiConfigDeserializer.class) public class GoogleCloudVertexAiConfig { /** * The Databricks secret key reference for a private key for the service account which has access @@ -18,7 +29,6 @@ public class GoogleCloudVertexAiConfig { *

[Best practices for managing service account keys]: * https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys */ - @JsonProperty("private_key") private String privateKey; /** @@ -30,11 +40,9 @@ public class GoogleCloudVertexAiConfig { *

[Best practices for managing service account keys]: * https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys */ - @JsonProperty("private_key_plaintext") private String privateKeyPlaintext; /** This is the Google Cloud project id that the service account is associated with. */ - @JsonProperty("project_id") private String projectId; /** @@ -43,7 +51,6 @@ public class GoogleCloudVertexAiConfig { * *

[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations */ - @JsonProperty("region") private String region; public GoogleCloudVertexAiConfig setPrivateKey(String privateKey) { @@ -107,4 +114,47 @@ public String toString() { .add("region", region) .toString(); } + + GoogleCloudVertexAiConfigPb toPb() { + GoogleCloudVertexAiConfigPb pb = new GoogleCloudVertexAiConfigPb(); + pb.setPrivateKey(privateKey); + pb.setPrivateKeyPlaintext(privateKeyPlaintext); + pb.setProjectId(projectId); + pb.setRegion(region); + + return pb; + } + + static GoogleCloudVertexAiConfig fromPb(GoogleCloudVertexAiConfigPb pb) { + GoogleCloudVertexAiConfig model = new GoogleCloudVertexAiConfig(); + model.setPrivateKey(pb.getPrivateKey()); + model.setPrivateKeyPlaintext(pb.getPrivateKeyPlaintext()); + model.setProjectId(pb.getProjectId()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class GoogleCloudVertexAiConfigSerializer + extends JsonSerializer { + @Override + public void serialize( + GoogleCloudVertexAiConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GoogleCloudVertexAiConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GoogleCloudVertexAiConfigDeserializer + extends JsonDeserializer { + @Override + public GoogleCloudVertexAiConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GoogleCloudVertexAiConfigPb pb = mapper.readValue(p, GoogleCloudVertexAiConfigPb.class); + return GoogleCloudVertexAiConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfigPb.java new file mode 100755 index 000000000..f4e998204 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GoogleCloudVertexAiConfigPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GoogleCloudVertexAiConfigPb { + @JsonProperty("private_key") + private String privateKey; + + @JsonProperty("private_key_plaintext") + private String privateKeyPlaintext; + + @JsonProperty("project_id") + private String projectId; + + @JsonProperty("region") + private String region; + + public GoogleCloudVertexAiConfigPb setPrivateKey(String privateKey) { + this.privateKey = privateKey; + return this; + } + + public String getPrivateKey() { + return privateKey; + } + + public GoogleCloudVertexAiConfigPb setPrivateKeyPlaintext(String privateKeyPlaintext) { + this.privateKeyPlaintext = privateKeyPlaintext; + return this; + } + + public String getPrivateKeyPlaintext() { + return privateKeyPlaintext; + } + + public GoogleCloudVertexAiConfigPb setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public GoogleCloudVertexAiConfigPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + 
public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GoogleCloudVertexAiConfigPb that = (GoogleCloudVertexAiConfigPb) o; + return Objects.equals(privateKey, that.privateKey) + && Objects.equals(privateKeyPlaintext, that.privateKeyPlaintext) + && Objects.equals(projectId, that.projectId) + && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash(privateKey, privateKeyPlaintext, projectId, region); + } + + @Override + public String toString() { + return new ToStringer(GoogleCloudVertexAiConfigPb.class) + .add("privateKey", privateKey) + .add("privateKeyPlaintext", privateKeyPlaintext) + .add("projectId", projectId) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java index 57e459ca3..3d34409a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.io.InputStream; import java.util.Objects; @Generated 
+@JsonSerialize(using = HttpRequestResponse.HttpRequestResponseSerializer.class) +@JsonDeserialize(using = HttpRequestResponse.HttpRequestResponseDeserializer.class) public class HttpRequestResponse { /** */ - @JsonIgnore private InputStream contents; + private InputStream contents; public HttpRequestResponse setContents(InputStream contents) { this.contents = contents; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(HttpRequestResponse.class).add("contents", contents).toString(); } + + HttpRequestResponsePb toPb() { + HttpRequestResponsePb pb = new HttpRequestResponsePb(); + pb.setContents(contents); + + return pb; + } + + static HttpRequestResponse fromPb(HttpRequestResponsePb pb) { + HttpRequestResponse model = new HttpRequestResponse(); + model.setContents(pb.getContents()); + + return model; + } + + public static class HttpRequestResponseSerializer extends JsonSerializer { + @Override + public void serialize(HttpRequestResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + HttpRequestResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class HttpRequestResponseDeserializer + extends JsonDeserializer { + @Override + public HttpRequestResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + HttpRequestResponsePb pb = mapper.readValue(p, HttpRequestResponsePb.class); + return HttpRequestResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponsePb.java new file mode 100755 index 000000000..5cc1bd034 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.io.InputStream; +import java.util.Objects; + +@Generated +class HttpRequestResponsePb { + @JsonIgnore private InputStream contents; + + public HttpRequestResponsePb setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HttpRequestResponsePb that = (HttpRequestResponsePb) o; + return Objects.equals(contents, that.contents); + } + + @Override + public int hashCode() { + return Objects.hash(contents); + } + + @Override + public String toString() { + return new ToStringer(HttpRequestResponsePb.class).add("contents", contents).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponse.java index 9d0674c5d..c12c75839 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListEndpointsResponse.ListEndpointsResponseSerializer.class) +@JsonDeserialize(using = ListEndpointsResponse.ListEndpointsResponseDeserializer.class) public class ListEndpointsResponse { /** The list of endpoints. 
*/ - @JsonProperty("endpoints") private Collection endpoints; public ListEndpointsResponse setEndpoints(Collection endpoints) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListEndpointsResponse.class).add("endpoints", endpoints).toString(); } + + ListEndpointsResponsePb toPb() { + ListEndpointsResponsePb pb = new ListEndpointsResponsePb(); + pb.setEndpoints(endpoints); + + return pb; + } + + static ListEndpointsResponse fromPb(ListEndpointsResponsePb pb) { + ListEndpointsResponse model = new ListEndpointsResponse(); + model.setEndpoints(pb.getEndpoints()); + + return model; + } + + public static class ListEndpointsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListEndpointsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListEndpointsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListEndpointsResponseDeserializer + extends JsonDeserializer { + @Override + public ListEndpointsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListEndpointsResponsePb pb = mapper.readValue(p, ListEndpointsResponsePb.class); + return ListEndpointsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponsePb.java new file mode 100755 index 000000000..da00a2a64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ListEndpointsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListEndpointsResponsePb { + @JsonProperty("endpoints") + private Collection endpoints; + + public ListEndpointsResponsePb setEndpoints(Collection endpoints) { + this.endpoints = endpoints; + return this; + } + + public Collection getEndpoints() { + return endpoints; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListEndpointsResponsePb that = (ListEndpointsResponsePb) o; + return Objects.equals(endpoints, that.endpoints); + } + + @Override + public int hashCode() { + return Objects.hash(endpoints); + } + + @Override + public String toString() { + return new ToStringer(ListEndpointsResponsePb.class).add("endpoints", endpoints).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java index 7c7606e9e..d425f2f10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the latest logs for a served model */ @Generated +@JsonSerialize(using = LogsRequest.LogsRequestSerializer.class) +@JsonDeserialize(using = LogsRequest.LogsRequestDeserializer.class) public class LogsRequest { /** The name of the serving endpoint that the served model belongs to. This field is required. */ - @JsonIgnore private String name; + private String name; /** The name of the served model that logs will be retrieved for. This field is required. */ - @JsonIgnore private String servedModelName; + private String servedModelName; public LogsRequest setName(String name) { this.name = name; @@ -54,4 +65,39 @@ public String toString() { .add("servedModelName", servedModelName) .toString(); } + + LogsRequestPb toPb() { + LogsRequestPb pb = new LogsRequestPb(); + pb.setName(name); + pb.setServedModelName(servedModelName); + + return pb; + } + + static LogsRequest fromPb(LogsRequestPb pb) { + LogsRequest model = new LogsRequest(); + model.setName(pb.getName()); + model.setServedModelName(pb.getServedModelName()); + + return model; + } + + public static class LogsRequestSerializer extends JsonSerializer { + @Override + public void serialize(LogsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LogsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LogsRequestDeserializer extends JsonDeserializer { + @Override + public LogsRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LogsRequestPb pb = mapper.readValue(p, LogsRequestPb.class); + return LogsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequestPb.java new file mode 100755 index 000000000..67efe4480 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the latest logs for a served model */ +@Generated +class LogsRequestPb { + @JsonIgnore private String name; + + @JsonIgnore private String servedModelName; + + public LogsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LogsRequestPb setServedModelName(String servedModelName) { + this.servedModelName = servedModelName; + return this; + } + + public String getServedModelName() { + return servedModelName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogsRequestPb that = (LogsRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(servedModelName, that.servedModelName); + } + + @Override + public int hashCode() { + return Objects.hash(name, servedModelName); + } + + @Override + public String toString() { + return new ToStringer(LogsRequestPb.class) + .add("name", name) + .add("servedModelName", servedModelName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java index 354f3eae3..02c333907 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,9 +21,10 @@ * Plane APIs. */ @Generated +@JsonSerialize(using = ModelDataPlaneInfo.ModelDataPlaneInfoSerializer.class) +@JsonDeserialize(using = ModelDataPlaneInfo.ModelDataPlaneInfoDeserializer.class) public class ModelDataPlaneInfo { /** Information required to query DataPlane API 'query' endpoint. 
*/ - @JsonProperty("query_info") private DataPlaneInfo queryInfo; public ModelDataPlaneInfo setQueryInfo(DataPlaneInfo queryInfo) { @@ -43,4 +53,38 @@ public int hashCode() { public String toString() { return new ToStringer(ModelDataPlaneInfo.class).add("queryInfo", queryInfo).toString(); } + + ModelDataPlaneInfoPb toPb() { + ModelDataPlaneInfoPb pb = new ModelDataPlaneInfoPb(); + pb.setQueryInfo(queryInfo); + + return pb; + } + + static ModelDataPlaneInfo fromPb(ModelDataPlaneInfoPb pb) { + ModelDataPlaneInfo model = new ModelDataPlaneInfo(); + model.setQueryInfo(pb.getQueryInfo()); + + return model; + } + + public static class ModelDataPlaneInfoSerializer extends JsonSerializer { + @Override + public void serialize(ModelDataPlaneInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ModelDataPlaneInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ModelDataPlaneInfoDeserializer extends JsonDeserializer { + @Override + public ModelDataPlaneInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ModelDataPlaneInfoPb pb = mapper.readValue(p, ModelDataPlaneInfoPb.class); + return ModelDataPlaneInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfoPb.java new file mode 100755 index 000000000..575562512 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfoPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A representation of all DataPlaneInfo for operations that can be done on a model through Data + * Plane APIs. + */ +@Generated +class ModelDataPlaneInfoPb { + @JsonProperty("query_info") + private DataPlaneInfo queryInfo; + + public ModelDataPlaneInfoPb setQueryInfo(DataPlaneInfo queryInfo) { + this.queryInfo = queryInfo; + return this; + } + + public DataPlaneInfo getQueryInfo() { + return queryInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelDataPlaneInfoPb that = (ModelDataPlaneInfoPb) o; + return Objects.equals(queryInfo, that.queryInfo); + } + + @Override + public int hashCode() { + return Objects.hash(queryInfo); + } + + @Override + public String toString() { + return new ToStringer(ModelDataPlaneInfoPb.class).add("queryInfo", queryInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java index d7319fc73..2bb164a23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Configs needed to create an OpenAI model route. */ @Generated +@JsonSerialize(using = OpenAiConfig.OpenAiConfigSerializer.class) +@JsonDeserialize(using = OpenAiConfig.OpenAiConfigDeserializer.class) public class OpenAiConfig { /** This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID. */ - @JsonProperty("microsoft_entra_client_id") private String microsoftEntraClientId; /** @@ -20,7 +30,6 @@ public class OpenAiConfig { * `microsoft_entra_client_secret_plaintext`. You must provide an API key using one of the * following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`. */ - @JsonProperty("microsoft_entra_client_secret") private String microsoftEntraClientSecret; /** @@ -29,11 +38,9 @@ public class OpenAiConfig { * You must provide an API key using one of the following fields: `microsoft_entra_client_secret` * or `microsoft_entra_client_secret_plaintext`. */ - @JsonProperty("microsoft_entra_client_secret_plaintext") private String microsoftEntraClientSecretPlaintext; /** This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID. */ - @JsonProperty("microsoft_entra_tenant_id") private String microsoftEntraTenantId; /** @@ -42,7 +49,6 @@ public class OpenAiConfig { * other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI * base URL is used. */ - @JsonProperty("openai_api_base") private String openaiApiBase; /** @@ -50,7 +56,6 @@ public class OpenAiConfig { * you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an * API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`. 
*/ - @JsonProperty("openai_api_key") private String openaiApiKey; /** @@ -58,7 +63,6 @@ public class OpenAiConfig { * prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide * an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`. */ - @JsonProperty("openai_api_key_plaintext") private String openaiApiKeyPlaintext; /** @@ -67,25 +71,21 @@ public class OpenAiConfig { * validation protocol. For access token validation, use azure. For authentication using Azure * Active Directory (Azure AD) use, azuread. */ - @JsonProperty("openai_api_type") private String openaiApiType; /** * This is an optional field to specify the OpenAI API version. For Azure OpenAI, this field is * required, and is the version of the Azure OpenAI service to utilize, specified by a date. */ - @JsonProperty("openai_api_version") private String openaiApiVersion; /** * This field is only required for Azure OpenAI and is the name of the deployment resource for the * Azure OpenAI service. */ - @JsonProperty("openai_deployment_name") private String openaiDeploymentName; /** This is an optional field to specify the organization in OpenAI or Azure OpenAI. 
*/ - @JsonProperty("openai_organization") private String openaiOrganization; public OpenAiConfig setMicrosoftEntraClientId(String microsoftEntraClientId) { @@ -239,4 +239,57 @@ public String toString() { .add("openaiOrganization", openaiOrganization) .toString(); } + + OpenAiConfigPb toPb() { + OpenAiConfigPb pb = new OpenAiConfigPb(); + pb.setMicrosoftEntraClientId(microsoftEntraClientId); + pb.setMicrosoftEntraClientSecret(microsoftEntraClientSecret); + pb.setMicrosoftEntraClientSecretPlaintext(microsoftEntraClientSecretPlaintext); + pb.setMicrosoftEntraTenantId(microsoftEntraTenantId); + pb.setOpenaiApiBase(openaiApiBase); + pb.setOpenaiApiKey(openaiApiKey); + pb.setOpenaiApiKeyPlaintext(openaiApiKeyPlaintext); + pb.setOpenaiApiType(openaiApiType); + pb.setOpenaiApiVersion(openaiApiVersion); + pb.setOpenaiDeploymentName(openaiDeploymentName); + pb.setOpenaiOrganization(openaiOrganization); + + return pb; + } + + static OpenAiConfig fromPb(OpenAiConfigPb pb) { + OpenAiConfig model = new OpenAiConfig(); + model.setMicrosoftEntraClientId(pb.getMicrosoftEntraClientId()); + model.setMicrosoftEntraClientSecret(pb.getMicrosoftEntraClientSecret()); + model.setMicrosoftEntraClientSecretPlaintext(pb.getMicrosoftEntraClientSecretPlaintext()); + model.setMicrosoftEntraTenantId(pb.getMicrosoftEntraTenantId()); + model.setOpenaiApiBase(pb.getOpenaiApiBase()); + model.setOpenaiApiKey(pb.getOpenaiApiKey()); + model.setOpenaiApiKeyPlaintext(pb.getOpenaiApiKeyPlaintext()); + model.setOpenaiApiType(pb.getOpenaiApiType()); + model.setOpenaiApiVersion(pb.getOpenaiApiVersion()); + model.setOpenaiDeploymentName(pb.getOpenaiDeploymentName()); + model.setOpenaiOrganization(pb.getOpenaiOrganization()); + + return model; + } + + public static class OpenAiConfigSerializer extends JsonSerializer { + @Override + public void serialize(OpenAiConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OpenAiConfigPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class OpenAiConfigDeserializer extends JsonDeserializer { + @Override + public OpenAiConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OpenAiConfigPb pb = mapper.readValue(p, OpenAiConfigPb.class); + return OpenAiConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfigPb.java new file mode 100755 index 000000000..039844317 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfigPb.java @@ -0,0 +1,197 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configs needed to create an OpenAI model route. 
*/ +@Generated +class OpenAiConfigPb { + @JsonProperty("microsoft_entra_client_id") + private String microsoftEntraClientId; + + @JsonProperty("microsoft_entra_client_secret") + private String microsoftEntraClientSecret; + + @JsonProperty("microsoft_entra_client_secret_plaintext") + private String microsoftEntraClientSecretPlaintext; + + @JsonProperty("microsoft_entra_tenant_id") + private String microsoftEntraTenantId; + + @JsonProperty("openai_api_base") + private String openaiApiBase; + + @JsonProperty("openai_api_key") + private String openaiApiKey; + + @JsonProperty("openai_api_key_plaintext") + private String openaiApiKeyPlaintext; + + @JsonProperty("openai_api_type") + private String openaiApiType; + + @JsonProperty("openai_api_version") + private String openaiApiVersion; + + @JsonProperty("openai_deployment_name") + private String openaiDeploymentName; + + @JsonProperty("openai_organization") + private String openaiOrganization; + + public OpenAiConfigPb setMicrosoftEntraClientId(String microsoftEntraClientId) { + this.microsoftEntraClientId = microsoftEntraClientId; + return this; + } + + public String getMicrosoftEntraClientId() { + return microsoftEntraClientId; + } + + public OpenAiConfigPb setMicrosoftEntraClientSecret(String microsoftEntraClientSecret) { + this.microsoftEntraClientSecret = microsoftEntraClientSecret; + return this; + } + + public String getMicrosoftEntraClientSecret() { + return microsoftEntraClientSecret; + } + + public OpenAiConfigPb setMicrosoftEntraClientSecretPlaintext( + String microsoftEntraClientSecretPlaintext) { + this.microsoftEntraClientSecretPlaintext = microsoftEntraClientSecretPlaintext; + return this; + } + + public String getMicrosoftEntraClientSecretPlaintext() { + return microsoftEntraClientSecretPlaintext; + } + + public OpenAiConfigPb setMicrosoftEntraTenantId(String microsoftEntraTenantId) { + this.microsoftEntraTenantId = microsoftEntraTenantId; + return this; + } + + public String getMicrosoftEntraTenantId() { 
+ return microsoftEntraTenantId; + } + + public OpenAiConfigPb setOpenaiApiBase(String openaiApiBase) { + this.openaiApiBase = openaiApiBase; + return this; + } + + public String getOpenaiApiBase() { + return openaiApiBase; + } + + public OpenAiConfigPb setOpenaiApiKey(String openaiApiKey) { + this.openaiApiKey = openaiApiKey; + return this; + } + + public String getOpenaiApiKey() { + return openaiApiKey; + } + + public OpenAiConfigPb setOpenaiApiKeyPlaintext(String openaiApiKeyPlaintext) { + this.openaiApiKeyPlaintext = openaiApiKeyPlaintext; + return this; + } + + public String getOpenaiApiKeyPlaintext() { + return openaiApiKeyPlaintext; + } + + public OpenAiConfigPb setOpenaiApiType(String openaiApiType) { + this.openaiApiType = openaiApiType; + return this; + } + + public String getOpenaiApiType() { + return openaiApiType; + } + + public OpenAiConfigPb setOpenaiApiVersion(String openaiApiVersion) { + this.openaiApiVersion = openaiApiVersion; + return this; + } + + public String getOpenaiApiVersion() { + return openaiApiVersion; + } + + public OpenAiConfigPb setOpenaiDeploymentName(String openaiDeploymentName) { + this.openaiDeploymentName = openaiDeploymentName; + return this; + } + + public String getOpenaiDeploymentName() { + return openaiDeploymentName; + } + + public OpenAiConfigPb setOpenaiOrganization(String openaiOrganization) { + this.openaiOrganization = openaiOrganization; + return this; + } + + public String getOpenaiOrganization() { + return openaiOrganization; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OpenAiConfigPb that = (OpenAiConfigPb) o; + return Objects.equals(microsoftEntraClientId, that.microsoftEntraClientId) + && Objects.equals(microsoftEntraClientSecret, that.microsoftEntraClientSecret) + && Objects.equals( + microsoftEntraClientSecretPlaintext, that.microsoftEntraClientSecretPlaintext) + && Objects.equals(microsoftEntraTenantId, 
that.microsoftEntraTenantId) + && Objects.equals(openaiApiBase, that.openaiApiBase) + && Objects.equals(openaiApiKey, that.openaiApiKey) + && Objects.equals(openaiApiKeyPlaintext, that.openaiApiKeyPlaintext) + && Objects.equals(openaiApiType, that.openaiApiType) + && Objects.equals(openaiApiVersion, that.openaiApiVersion) + && Objects.equals(openaiDeploymentName, that.openaiDeploymentName) + && Objects.equals(openaiOrganization, that.openaiOrganization); + } + + @Override + public int hashCode() { + return Objects.hash( + microsoftEntraClientId, + microsoftEntraClientSecret, + microsoftEntraClientSecretPlaintext, + microsoftEntraTenantId, + openaiApiBase, + openaiApiKey, + openaiApiKeyPlaintext, + openaiApiType, + openaiApiVersion, + openaiDeploymentName, + openaiOrganization); + } + + @Override + public String toString() { + return new ToStringer(OpenAiConfigPb.class) + .add("microsoftEntraClientId", microsoftEntraClientId) + .add("microsoftEntraClientSecret", microsoftEntraClientSecret) + .add("microsoftEntraClientSecretPlaintext", microsoftEntraClientSecretPlaintext) + .add("microsoftEntraTenantId", microsoftEntraTenantId) + .add("openaiApiBase", openaiApiBase) + .add("openaiApiKey", openaiApiKey) + .add("openaiApiKeyPlaintext", openaiApiKeyPlaintext) + .add("openaiApiType", openaiApiType) + .add("openaiApiVersion", openaiApiVersion) + .add("openaiDeploymentName", openaiDeploymentName) + .add("openaiOrganization", openaiOrganization) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java index c168b4cf2..b301fb41d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PaLmConfig.PaLmConfigSerializer.class) +@JsonDeserialize(using = PaLmConfig.PaLmConfigDeserializer.class) public class PaLmConfig { /** * The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key * directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following * fields: `palm_api_key` or `palm_api_key_plaintext`. */ - @JsonProperty("palm_api_key") private String palmApiKey; /** @@ -22,7 +32,6 @@ public class PaLmConfig { * Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following * fields: `palm_api_key` or `palm_api_key_plaintext`. 
*/ - @JsonProperty("palm_api_key_plaintext") private String palmApiKeyPlaintext; public PaLmConfig setPalmApiKey(String palmApiKey) { @@ -64,4 +73,39 @@ public String toString() { .add("palmApiKeyPlaintext", palmApiKeyPlaintext) .toString(); } + + PaLmConfigPb toPb() { + PaLmConfigPb pb = new PaLmConfigPb(); + pb.setPalmApiKey(palmApiKey); + pb.setPalmApiKeyPlaintext(palmApiKeyPlaintext); + + return pb; + } + + static PaLmConfig fromPb(PaLmConfigPb pb) { + PaLmConfig model = new PaLmConfig(); + model.setPalmApiKey(pb.getPalmApiKey()); + model.setPalmApiKeyPlaintext(pb.getPalmApiKeyPlaintext()); + + return model; + } + + public static class PaLmConfigSerializer extends JsonSerializer { + @Override + public void serialize(PaLmConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PaLmConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PaLmConfigDeserializer extends JsonDeserializer { + @Override + public PaLmConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PaLmConfigPb pb = mapper.readValue(p, PaLmConfigPb.class); + return PaLmConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfigPb.java new file mode 100755 index 000000000..e0010a8d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PaLmConfigPb { + @JsonProperty("palm_api_key") + private String palmApiKey; + + @JsonProperty("palm_api_key_plaintext") + private String palmApiKeyPlaintext; + + public PaLmConfigPb setPalmApiKey(String palmApiKey) { + this.palmApiKey = palmApiKey; + return this; + } + + public String getPalmApiKey() { + return palmApiKey; + } + + public PaLmConfigPb setPalmApiKeyPlaintext(String palmApiKeyPlaintext) { + this.palmApiKeyPlaintext = palmApiKeyPlaintext; + return this; + } + + public String getPalmApiKeyPlaintext() { + return palmApiKeyPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PaLmConfigPb that = (PaLmConfigPb) o; + return Objects.equals(palmApiKey, that.palmApiKey) + && Objects.equals(palmApiKeyPlaintext, that.palmApiKeyPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(palmApiKey, palmApiKeyPlaintext); + } + + @Override + public String toString() { + return new ToStringer(PaLmConfigPb.class) + .add("palmApiKey", palmApiKey) + .add("palmApiKeyPlaintext", palmApiKeyPlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java index 13be39c8e..f10f67d13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java @@ -4,23 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PatchServingEndpointTags.PatchServingEndpointTagsSerializer.class) +@JsonDeserialize(using = PatchServingEndpointTags.PatchServingEndpointTagsDeserializer.class) public class PatchServingEndpointTags { /** List of endpoint tags to add */ - @JsonProperty("add_tags") private Collection addTags; /** List of tag keys to delete */ - @JsonProperty("delete_tags") private Collection deleteTags; /** The name of the serving endpoint who's tags to patch. This field is required. 
*/ - @JsonIgnore private String name; + private String name; public PatchServingEndpointTags setAddTags(Collection addTags) { this.addTags = addTags; @@ -72,4 +80,45 @@ public String toString() { .add("name", name) .toString(); } + + PatchServingEndpointTagsPb toPb() { + PatchServingEndpointTagsPb pb = new PatchServingEndpointTagsPb(); + pb.setAddTags(addTags); + pb.setDeleteTags(deleteTags); + pb.setName(name); + + return pb; + } + + static PatchServingEndpointTags fromPb(PatchServingEndpointTagsPb pb) { + PatchServingEndpointTags model = new PatchServingEndpointTags(); + model.setAddTags(pb.getAddTags()); + model.setDeleteTags(pb.getDeleteTags()); + model.setName(pb.getName()); + + return model; + } + + public static class PatchServingEndpointTagsSerializer + extends JsonSerializer { + @Override + public void serialize( + PatchServingEndpointTags value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchServingEndpointTagsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchServingEndpointTagsDeserializer + extends JsonDeserializer { + @Override + public PatchServingEndpointTags deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchServingEndpointTagsPb pb = mapper.readValue(p, PatchServingEndpointTagsPb.class); + return PatchServingEndpointTags.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTagsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTagsPb.java new file mode 100755 index 000000000..1c18199c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTagsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PatchServingEndpointTagsPb { + @JsonProperty("add_tags") + private Collection addTags; + + @JsonProperty("delete_tags") + private Collection deleteTags; + + @JsonIgnore private String name; + + public PatchServingEndpointTagsPb setAddTags(Collection addTags) { + this.addTags = addTags; + return this; + } + + public Collection getAddTags() { + return addTags; + } + + public PatchServingEndpointTagsPb setDeleteTags(Collection deleteTags) { + this.deleteTags = deleteTags; + return this; + } + + public Collection getDeleteTags() { + return deleteTags; + } + + public PatchServingEndpointTagsPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PatchServingEndpointTagsPb that = (PatchServingEndpointTagsPb) o; + return Objects.equals(addTags, that.addTags) + && Objects.equals(deleteTags, that.deleteTags) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(addTags, deleteTags, name); + } + + @Override + public String toString() { + return new ToStringer(PatchServingEndpointTagsPb.class) + .add("addTags", addTags) + .add("deleteTags", deleteTags) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java index 500d98298..58bfa6d15 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PayloadTable.PayloadTableSerializer.class) +@JsonDeserialize(using = PayloadTable.PayloadTableDeserializer.class) public class PayloadTable { /** */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("status") private String status; /** */ - @JsonProperty("status_message") private String statusMessage; public PayloadTable setName(String name) { @@ -71,4 +79,41 @@ public String toString() { .add("statusMessage", statusMessage) .toString(); } + + PayloadTablePb toPb() { + PayloadTablePb pb = new PayloadTablePb(); + pb.setName(name); + pb.setStatus(status); + pb.setStatusMessage(statusMessage); + + return pb; + } + + static PayloadTable fromPb(PayloadTablePb pb) { + PayloadTable model = new PayloadTable(); + model.setName(pb.getName()); + model.setStatus(pb.getStatus()); + model.setStatusMessage(pb.getStatusMessage()); + + return model; + } + + public static class PayloadTableSerializer extends JsonSerializer { + @Override + public void serialize(PayloadTable value, JsonGenerator gen, SerializerProvider provider) + 
throws IOException { + PayloadTablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PayloadTableDeserializer extends JsonDeserializer { + @Override + public PayloadTable deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PayloadTablePb pb = mapper.readValue(p, PayloadTablePb.class); + return PayloadTable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTablePb.java new file mode 100755 index 000000000..c68333b9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTablePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PayloadTablePb { + @JsonProperty("name") + private String name; + + @JsonProperty("status") + private String status; + + @JsonProperty("status_message") + private String statusMessage; + + public PayloadTablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PayloadTablePb setStatus(String status) { + this.status = status; + return this; + } + + public String getStatus() { + return status; + } + + public PayloadTablePb setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() 
!= o.getClass()) return false; + PayloadTablePb that = (PayloadTablePb) o; + return Objects.equals(name, that.name) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage); + } + + @Override + public int hashCode() { + return Objects.hash(name, status, statusMessage); + } + + @Override + public String toString() { + return new ToStringer(PayloadTablePb.class) + .add("name", name) + .add("status", status) + .add("statusMessage", statusMessage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java index d0a1c2c0c..bc3e8ae4f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PtEndpointCoreConfig.PtEndpointCoreConfigSerializer.class) +@JsonDeserialize(using = PtEndpointCoreConfig.PtEndpointCoreConfigDeserializer.class) public class PtEndpointCoreConfig { /** The list of served entities under the serving endpoint config. 
*/ - @JsonProperty("served_entities") private Collection servedEntities; /** */ - @JsonProperty("traffic_config") private TrafficConfig trafficConfig; public PtEndpointCoreConfig setServedEntities(Collection servedEntities) { @@ -57,4 +66,42 @@ public String toString() { .add("trafficConfig", trafficConfig) .toString(); } + + PtEndpointCoreConfigPb toPb() { + PtEndpointCoreConfigPb pb = new PtEndpointCoreConfigPb(); + pb.setServedEntities(servedEntities); + pb.setTrafficConfig(trafficConfig); + + return pb; + } + + static PtEndpointCoreConfig fromPb(PtEndpointCoreConfigPb pb) { + PtEndpointCoreConfig model = new PtEndpointCoreConfig(); + model.setServedEntities(pb.getServedEntities()); + model.setTrafficConfig(pb.getTrafficConfig()); + + return model; + } + + public static class PtEndpointCoreConfigSerializer extends JsonSerializer { + @Override + public void serialize( + PtEndpointCoreConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PtEndpointCoreConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PtEndpointCoreConfigDeserializer + extends JsonDeserializer { + @Override + public PtEndpointCoreConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PtEndpointCoreConfigPb pb = mapper.readValue(p, PtEndpointCoreConfigPb.class); + return PtEndpointCoreConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfigPb.java new file mode 100755 index 000000000..ecbb80f35 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfigPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PtEndpointCoreConfigPb { + @JsonProperty("served_entities") + private Collection servedEntities; + + @JsonProperty("traffic_config") + private TrafficConfig trafficConfig; + + public PtEndpointCoreConfigPb setServedEntities(Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection getServedEntities() { + return servedEntities; + } + + public PtEndpointCoreConfigPb setTrafficConfig(TrafficConfig trafficConfig) { + this.trafficConfig = trafficConfig; + return this; + } + + public TrafficConfig getTrafficConfig() { + return trafficConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PtEndpointCoreConfigPb that = (PtEndpointCoreConfigPb) o; + return Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(trafficConfig, that.trafficConfig); + } + + @Override + public int hashCode() { + return Objects.hash(servedEntities, trafficConfig); + } + + @Override + public String toString() { + 
return new ToStringer(PtEndpointCoreConfigPb.class) + .add("servedEntities", servedEntities) + .add("trafficConfig", trafficConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java index a785267c9..dcd174362 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PtServedModel.PtServedModelSerializer.class) +@JsonDeserialize(using = PtServedModel.PtServedModelDeserializer.class) public class PtServedModel { /** * The name of the entity to be served. The entity may be a model in the Databricks Model @@ -15,11 +26,9 @@ public class PtServedModel { * it is a UC object, the full name of the object should be given in the form of * **catalog_name.schema_name.model_name**. */ - @JsonProperty("entity_name") private String entityName; /** */ - @JsonProperty("entity_version") private String entityVersion; /** @@ -28,11 +37,9 @@ public class PtServedModel { * model, this field defaults to external_model.name, with '.' 
and ':' replaced with '-', and if * not specified for other entities, it defaults to entity_name-entity_version. */ - @JsonProperty("name") private String name; /** The number of model units to be provisioned. */ - @JsonProperty("provisioned_model_units") private Long provisionedModelUnits; public PtServedModel setEntityName(String entityName) { @@ -96,4 +103,43 @@ public String toString() { .add("provisionedModelUnits", provisionedModelUnits) .toString(); } + + PtServedModelPb toPb() { + PtServedModelPb pb = new PtServedModelPb(); + pb.setEntityName(entityName); + pb.setEntityVersion(entityVersion); + pb.setName(name); + pb.setProvisionedModelUnits(provisionedModelUnits); + + return pb; + } + + static PtServedModel fromPb(PtServedModelPb pb) { + PtServedModel model = new PtServedModel(); + model.setEntityName(pb.getEntityName()); + model.setEntityVersion(pb.getEntityVersion()); + model.setName(pb.getName()); + model.setProvisionedModelUnits(pb.getProvisionedModelUnits()); + + return model; + } + + public static class PtServedModelSerializer extends JsonSerializer { + @Override + public void serialize(PtServedModel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PtServedModelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PtServedModelDeserializer extends JsonDeserializer { + @Override + public PtServedModel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PtServedModelPb pb = mapper.readValue(p, PtServedModelPb.class); + return PtServedModel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModelPb.java new file mode 100755 index 000000000..aa263490f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModelPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PtServedModelPb { + @JsonProperty("entity_name") + private String entityName; + + @JsonProperty("entity_version") + private String entityVersion; + + @JsonProperty("name") + private String name; + + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + + public PtServedModelPb setEntityName(String entityName) { + this.entityName = entityName; + return this; + } + + public String getEntityName() { + return entityName; + } + + public PtServedModelPb setEntityVersion(String entityVersion) { + this.entityVersion = entityVersion; + return this; + } + + public String getEntityVersion() { + return entityVersion; + } + + public PtServedModelPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PtServedModelPb setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + PtServedModelPb that = (PtServedModelPb) o; + return Objects.equals(entityName, that.entityName) + && Objects.equals(entityVersion, that.entityVersion) + && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits); + } + + @Override + public int hashCode() { + return Objects.hash(entityName, entityVersion, name, provisionedModelUnits); + } + + @Override + public String toString() { + return new ToStringer(PtServedModelPb.class) + .add("entityName", entityName) + .add("entityVersion", entityVersion) + .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java index f0f3be4d0..4a75b85d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java @@ -4,46 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
PutAiGatewayRequest.PutAiGatewayRequestSerializer.class) +@JsonDeserialize(using = PutAiGatewayRequest.PutAiGatewayRequestDeserializer.class) public class PutAiGatewayRequest { /** * Configuration for traffic fallback which auto fallbacks to other served entities if the request * to a served entity fails with certain error codes, to increase availability. */ - @JsonProperty("fallback_config") private FallbackConfig fallbackConfig; /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. */ - @JsonProperty("guardrails") private AiGatewayGuardrails guardrails; /** * Configuration for payload logging using inference tables. Use these tables to monitor and audit * data being sent to and received from model APIs and to improve model quality. */ - @JsonProperty("inference_table_config") private AiGatewayInferenceTableConfig inferenceTableConfig; /** The name of the serving endpoint whose AI Gateway is being updated. This field is required. */ - @JsonIgnore private String name; + private String name; /** Configuration for rate limits which can be set to limit endpoint traffic. */ - @JsonProperty("rate_limits") private Collection rateLimits; /** * Configuration to enable usage tracking using system tables. These tables allow you to monitor * operational usage on endpoints and their associated costs. 
*/ - @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; public PutAiGatewayRequest setFallbackConfig(FallbackConfig fallbackConfig) { @@ -132,4 +137,49 @@ public String toString() { .add("usageTrackingConfig", usageTrackingConfig) .toString(); } + + PutAiGatewayRequestPb toPb() { + PutAiGatewayRequestPb pb = new PutAiGatewayRequestPb(); + pb.setFallbackConfig(fallbackConfig); + pb.setGuardrails(guardrails); + pb.setInferenceTableConfig(inferenceTableConfig); + pb.setName(name); + pb.setRateLimits(rateLimits); + pb.setUsageTrackingConfig(usageTrackingConfig); + + return pb; + } + + static PutAiGatewayRequest fromPb(PutAiGatewayRequestPb pb) { + PutAiGatewayRequest model = new PutAiGatewayRequest(); + model.setFallbackConfig(pb.getFallbackConfig()); + model.setGuardrails(pb.getGuardrails()); + model.setInferenceTableConfig(pb.getInferenceTableConfig()); + model.setName(pb.getName()); + model.setRateLimits(pb.getRateLimits()); + model.setUsageTrackingConfig(pb.getUsageTrackingConfig()); + + return model; + } + + public static class PutAiGatewayRequestSerializer extends JsonSerializer { + @Override + public void serialize(PutAiGatewayRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutAiGatewayRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutAiGatewayRequestDeserializer + extends JsonDeserializer { + @Override + public PutAiGatewayRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutAiGatewayRequestPb pb = mapper.readValue(p, PutAiGatewayRequestPb.class); + return PutAiGatewayRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequestPb.java new file mode 100755 index 000000000..81859e987 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequestPb.java @@ -0,0 +1,117 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PutAiGatewayRequestPb { + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + + @JsonProperty("guardrails") + private AiGatewayGuardrails guardrails; + + @JsonProperty("inference_table_config") + private AiGatewayInferenceTableConfig inferenceTableConfig; + + @JsonIgnore private String name; + + @JsonProperty("rate_limits") + private Collection rateLimits; + + @JsonProperty("usage_tracking_config") + private AiGatewayUsageTrackingConfig usageTrackingConfig; + + public PutAiGatewayRequestPb setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + + public PutAiGatewayRequestPb setGuardrails(AiGatewayGuardrails guardrails) { + this.guardrails = guardrails; + return this; + } + + public AiGatewayGuardrails getGuardrails() { + return guardrails; + } + + public PutAiGatewayRequestPb setInferenceTableConfig( + AiGatewayInferenceTableConfig 
inferenceTableConfig) { + this.inferenceTableConfig = inferenceTableConfig; + return this; + } + + public AiGatewayInferenceTableConfig getInferenceTableConfig() { + return inferenceTableConfig; + } + + public PutAiGatewayRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PutAiGatewayRequestPb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + public PutAiGatewayRequestPb setUsageTrackingConfig( + AiGatewayUsageTrackingConfig usageTrackingConfig) { + this.usageTrackingConfig = usageTrackingConfig; + return this; + } + + public AiGatewayUsageTrackingConfig getUsageTrackingConfig() { + return usageTrackingConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutAiGatewayRequestPb that = (PutAiGatewayRequestPb) o; + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) + && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) + && Objects.equals(name, that.name) + && Objects.equals(rateLimits, that.rateLimits) + && Objects.equals(usageTrackingConfig, that.usageTrackingConfig); + } + + @Override + public int hashCode() { + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, name, rateLimits, usageTrackingConfig); + } + + @Override + public String toString() { + return new ToStringer(PutAiGatewayRequestPb.class) + .add("fallbackConfig", fallbackConfig) + .add("guardrails", guardrails) + .add("inferenceTableConfig", inferenceTableConfig) + .add("name", name) + .add("rateLimits", rateLimits) + .add("usageTrackingConfig", usageTrackingConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java index 9b1a9c166..a5ae9d212 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java @@ -4,42 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PutAiGatewayResponse.PutAiGatewayResponseSerializer.class) +@JsonDeserialize(using = PutAiGatewayResponse.PutAiGatewayResponseDeserializer.class) public class PutAiGatewayResponse { /** * Configuration for traffic fallback which auto fallbacks to other served entities if the request * to a served entity fails with certain error codes, to increase availability. */ - @JsonProperty("fallback_config") private FallbackConfig fallbackConfig; /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. */ - @JsonProperty("guardrails") private AiGatewayGuardrails guardrails; /** * Configuration for payload logging using inference tables. Use these tables to monitor and audit * data being sent to and received from model APIs and to improve model quality. 
*/ - @JsonProperty("inference_table_config") private AiGatewayInferenceTableConfig inferenceTableConfig; /** Configuration for rate limits which can be set to limit endpoint traffic. */ - @JsonProperty("rate_limits") private Collection rateLimits; /** * Configuration to enable usage tracking using system tables. These tables allow you to monitor * operational usage on endpoints and their associated costs. */ - @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; public PutAiGatewayResponse setFallbackConfig(FallbackConfig fallbackConfig) { @@ -117,4 +123,48 @@ public String toString() { .add("usageTrackingConfig", usageTrackingConfig) .toString(); } + + PutAiGatewayResponsePb toPb() { + PutAiGatewayResponsePb pb = new PutAiGatewayResponsePb(); + pb.setFallbackConfig(fallbackConfig); + pb.setGuardrails(guardrails); + pb.setInferenceTableConfig(inferenceTableConfig); + pb.setRateLimits(rateLimits); + pb.setUsageTrackingConfig(usageTrackingConfig); + + return pb; + } + + static PutAiGatewayResponse fromPb(PutAiGatewayResponsePb pb) { + PutAiGatewayResponse model = new PutAiGatewayResponse(); + model.setFallbackConfig(pb.getFallbackConfig()); + model.setGuardrails(pb.getGuardrails()); + model.setInferenceTableConfig(pb.getInferenceTableConfig()); + model.setRateLimits(pb.getRateLimits()); + model.setUsageTrackingConfig(pb.getUsageTrackingConfig()); + + return model; + } + + public static class PutAiGatewayResponseSerializer extends JsonSerializer { + @Override + public void serialize( + PutAiGatewayResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutAiGatewayResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutAiGatewayResponseDeserializer + extends JsonDeserializer { + @Override + public PutAiGatewayResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutAiGatewayResponsePb pb = mapper.readValue(p, PutAiGatewayResponsePb.class); + return PutAiGatewayResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponsePb.java new file mode 100755 index 000000000..085707b6f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponsePb.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PutAiGatewayResponsePb { + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + + @JsonProperty("guardrails") + private AiGatewayGuardrails guardrails; + + @JsonProperty("inference_table_config") + private AiGatewayInferenceTableConfig inferenceTableConfig; + + @JsonProperty("rate_limits") + private Collection rateLimits; + + @JsonProperty("usage_tracking_config") + private AiGatewayUsageTrackingConfig usageTrackingConfig; + + public PutAiGatewayResponsePb setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + + public PutAiGatewayResponsePb setGuardrails(AiGatewayGuardrails guardrails) { + this.guardrails = guardrails; + return this; + } + + public AiGatewayGuardrails getGuardrails() { + return guardrails; + } + + public PutAiGatewayResponsePb setInferenceTableConfig( + AiGatewayInferenceTableConfig inferenceTableConfig) { + this.inferenceTableConfig = 
inferenceTableConfig; + return this; + } + + public AiGatewayInferenceTableConfig getInferenceTableConfig() { + return inferenceTableConfig; + } + + public PutAiGatewayResponsePb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + public PutAiGatewayResponsePb setUsageTrackingConfig( + AiGatewayUsageTrackingConfig usageTrackingConfig) { + this.usageTrackingConfig = usageTrackingConfig; + return this; + } + + public AiGatewayUsageTrackingConfig getUsageTrackingConfig() { + return usageTrackingConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutAiGatewayResponsePb that = (PutAiGatewayResponsePb) o; + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) + && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) + && Objects.equals(rateLimits, that.rateLimits) + && Objects.equals(usageTrackingConfig, that.usageTrackingConfig); + } + + @Override + public int hashCode() { + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); + } + + @Override + public String toString() { + return new ToStringer(PutAiGatewayResponsePb.class) + .add("fallbackConfig", fallbackConfig) + .add("guardrails", guardrails) + .add("inferenceTableConfig", inferenceTableConfig) + .add("rateLimits", rateLimits) + .add("usageTrackingConfig", usageTrackingConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java index f8cf5eb23..aa9176d29 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PutRequest.PutRequestSerializer.class) +@JsonDeserialize(using = PutRequest.PutRequestDeserializer.class) public class PutRequest { /** * The name of the serving endpoint whose rate limits are being updated. This field is required. */ - @JsonIgnore private String name; + private String name; /** The list of endpoint rate limits. 
*/ - @JsonProperty("rate_limits") private Collection rateLimits; public PutRequest setName(String name) { @@ -58,4 +67,39 @@ public String toString() { .add("rateLimits", rateLimits) .toString(); } + + PutRequestPb toPb() { + PutRequestPb pb = new PutRequestPb(); + pb.setName(name); + pb.setRateLimits(rateLimits); + + return pb; + } + + static PutRequest fromPb(PutRequestPb pb) { + PutRequest model = new PutRequest(); + model.setName(pb.getName()); + model.setRateLimits(pb.getRateLimits()); + + return model; + } + + public static class PutRequestSerializer extends JsonSerializer { + @Override + public void serialize(PutRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutRequestDeserializer extends JsonDeserializer { + @Override + public PutRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutRequestPb pb = mapper.readValue(p, PutRequestPb.class); + return PutRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequestPb.java new file mode 100755 index 000000000..2557ce214 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PutRequestPb { + @JsonIgnore private String name; + + @JsonProperty("rate_limits") + private Collection rateLimits; + + public PutRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PutRequestPb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutRequestPb that = (PutRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(rateLimits, that.rateLimits); + } + + @Override + public int hashCode() { + return Objects.hash(name, rateLimits); + } + + @Override + public String toString() { + return new ToStringer(PutRequestPb.class) + .add("name", name) + .add("rateLimits", rateLimits) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponse.java index 5458eb136..a627b0e92 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PutResponse.PutResponseSerializer.class) +@JsonDeserialize(using = PutResponse.PutResponseDeserializer.class) public class PutResponse { /** The list of endpoint rate limits. */ - @JsonProperty("rate_limits") private Collection rateLimits; public PutResponse setRateLimits(Collection rateLimits) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(PutResponse.class).add("rateLimits", rateLimits).toString(); } + + PutResponsePb toPb() { + PutResponsePb pb = new PutResponsePb(); + pb.setRateLimits(rateLimits); + + return pb; + } + + static PutResponse fromPb(PutResponsePb pb) { + PutResponse model = new PutResponse(); + model.setRateLimits(pb.getRateLimits()); + + return model; + } + + public static class PutResponseSerializer extends JsonSerializer { + @Override + public void serialize(PutResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutResponseDeserializer extends JsonDeserializer { + @Override + public PutResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutResponsePb pb = mapper.readValue(p, PutResponsePb.class); + return PutResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponsePb.java new file mode 100755 index 000000000..5dcc86fb9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PutResponsePb { + @JsonProperty("rate_limits") + private Collection rateLimits; + + public PutResponsePb setRateLimits(Collection rateLimits) { + this.rateLimits = rateLimits; + return this; + } + + public Collection getRateLimits() { + return rateLimits; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutResponsePb that = (PutResponsePb) o; + return Objects.equals(rateLimits, that.rateLimits); + } + + @Override + public int hashCode() { + return Objects.hash(rateLimits); + } + + @Override + public String toString() { + return new ToStringer(PutResponsePb.class).add("rateLimits", rateLimits).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java index 945a9b2b6..f06f8f44a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java @@ -4,20 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = QueryEndpointInput.QueryEndpointInputSerializer.class) +@JsonDeserialize(using = QueryEndpointInput.QueryEndpointInputDeserializer.class) public class QueryEndpointInput { /** Pandas Dataframe input in the records orientation. */ - @JsonProperty("dataframe_records") private Collection dataframeRecords; /** Pandas Dataframe input in the split orientation. */ - @JsonProperty("dataframe_split") private DataframeSplitInput dataframeSplit; /** @@ -25,7 +33,6 @@ public class QueryEndpointInput { * foundation model__ serving endpoints. This is a map of strings and should only be used with * other external/foundation model query fields. */ - @JsonProperty("extra_params") private Map extraParams; /** @@ -33,15 +40,12 @@ public class QueryEndpointInput { * model__ serving endpoints and is the only field (along with extra_params if needed) used by * embeddings queries. */ - @JsonProperty("input") private Object input; /** Tensor-based input in columnar format. 
*/ - @JsonProperty("inputs") private Object inputs; /** Tensor-based input in row format. */ - @JsonProperty("instances") private Collection instances; /** @@ -49,14 +53,12 @@ public class QueryEndpointInput { * serving endpoints. This is an integer and should only be used with other chat/completions query * fields. */ - @JsonProperty("max_tokens") private Long maxTokens; /** * The messages field used ONLY for __chat external & foundation model__ serving endpoints. This * is a map of strings and should only be used with other chat query fields. */ - @JsonProperty("messages") private Collection messages; /** @@ -64,17 +66,15 @@ public class QueryEndpointInput { * foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 * and should only be used with other chat/completions query fields. */ - @JsonProperty("n") private Long n; /** The name of the serving endpoint. This field is required. */ - @JsonIgnore private String name; + private String name; /** * The prompt string (or array of strings) field used ONLY for __completions external & foundation * model__ serving endpoints and should only be used with other completions query fields. */ - @JsonProperty("prompt") private Object prompt; /** @@ -82,7 +82,6 @@ public class QueryEndpointInput { * serving endpoints. This is a list of strings and should only be used with other * chat/completions query fields. */ - @JsonProperty("stop") private Collection stop; /** @@ -90,7 +89,6 @@ public class QueryEndpointInput { * endpoints. This is a boolean defaulting to false and should only be used with other * chat/completions query fields. */ - @JsonProperty("stream") private Boolean stream; /** @@ -98,7 +96,6 @@ public class QueryEndpointInput { * serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be * used with other chat/completions query fields. 
*/ - @JsonProperty("temperature") private Double temperature; public QueryEndpointInput setDataframeRecords(Collection dataframeRecords) { @@ -286,4 +283,64 @@ public String toString() { .add("temperature", temperature) .toString(); } + + QueryEndpointInputPb toPb() { + QueryEndpointInputPb pb = new QueryEndpointInputPb(); + pb.setDataframeRecords(dataframeRecords); + pb.setDataframeSplit(dataframeSplit); + pb.setExtraParams(extraParams); + pb.setInput(input); + pb.setInputs(inputs); + pb.setInstances(instances); + pb.setMaxTokens(maxTokens); + pb.setMessages(messages); + pb.setN(n); + pb.setName(name); + pb.setPrompt(prompt); + pb.setStop(stop); + pb.setStream(stream); + pb.setTemperature(temperature); + + return pb; + } + + static QueryEndpointInput fromPb(QueryEndpointInputPb pb) { + QueryEndpointInput model = new QueryEndpointInput(); + model.setDataframeRecords(pb.getDataframeRecords()); + model.setDataframeSplit(pb.getDataframeSplit()); + model.setExtraParams(pb.getExtraParams()); + model.setInput(pb.getInput()); + model.setInputs(pb.getInputs()); + model.setInstances(pb.getInstances()); + model.setMaxTokens(pb.getMaxTokens()); + model.setMessages(pb.getMessages()); + model.setN(pb.getN()); + model.setName(pb.getName()); + model.setPrompt(pb.getPrompt()); + model.setStop(pb.getStop()); + model.setStream(pb.getStream()); + model.setTemperature(pb.getTemperature()); + + return model; + } + + public static class QueryEndpointInputSerializer extends JsonSerializer { + @Override + public void serialize(QueryEndpointInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryEndpointInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryEndpointInputDeserializer extends JsonDeserializer { + @Override + public QueryEndpointInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryEndpointInputPb pb = mapper.readValue(p, QueryEndpointInputPb.class); + return QueryEndpointInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInputPb.java new file mode 100755 index 000000000..d76904a4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInputPb.java @@ -0,0 +1,241 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class QueryEndpointInputPb { + @JsonProperty("dataframe_records") + private Collection dataframeRecords; + + @JsonProperty("dataframe_split") + private DataframeSplitInput dataframeSplit; + + @JsonProperty("extra_params") + private Map extraParams; + + @JsonProperty("input") + private Object input; + + @JsonProperty("inputs") + private Object inputs; + + @JsonProperty("instances") + private Collection instances; + + @JsonProperty("max_tokens") + private Long maxTokens; + + @JsonProperty("messages") + private Collection messages; + + @JsonProperty("n") + private Long n; + + @JsonIgnore private String name; + + @JsonProperty("prompt") + private Object prompt; + + @JsonProperty("stop") + private Collection stop; + + @JsonProperty("stream") + private Boolean stream; + + @JsonProperty("temperature") + private Double temperature; + + public QueryEndpointInputPb setDataframeRecords(Collection dataframeRecords) { + this.dataframeRecords = dataframeRecords; + return this; + } + + public Collection 
getDataframeRecords() { + return dataframeRecords; + } + + public QueryEndpointInputPb setDataframeSplit(DataframeSplitInput dataframeSplit) { + this.dataframeSplit = dataframeSplit; + return this; + } + + public DataframeSplitInput getDataframeSplit() { + return dataframeSplit; + } + + public QueryEndpointInputPb setExtraParams(Map extraParams) { + this.extraParams = extraParams; + return this; + } + + public Map getExtraParams() { + return extraParams; + } + + public QueryEndpointInputPb setInput(Object input) { + this.input = input; + return this; + } + + public Object getInput() { + return input; + } + + public QueryEndpointInputPb setInputs(Object inputs) { + this.inputs = inputs; + return this; + } + + public Object getInputs() { + return inputs; + } + + public QueryEndpointInputPb setInstances(Collection instances) { + this.instances = instances; + return this; + } + + public Collection getInstances() { + return instances; + } + + public QueryEndpointInputPb setMaxTokens(Long maxTokens) { + this.maxTokens = maxTokens; + return this; + } + + public Long getMaxTokens() { + return maxTokens; + } + + public QueryEndpointInputPb setMessages(Collection messages) { + this.messages = messages; + return this; + } + + public Collection getMessages() { + return messages; + } + + public QueryEndpointInputPb setN(Long n) { + this.n = n; + return this; + } + + public Long getN() { + return n; + } + + public QueryEndpointInputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public QueryEndpointInputPb setPrompt(Object prompt) { + this.prompt = prompt; + return this; + } + + public Object getPrompt() { + return prompt; + } + + public QueryEndpointInputPb setStop(Collection stop) { + this.stop = stop; + return this; + } + + public Collection getStop() { + return stop; + } + + public QueryEndpointInputPb setStream(Boolean stream) { + this.stream = stream; + return this; + } + + public Boolean getStream() { + 
return stream; + } + + public QueryEndpointInputPb setTemperature(Double temperature) { + this.temperature = temperature; + return this; + } + + public Double getTemperature() { + return temperature; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryEndpointInputPb that = (QueryEndpointInputPb) o; + return Objects.equals(dataframeRecords, that.dataframeRecords) + && Objects.equals(dataframeSplit, that.dataframeSplit) + && Objects.equals(extraParams, that.extraParams) + && Objects.equals(input, that.input) + && Objects.equals(inputs, that.inputs) + && Objects.equals(instances, that.instances) + && Objects.equals(maxTokens, that.maxTokens) + && Objects.equals(messages, that.messages) + && Objects.equals(n, that.n) + && Objects.equals(name, that.name) + && Objects.equals(prompt, that.prompt) + && Objects.equals(stop, that.stop) + && Objects.equals(stream, that.stream) + && Objects.equals(temperature, that.temperature); + } + + @Override + public int hashCode() { + return Objects.hash( + dataframeRecords, + dataframeSplit, + extraParams, + input, + inputs, + instances, + maxTokens, + messages, + n, + name, + prompt, + stop, + stream, + temperature); + } + + @Override + public String toString() { + return new ToStringer(QueryEndpointInputPb.class) + .add("dataframeRecords", dataframeRecords) + .add("dataframeSplit", dataframeSplit) + .add("extraParams", extraParams) + .add("input", input) + .add("inputs", inputs) + .add("instances", instances) + .add("maxTokens", maxTokens) + .add("messages", messages) + .add("n", n) + .add("name", name) + .add("prompt", prompt) + .add("stop", stop) + .add("stream", stream) + .add("temperature", temperature) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java 
index cc5be3bc5..1c539d294 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java @@ -3,74 +3,73 @@ package com.databricks.sdk.service.serving; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Header; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryEndpointResponse.QueryEndpointResponseSerializer.class) +@JsonDeserialize(using = QueryEndpointResponse.QueryEndpointResponseDeserializer.class) public class QueryEndpointResponse { /** * The list of choices returned by the __chat or completions external/foundation model__ serving * endpoint. */ - @JsonProperty("choices") private Collection choices; /** * The timestamp in seconds when the query was created in Unix time returned by a __completions or * chat external/foundation model__ serving endpoint. */ - @JsonProperty("created") private Long created; /** * The list of the embeddings returned by the __embeddings external/foundation model__ serving * endpoint. 
*/ - @JsonProperty("data") private Collection data; /** * The ID of the query that may be returned by a __completions or chat external/foundation model__ * serving endpoint. */ - @JsonProperty("id") private String id; /** * The name of the __external/foundation model__ used for querying. This is the name of the model * that was specified in the endpoint config. */ - @JsonProperty("model") private String model; /** * The type of object returned by the __external/foundation model__ serving endpoint, one of * [text_completion, chat.completion, list (of embeddings)]. */ - @JsonProperty("object") private QueryEndpointResponseObject object; /** The predictions returned by the serving endpoint. */ - @JsonProperty("predictions") private Collection predictions; /** * The name of the served model that served the request. This is useful when there are multiple * models behind the same endpoint with traffic split. */ - @JsonIgnore - @Header("served-model-name") private String servedModelName; /** * The usage object that may be returned by the __external/foundation model__ serving endpoint. * This contains information about the number of tokens used in the prompt and response. 
*/ - @JsonProperty("usage") private ExternalModelUsageElement usage; public QueryEndpointResponse setChoices(Collection choices) { @@ -190,4 +189,57 @@ public String toString() { .add("usage", usage) .toString(); } + + QueryEndpointResponsePb toPb() { + QueryEndpointResponsePb pb = new QueryEndpointResponsePb(); + pb.setChoices(choices); + pb.setCreated(created); + pb.setData(data); + pb.setId(id); + pb.setModel(model); + pb.setObject(object); + pb.setPredictions(predictions); + pb.setServedModelName(servedModelName); + pb.setUsage(usage); + + return pb; + } + + static QueryEndpointResponse fromPb(QueryEndpointResponsePb pb) { + QueryEndpointResponse model = new QueryEndpointResponse(); + model.setChoices(pb.getChoices()); + model.setCreated(pb.getCreated()); + model.setData(pb.getData()); + model.setId(pb.getId()); + model.setModel(pb.getModel()); + model.setObject(pb.getObject()); + model.setPredictions(pb.getPredictions()); + model.setServedModelName(pb.getServedModelName()); + model.setUsage(pb.getUsage()); + + return model; + } + + public static class QueryEndpointResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + QueryEndpointResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryEndpointResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryEndpointResponseDeserializer + extends JsonDeserializer { + @Override + public QueryEndpointResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryEndpointResponsePb pb = mapper.readValue(p, QueryEndpointResponsePb.class); + return QueryEndpointResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponsePb.java new file mode 100755 index 000000000..e2b79da5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponsePb.java @@ -0,0 +1,160 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Header; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryEndpointResponsePb { + @JsonProperty("choices") + private Collection choices; + + @JsonProperty("created") + private Long created; + + @JsonProperty("data") + private Collection data; + + @JsonProperty("id") + private String id; + + @JsonProperty("model") + private String model; + + @JsonProperty("object") + private QueryEndpointResponseObject object; + + @JsonProperty("predictions") + private Collection predictions; + + @JsonIgnore + @Header("served-model-name") + private String servedModelName; + + @JsonProperty("usage") + private ExternalModelUsageElement usage; + + public QueryEndpointResponsePb setChoices(Collection choices) { + this.choices = choices; + return this; + } + + public Collection getChoices() { + return choices; + } + + public QueryEndpointResponsePb setCreated(Long created) { + this.created = created; + return this; + } + + public Long getCreated() { + return created; + } + + public QueryEndpointResponsePb setData(Collection 
data) { + this.data = data; + return this; + } + + public Collection getData() { + return data; + } + + public QueryEndpointResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public QueryEndpointResponsePb setModel(String model) { + this.model = model; + return this; + } + + public String getModel() { + return model; + } + + public QueryEndpointResponsePb setObject(QueryEndpointResponseObject object) { + this.object = object; + return this; + } + + public QueryEndpointResponseObject getObject() { + return object; + } + + public QueryEndpointResponsePb setPredictions(Collection predictions) { + this.predictions = predictions; + return this; + } + + public Collection getPredictions() { + return predictions; + } + + public QueryEndpointResponsePb setServedModelName(String servedModelName) { + this.servedModelName = servedModelName; + return this; + } + + public String getServedModelName() { + return servedModelName; + } + + public QueryEndpointResponsePb setUsage(ExternalModelUsageElement usage) { + this.usage = usage; + return this; + } + + public ExternalModelUsageElement getUsage() { + return usage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryEndpointResponsePb that = (QueryEndpointResponsePb) o; + return Objects.equals(choices, that.choices) + && Objects.equals(created, that.created) + && Objects.equals(data, that.data) + && Objects.equals(id, that.id) + && Objects.equals(model, that.model) + && Objects.equals(object, that.object) + && Objects.equals(predictions, that.predictions) + && Objects.equals(servedModelName, that.servedModelName) + && Objects.equals(usage, that.usage); + } + + @Override + public int hashCode() { + return Objects.hash( + choices, created, data, id, model, object, predictions, servedModelName, usage); + } + + @Override + public String toString() { + return new 
ToStringer(QueryEndpointResponsePb.class) + .add("choices", choices) + .add("created", created) + .add("data", data) + .add("id", id) + .add("model", model) + .add("object", object) + .add("predictions", predictions) + .add("servedModelName", servedModelName) + .add("usage", usage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimit.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimit.java index 3dd8145fd..b439a4cc1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimit.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimit.java @@ -4,26 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RateLimit.RateLimitSerializer.class) +@JsonDeserialize(using = RateLimit.RateLimitDeserializer.class) public class RateLimit { /** Used to specify how many calls are allowed for a key within the renewal_period. */ - @JsonProperty("calls") private Long calls; /** * Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are * supported, with 'endpoint' being the default if not specified. 
*/ - @JsonProperty("key") private RateLimitKey key; /** * Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported. */ - @JsonProperty("renewal_period") private RateLimitRenewalPeriod renewalPeriod; public RateLimit setCalls(Long calls) { @@ -76,4 +84,41 @@ public String toString() { .add("renewalPeriod", renewalPeriod) .toString(); } + + RateLimitPb toPb() { + RateLimitPb pb = new RateLimitPb(); + pb.setCalls(calls); + pb.setKey(key); + pb.setRenewalPeriod(renewalPeriod); + + return pb; + } + + static RateLimit fromPb(RateLimitPb pb) { + RateLimit model = new RateLimit(); + model.setCalls(pb.getCalls()); + model.setKey(pb.getKey()); + model.setRenewalPeriod(pb.getRenewalPeriod()); + + return model; + } + + public static class RateLimitSerializer extends JsonSerializer { + @Override + public void serialize(RateLimit value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RateLimitPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RateLimitDeserializer extends JsonDeserializer { + @Override + public RateLimit deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RateLimitPb pb = mapper.readValue(p, RateLimitPb.class); + return RateLimit.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitPb.java new file mode 100755 index 000000000..8434f532e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RateLimitPb { + @JsonProperty("calls") + private Long calls; + + @JsonProperty("key") + private RateLimitKey key; + + @JsonProperty("renewal_period") + private RateLimitRenewalPeriod renewalPeriod; + + public RateLimitPb setCalls(Long calls) { + this.calls = calls; + return this; + } + + public Long getCalls() { + return calls; + } + + public RateLimitPb setKey(RateLimitKey key) { + this.key = key; + return this; + } + + public RateLimitKey getKey() { + return key; + } + + public RateLimitPb setRenewalPeriod(RateLimitRenewalPeriod renewalPeriod) { + this.renewalPeriod = renewalPeriod; + return this; + } + + public RateLimitRenewalPeriod getRenewalPeriod() { + return renewalPeriod; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RateLimitPb that = (RateLimitPb) o; + return Objects.equals(calls, that.calls) + && Objects.equals(key, that.key) + && Objects.equals(renewalPeriod, that.renewalPeriod); + } + + @Override + public int hashCode() { + return Objects.hash(calls, key, renewalPeriod); + } + + @Override + public String toString() { + return new ToStringer(RateLimitPb.class) + .add("calls", calls) + .add("key", key) + .add("renewalPeriod", renewalPeriod) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Route.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Route.java index 4d6fbb6ea..fc34afc2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Route.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Route.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Route.RouteSerializer.class) +@JsonDeserialize(using = Route.RouteDeserializer.class) public class Route { /** The name of the served model this route configures traffic for. */ - @JsonProperty("served_model_name") private String servedModelName; /** * The percentage of endpoint traffic to send to this route. It must be an integer between 0 and * 100 inclusive. 
*/ - @JsonProperty("traffic_percentage") private Long trafficPercentage; public Route setServedModelName(String servedModelName) { @@ -59,4 +68,39 @@ public String toString() { .add("trafficPercentage", trafficPercentage) .toString(); } + + RoutePb toPb() { + RoutePb pb = new RoutePb(); + pb.setServedModelName(servedModelName); + pb.setTrafficPercentage(trafficPercentage); + + return pb; + } + + static Route fromPb(RoutePb pb) { + Route model = new Route(); + model.setServedModelName(pb.getServedModelName()); + model.setTrafficPercentage(pb.getTrafficPercentage()); + + return model; + } + + public static class RouteSerializer extends JsonSerializer { + @Override + public void serialize(Route value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RoutePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RouteDeserializer extends JsonDeserializer { + @Override + public Route deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RoutePb pb = mapper.readValue(p, RoutePb.class); + return Route.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RoutePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RoutePb.java new file mode 100755 index 000000000..e92d763ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RoutePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RoutePb { + @JsonProperty("served_model_name") + private String servedModelName; + + @JsonProperty("traffic_percentage") + private Long trafficPercentage; + + public RoutePb setServedModelName(String servedModelName) { + this.servedModelName = servedModelName; + return this; + } + + public String getServedModelName() { + return servedModelName; + } + + public RoutePb setTrafficPercentage(Long trafficPercentage) { + this.trafficPercentage = trafficPercentage; + return this; + } + + public Long getTrafficPercentage() { + return trafficPercentage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RoutePb that = (RoutePb) o; + return Objects.equals(servedModelName, that.servedModelName) + && Objects.equals(trafficPercentage, that.trafficPercentage); + } + + @Override + public int hashCode() { + return Objects.hash(servedModelName, trafficPercentage); + } + + @Override + public String toString() { + return new ToStringer(RoutePb.class) + .add("servedModelName", servedModelName) + .add("trafficPercentage", trafficPercentage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java index ca9ccf251..d273b01f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ServedEntityInput.ServedEntityInputSerializer.class) +@JsonDeserialize(using = ServedEntityInput.ServedEntityInputDeserializer.class) public class ServedEntityInput { /** * The name of the entity to be served. The entity may be a model in the Databricks Model @@ -16,11 +27,9 @@ public class ServedEntityInput { * it is a UC object, the full name of the object should be given in the form of * **catalog_name.schema_name.model_name**. */ - @JsonProperty("entity_name") private String entityName; /** */ - @JsonProperty("entity_version") private String entityVersion; /** @@ -29,7 +38,6 @@ public class ServedEntityInput { * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": * "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}` */ - @JsonProperty("environment_vars") private Map environmentVars; /** @@ -41,33 +49,27 @@ public class ServedEntityInput { * add external_model later. The task type of all external models within an endpoint must be the * same. */ - @JsonProperty("external_model") private ExternalModel externalModel; /** ARN of the instance profile that the served entity uses to access AWS resources. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** * The maximum provisioned concurrency that the endpoint can scale up to. 
Do not use if * workload_size is specified. */ - @JsonProperty("max_provisioned_concurrency") private Long maxProvisionedConcurrency; /** The maximum tokens per second that the endpoint can scale up to. */ - @JsonProperty("max_provisioned_throughput") private Long maxProvisionedThroughput; /** * The minimum provisioned concurrency that the endpoint can scale down to. Do not use if * workload_size is specified. */ - @JsonProperty("min_provisioned_concurrency") private Long minProvisionedConcurrency; /** The minimum tokens per second that the endpoint can scale down to. */ - @JsonProperty("min_provisioned_throughput") private Long minProvisionedThroughput; /** @@ -76,15 +78,12 @@ public class ServedEntityInput { * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if * not specified for other entities, it defaults to entity_name-entity_version. */ - @JsonProperty("name") private String name; /** The number of model units provisioned. */ - @JsonProperty("provisioned_model_units") private Long provisionedModelUnits; /** Whether the compute resources for the served entity should scale down to zero. */ - @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; /** @@ -97,7 +96,6 @@ public class ServedEntityInput { * workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency * are specified. */ - @JsonProperty("workload_size") private String workloadSize; /** @@ -109,7 +107,6 @@ public class ServedEntityInput { *

[GPU types]: * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types */ - @JsonProperty("workload_type") private ServingModelWorkloadType workloadType; public ServedEntityInput setEntityName(String entityName) { @@ -297,4 +294,64 @@ public String toString() { .add("workloadType", workloadType) .toString(); } + + ServedEntityInputPb toPb() { + ServedEntityInputPb pb = new ServedEntityInputPb(); + pb.setEntityName(entityName); + pb.setEntityVersion(entityVersion); + pb.setEnvironmentVars(environmentVars); + pb.setExternalModel(externalModel); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxProvisionedConcurrency(maxProvisionedConcurrency); + pb.setMaxProvisionedThroughput(maxProvisionedThroughput); + pb.setMinProvisionedConcurrency(minProvisionedConcurrency); + pb.setMinProvisionedThroughput(minProvisionedThroughput); + pb.setName(name); + pb.setProvisionedModelUnits(provisionedModelUnits); + pb.setScaleToZeroEnabled(scaleToZeroEnabled); + pb.setWorkloadSize(workloadSize); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ServedEntityInput fromPb(ServedEntityInputPb pb) { + ServedEntityInput model = new ServedEntityInput(); + model.setEntityName(pb.getEntityName()); + model.setEntityVersion(pb.getEntityVersion()); + model.setEnvironmentVars(pb.getEnvironmentVars()); + model.setExternalModel(pb.getExternalModel()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxProvisionedConcurrency(pb.getMaxProvisionedConcurrency()); + model.setMaxProvisionedThroughput(pb.getMaxProvisionedThroughput()); + model.setMinProvisionedConcurrency(pb.getMinProvisionedConcurrency()); + model.setMinProvisionedThroughput(pb.getMinProvisionedThroughput()); + model.setName(pb.getName()); + model.setProvisionedModelUnits(pb.getProvisionedModelUnits()); + model.setScaleToZeroEnabled(pb.getScaleToZeroEnabled()); + model.setWorkloadSize(pb.getWorkloadSize()); + 
model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ServedEntityInputSerializer extends JsonSerializer { + @Override + public void serialize(ServedEntityInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedEntityInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedEntityInputDeserializer extends JsonDeserializer { + @Override + public ServedEntityInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedEntityInputPb pb = mapper.readValue(p, ServedEntityInputPb.class); + return ServedEntityInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInputPb.java new file mode 100755 index 000000000..b92a0c56f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInputPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.serving;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format (JSON) twin of {@code ServedEntityInput}. Carries the {@code @JsonProperty}
 * annotations so the public model class can delegate Jackson (de)serialization to this
 * package-private DTO via its custom serializer/deserializer pair.
 *
 * <p>Plain mutable bean with fluent setters; {@code equals}/{@code hashCode}/{@code toString}
 * cover every field.
 */
@Generated
class ServedEntityInputPb {
  @JsonProperty("entity_name")
  private String entityName;

  @JsonProperty("entity_version")
  private String entityVersion;

  // User-specified environment variables for the served entity, e.g. secret references such as
  // {"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}"}. Keys and values are both strings.
  @JsonProperty("environment_vars")
  private Map<String, String> environmentVars;

  @JsonProperty("external_model")
  private ExternalModel externalModel;

  // ARN of the instance profile the served entity uses to access AWS resources.
  @JsonProperty("instance_profile_arn")
  private String instanceProfileArn;

  @JsonProperty("max_provisioned_concurrency")
  private Long maxProvisionedConcurrency;

  @JsonProperty("max_provisioned_throughput")
  private Long maxProvisionedThroughput;

  @JsonProperty("min_provisioned_concurrency")
  private Long minProvisionedConcurrency;

  @JsonProperty("min_provisioned_throughput")
  private Long minProvisionedThroughput;

  @JsonProperty("name")
  private String name;

  @JsonProperty("provisioned_model_units")
  private Long provisionedModelUnits;

  @JsonProperty("scale_to_zero_enabled")
  private Boolean scaleToZeroEnabled;

  @JsonProperty("workload_size")
  private String workloadSize;

  @JsonProperty("workload_type")
  private ServingModelWorkloadType workloadType;

  public ServedEntityInputPb setEntityName(String entityName) {
    this.entityName = entityName;
    return this;
  }

  public String getEntityName() {
    return entityName;
  }

  public ServedEntityInputPb setEntityVersion(String entityVersion) {
    this.entityVersion = entityVersion;
    return this;
  }

  public String getEntityVersion() {
    return entityVersion;
  }

  public ServedEntityInputPb setEnvironmentVars(Map<String, String> environmentVars) {
    this.environmentVars = environmentVars;
    return this;
  }

  public Map<String, String> getEnvironmentVars() {
    return environmentVars;
  }

  public ServedEntityInputPb setExternalModel(ExternalModel externalModel) {
    this.externalModel = externalModel;
    return this;
  }

  public ExternalModel getExternalModel() {
    return externalModel;
  }

  public ServedEntityInputPb setInstanceProfileArn(String instanceProfileArn) {
    this.instanceProfileArn = instanceProfileArn;
    return this;
  }

  public String getInstanceProfileArn() {
    return instanceProfileArn;
  }

  public ServedEntityInputPb setMaxProvisionedConcurrency(Long maxProvisionedConcurrency) {
    this.maxProvisionedConcurrency = maxProvisionedConcurrency;
    return this;
  }

  public Long getMaxProvisionedConcurrency() {
    return maxProvisionedConcurrency;
  }

  public ServedEntityInputPb setMaxProvisionedThroughput(Long maxProvisionedThroughput) {
    this.maxProvisionedThroughput = maxProvisionedThroughput;
    return this;
  }

  public Long getMaxProvisionedThroughput() {
    return maxProvisionedThroughput;
  }

  public ServedEntityInputPb setMinProvisionedConcurrency(Long minProvisionedConcurrency) {
    this.minProvisionedConcurrency = minProvisionedConcurrency;
    return this;
  }

  public Long getMinProvisionedConcurrency() {
    return minProvisionedConcurrency;
  }

  public ServedEntityInputPb setMinProvisionedThroughput(Long minProvisionedThroughput) {
    this.minProvisionedThroughput = minProvisionedThroughput;
    return this;
  }

  public Long getMinProvisionedThroughput() {
    return minProvisionedThroughput;
  }

  public ServedEntityInputPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public ServedEntityInputPb setProvisionedModelUnits(Long provisionedModelUnits) {
    this.provisionedModelUnits = provisionedModelUnits;
    return this;
  }

  public Long getProvisionedModelUnits() {
    return provisionedModelUnits;
  }

  public ServedEntityInputPb setScaleToZeroEnabled(Boolean scaleToZeroEnabled) {
    this.scaleToZeroEnabled = scaleToZeroEnabled;
    return this;
  }

  public Boolean getScaleToZeroEnabled() {
    return scaleToZeroEnabled;
  }

  public ServedEntityInputPb setWorkloadSize(String workloadSize) {
    this.workloadSize = workloadSize;
    return this;
  }

  public String getWorkloadSize() {
    return workloadSize;
  }

  public ServedEntityInputPb setWorkloadType(ServingModelWorkloadType workloadType) {
    this.workloadType = workloadType;
    return this;
  }

  public ServingModelWorkloadType getWorkloadType() {
    return workloadType;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ServedEntityInputPb that = (ServedEntityInputPb) o;
    return Objects.equals(entityName, that.entityName)
        && Objects.equals(entityVersion, that.entityVersion)
        && Objects.equals(environmentVars, that.environmentVars)
        && Objects.equals(externalModel, that.externalModel)
        && Objects.equals(instanceProfileArn, that.instanceProfileArn)
        && Objects.equals(maxProvisionedConcurrency, that.maxProvisionedConcurrency)
        && Objects.equals(maxProvisionedThroughput, that.maxProvisionedThroughput)
        && Objects.equals(minProvisionedConcurrency, that.minProvisionedConcurrency)
        && Objects.equals(minProvisionedThroughput, that.minProvisionedThroughput)
        && Objects.equals(name, that.name)
        && Objects.equals(provisionedModelUnits, that.provisionedModelUnits)
        && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled)
        && Objects.equals(workloadSize, that.workloadSize)
        && Objects.equals(workloadType, that.workloadType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        entityName,
        entityVersion,
        environmentVars,
        externalModel,
        instanceProfileArn,
        maxProvisionedConcurrency,
        maxProvisionedThroughput,
        minProvisionedConcurrency,
        minProvisionedThroughput,
        name,
        provisionedModelUnits,
        scaleToZeroEnabled,
        workloadSize,
        workloadType);
  }

  @Override
  public String toString() {
    return new ToStringer(ServedEntityInputPb.class)
        .add("entityName", entityName)
        .add("entityVersion", entityVersion)
        .add("environmentVars", environmentVars)
        .add("externalModel", externalModel)
        .add("instanceProfileArn", instanceProfileArn)
        .add("maxProvisionedConcurrency", maxProvisionedConcurrency)
        .add("maxProvisionedThroughput", maxProvisionedThroughput)
        .add("minProvisionedConcurrency", minProvisionedConcurrency)
        .add("minProvisionedThroughput", minProvisionedThroughput)
        .add("name", name)
        .add("provisionedModelUnits", provisionedModelUnits)
        .add("scaleToZeroEnabled", scaleToZeroEnabled)
        .add("workloadSize", workloadSize)
        .add("workloadType", workloadType)
        .toString();
  }
}
+@JsonDeserialize(using = ServedEntityOutput.ServedEntityOutputDeserializer.class) public class ServedEntityOutput { /** */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** */ - @JsonProperty("creator") private String creator; /** @@ -24,11 +33,9 @@ public class ServedEntityOutput { * it is a UC object, the full name of the object should be given in the form of * **catalog_name.schema_name.model_name**. */ - @JsonProperty("entity_name") private String entityName; /** */ - @JsonProperty("entity_version") private String entityVersion; /** @@ -37,7 +44,6 @@ public class ServedEntityOutput { * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": * "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}` */ - @JsonProperty("environment_vars") private Map environmentVars; /** @@ -49,40 +55,33 @@ public class ServedEntityOutput { * add external_model later. The task type of all external models within an endpoint must be the * same. */ - @JsonProperty("external_model") private ExternalModel externalModel; /** * All fields are not sensitive as they are hard-coded in the system and made available to * customers. */ - @JsonProperty("foundation_model") private FoundationModel foundationModel; /** ARN of the instance profile that the served entity uses to access AWS resources. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** * The maximum provisioned concurrency that the endpoint can scale up to. Do not use if * workload_size is specified. */ - @JsonProperty("max_provisioned_concurrency") private Long maxProvisionedConcurrency; /** The maximum tokens per second that the endpoint can scale up to. */ - @JsonProperty("max_provisioned_throughput") private Long maxProvisionedThroughput; /** * The minimum provisioned concurrency that the endpoint can scale down to. Do not use if * workload_size is specified. 
*/ - @JsonProperty("min_provisioned_concurrency") private Long minProvisionedConcurrency; /** The minimum tokens per second that the endpoint can scale down to. */ - @JsonProperty("min_provisioned_throughput") private Long minProvisionedThroughput; /** @@ -91,19 +90,15 @@ public class ServedEntityOutput { * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if * not specified for other entities, it defaults to entity_name-entity_version. */ - @JsonProperty("name") private String name; /** The number of model units provisioned. */ - @JsonProperty("provisioned_model_units") private Long provisionedModelUnits; /** Whether the compute resources for the served entity should scale down to zero. */ - @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; /** */ - @JsonProperty("state") private ServedModelState state; /** @@ -116,7 +111,6 @@ public class ServedEntityOutput { * workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency * are specified. */ - @JsonProperty("workload_size") private String workloadSize; /** @@ -128,7 +122,6 @@ public class ServedEntityOutput { *

[GPU types]: * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types */ - @JsonProperty("workload_type") private ServingModelWorkloadType workloadType; public ServedEntityOutput setCreationTimestamp(Long creationTimestamp) { @@ -364,4 +357,72 @@ public String toString() { .add("workloadType", workloadType) .toString(); } + + ServedEntityOutputPb toPb() { + ServedEntityOutputPb pb = new ServedEntityOutputPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setCreator(creator); + pb.setEntityName(entityName); + pb.setEntityVersion(entityVersion); + pb.setEnvironmentVars(environmentVars); + pb.setExternalModel(externalModel); + pb.setFoundationModel(foundationModel); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxProvisionedConcurrency(maxProvisionedConcurrency); + pb.setMaxProvisionedThroughput(maxProvisionedThroughput); + pb.setMinProvisionedConcurrency(minProvisionedConcurrency); + pb.setMinProvisionedThroughput(minProvisionedThroughput); + pb.setName(name); + pb.setProvisionedModelUnits(provisionedModelUnits); + pb.setScaleToZeroEnabled(scaleToZeroEnabled); + pb.setState(state); + pb.setWorkloadSize(workloadSize); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ServedEntityOutput fromPb(ServedEntityOutputPb pb) { + ServedEntityOutput model = new ServedEntityOutput(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCreator(pb.getCreator()); + model.setEntityName(pb.getEntityName()); + model.setEntityVersion(pb.getEntityVersion()); + model.setEnvironmentVars(pb.getEnvironmentVars()); + model.setExternalModel(pb.getExternalModel()); + model.setFoundationModel(pb.getFoundationModel()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxProvisionedConcurrency(pb.getMaxProvisionedConcurrency()); + model.setMaxProvisionedThroughput(pb.getMaxProvisionedThroughput()); + 
model.setMinProvisionedConcurrency(pb.getMinProvisionedConcurrency()); + model.setMinProvisionedThroughput(pb.getMinProvisionedThroughput()); + model.setName(pb.getName()); + model.setProvisionedModelUnits(pb.getProvisionedModelUnits()); + model.setScaleToZeroEnabled(pb.getScaleToZeroEnabled()); + model.setState(pb.getState()); + model.setWorkloadSize(pb.getWorkloadSize()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ServedEntityOutputSerializer extends JsonSerializer { + @Override + public void serialize(ServedEntityOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedEntityOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedEntityOutputDeserializer extends JsonDeserializer { + @Override + public ServedEntityOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedEntityOutputPb pb = mapper.readValue(p, ServedEntityOutputPb.class); + return ServedEntityOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutputPb.java new file mode 100755 index 000000000..b657ac975 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutputPb.java @@ -0,0 +1,300 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.serving;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import java.util.Objects;

/**
 * Wire-format (JSON) twin of {@code ServedEntityOutput}. Carries the {@code @JsonProperty}
 * annotations so the public model class can delegate Jackson (de)serialization to this
 * package-private DTO via its custom serializer/deserializer pair.
 *
 * <p>Plain mutable bean with fluent setters; {@code equals}/{@code hashCode}/{@code toString}
 * cover every field.
 */
@Generated
class ServedEntityOutputPb {
  @JsonProperty("creation_timestamp")
  private Long creationTimestamp;

  @JsonProperty("creator")
  private String creator;

  @JsonProperty("entity_name")
  private String entityName;

  @JsonProperty("entity_version")
  private String entityVersion;

  // User-specified environment variables for the served entity, e.g. secret references such as
  // {"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}"}. Keys and values are both strings.
  @JsonProperty("environment_vars")
  private Map<String, String> environmentVars;

  @JsonProperty("external_model")
  private ExternalModel externalModel;

  @JsonProperty("foundation_model")
  private FoundationModel foundationModel;

  // ARN of the instance profile the served entity uses to access AWS resources.
  @JsonProperty("instance_profile_arn")
  private String instanceProfileArn;

  @JsonProperty("max_provisioned_concurrency")
  private Long maxProvisionedConcurrency;

  @JsonProperty("max_provisioned_throughput")
  private Long maxProvisionedThroughput;

  @JsonProperty("min_provisioned_concurrency")
  private Long minProvisionedConcurrency;

  @JsonProperty("min_provisioned_throughput")
  private Long minProvisionedThroughput;

  @JsonProperty("name")
  private String name;

  @JsonProperty("provisioned_model_units")
  private Long provisionedModelUnits;

  @JsonProperty("scale_to_zero_enabled")
  private Boolean scaleToZeroEnabled;

  @JsonProperty("state")
  private ServedModelState state;

  @JsonProperty("workload_size")
  private String workloadSize;

  @JsonProperty("workload_type")
  private ServingModelWorkloadType workloadType;

  public ServedEntityOutputPb setCreationTimestamp(Long creationTimestamp) {
    this.creationTimestamp = creationTimestamp;
    return this;
  }

  public Long getCreationTimestamp() {
    return creationTimestamp;
  }

  public ServedEntityOutputPb setCreator(String creator) {
    this.creator = creator;
    return this;
  }

  public String getCreator() {
    return creator;
  }

  public ServedEntityOutputPb setEntityName(String entityName) {
    this.entityName = entityName;
    return this;
  }

  public String getEntityName() {
    return entityName;
  }

  public ServedEntityOutputPb setEntityVersion(String entityVersion) {
    this.entityVersion = entityVersion;
    return this;
  }

  public String getEntityVersion() {
    return entityVersion;
  }

  public ServedEntityOutputPb setEnvironmentVars(Map<String, String> environmentVars) {
    this.environmentVars = environmentVars;
    return this;
  }

  public Map<String, String> getEnvironmentVars() {
    return environmentVars;
  }

  public ServedEntityOutputPb setExternalModel(ExternalModel externalModel) {
    this.externalModel = externalModel;
    return this;
  }

  public ExternalModel getExternalModel() {
    return externalModel;
  }

  public ServedEntityOutputPb setFoundationModel(FoundationModel foundationModel) {
    this.foundationModel = foundationModel;
    return this;
  }

  public FoundationModel getFoundationModel() {
    return foundationModel;
  }

  public ServedEntityOutputPb setInstanceProfileArn(String instanceProfileArn) {
    this.instanceProfileArn = instanceProfileArn;
    return this;
  }

  public String getInstanceProfileArn() {
    return instanceProfileArn;
  }

  public ServedEntityOutputPb setMaxProvisionedConcurrency(Long maxProvisionedConcurrency) {
    this.maxProvisionedConcurrency = maxProvisionedConcurrency;
    return this;
  }

  public Long getMaxProvisionedConcurrency() {
    return maxProvisionedConcurrency;
  }

  public ServedEntityOutputPb setMaxProvisionedThroughput(Long maxProvisionedThroughput) {
    this.maxProvisionedThroughput = maxProvisionedThroughput;
    return this;
  }

  public Long getMaxProvisionedThroughput() {
    return maxProvisionedThroughput;
  }

  public ServedEntityOutputPb setMinProvisionedConcurrency(Long minProvisionedConcurrency) {
    this.minProvisionedConcurrency = minProvisionedConcurrency;
    return this;
  }

  public Long getMinProvisionedConcurrency() {
    return minProvisionedConcurrency;
  }

  public ServedEntityOutputPb setMinProvisionedThroughput(Long minProvisionedThroughput) {
    this.minProvisionedThroughput = minProvisionedThroughput;
    return this;
  }

  public Long getMinProvisionedThroughput() {
    return minProvisionedThroughput;
  }

  public ServedEntityOutputPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public ServedEntityOutputPb setProvisionedModelUnits(Long provisionedModelUnits) {
    this.provisionedModelUnits = provisionedModelUnits;
    return this;
  }

  public Long getProvisionedModelUnits() {
    return provisionedModelUnits;
  }

  public ServedEntityOutputPb setScaleToZeroEnabled(Boolean scaleToZeroEnabled) {
    this.scaleToZeroEnabled = scaleToZeroEnabled;
    return this;
  }

  public Boolean getScaleToZeroEnabled() {
    return scaleToZeroEnabled;
  }

  public ServedEntityOutputPb setState(ServedModelState state) {
    this.state = state;
    return this;
  }

  public ServedModelState getState() {
    return state;
  }

  public ServedEntityOutputPb setWorkloadSize(String workloadSize) {
    this.workloadSize = workloadSize;
    return this;
  }

  public String getWorkloadSize() {
    return workloadSize;
  }

  public ServedEntityOutputPb setWorkloadType(ServingModelWorkloadType workloadType) {
    this.workloadType = workloadType;
    return this;
  }

  public ServingModelWorkloadType getWorkloadType() {
    return workloadType;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ServedEntityOutputPb that = (ServedEntityOutputPb) o;
    return Objects.equals(creationTimestamp, that.creationTimestamp)
        && Objects.equals(creator, that.creator)
        && Objects.equals(entityName, that.entityName)
        && Objects.equals(entityVersion, that.entityVersion)
        && Objects.equals(environmentVars, that.environmentVars)
        && Objects.equals(externalModel, that.externalModel)
        && Objects.equals(foundationModel, that.foundationModel)
        && Objects.equals(instanceProfileArn, that.instanceProfileArn)
        && Objects.equals(maxProvisionedConcurrency, that.maxProvisionedConcurrency)
        && Objects.equals(maxProvisionedThroughput, that.maxProvisionedThroughput)
        && Objects.equals(minProvisionedConcurrency, that.minProvisionedConcurrency)
        && Objects.equals(minProvisionedThroughput, that.minProvisionedThroughput)
        && Objects.equals(name, that.name)
        && Objects.equals(provisionedModelUnits, that.provisionedModelUnits)
        && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled)
        && Objects.equals(state, that.state)
        && Objects.equals(workloadSize, that.workloadSize)
        && Objects.equals(workloadType, that.workloadType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        creationTimestamp,
        creator,
        entityName,
        entityVersion,
        environmentVars,
        externalModel,
        foundationModel,
        instanceProfileArn,
        maxProvisionedConcurrency,
        maxProvisionedThroughput,
        minProvisionedConcurrency,
        minProvisionedThroughput,
        name,
        provisionedModelUnits,
        scaleToZeroEnabled,
        state,
        workloadSize,
        workloadType);
  }

  @Override
  public String toString() {
    return new ToStringer(ServedEntityOutputPb.class)
        .add("creationTimestamp", creationTimestamp)
        .add("creator", creator)
        .add("entityName", entityName)
        .add("entityVersion", entityVersion)
        .add("environmentVars", environmentVars)
        .add("externalModel", externalModel)
        .add("foundationModel", foundationModel)
        .add("instanceProfileArn", instanceProfileArn)
        .add("maxProvisionedConcurrency", maxProvisionedConcurrency)
        .add("maxProvisionedThroughput", maxProvisionedThroughput)
        .add("minProvisionedConcurrency", minProvisionedConcurrency)
        .add("minProvisionedThroughput", minProvisionedThroughput)
        .add("name", name)
        .add("provisionedModelUnits", provisionedModelUnits)
        .add("scaleToZeroEnabled", scaleToZeroEnabled)
        .add("state", state)
        .add("workloadSize", workloadSize)
        .add("workloadType", workloadType)
        .toString();
  }
}
*/ - @JsonProperty("foundation_model") private FoundationModel foundationModel; /** */ - @JsonProperty("name") private String name; public ServedEntitySpec setEntityName(String entityName) { @@ -104,4 +110,46 @@ public String toString() { .add("name", name) .toString(); } + + ServedEntitySpecPb toPb() { + ServedEntitySpecPb pb = new ServedEntitySpecPb(); + pb.setEntityName(entityName); + pb.setEntityVersion(entityVersion); + pb.setExternalModel(externalModel); + pb.setFoundationModel(foundationModel); + pb.setName(name); + + return pb; + } + + static ServedEntitySpec fromPb(ServedEntitySpecPb pb) { + ServedEntitySpec model = new ServedEntitySpec(); + model.setEntityName(pb.getEntityName()); + model.setEntityVersion(pb.getEntityVersion()); + model.setExternalModel(pb.getExternalModel()); + model.setFoundationModel(pb.getFoundationModel()); + model.setName(pb.getName()); + + return model; + } + + public static class ServedEntitySpecSerializer extends JsonSerializer { + @Override + public void serialize(ServedEntitySpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedEntitySpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedEntitySpecDeserializer extends JsonDeserializer { + @Override + public ServedEntitySpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.serving;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Wire-format (JSON) twin of {@code ServedEntitySpec}. Carries the {@code @JsonProperty}
 * annotations so the public model class can delegate Jackson (de)serialization to this
 * package-private DTO via its custom serializer/deserializer pair.
 *
 * <p>Plain mutable bean with fluent setters; {@code equals}/{@code hashCode}/{@code toString}
 * cover every field.
 */
@Generated
class ServedEntitySpecPb {
  @JsonProperty("entity_name")
  private String entityName;

  @JsonProperty("entity_version")
  private String entityVersion;

  @JsonProperty("external_model")
  private ExternalModel externalModel;

  // Hard-coded system metadata surfaced to customers; not sensitive.
  @JsonProperty("foundation_model")
  private FoundationModel foundationModel;

  @JsonProperty("name")
  private String name;

  public ServedEntitySpecPb setEntityName(String entityName) {
    this.entityName = entityName;
    return this;
  }

  public String getEntityName() {
    return entityName;
  }

  public ServedEntitySpecPb setEntityVersion(String entityVersion) {
    this.entityVersion = entityVersion;
    return this;
  }

  public String getEntityVersion() {
    return entityVersion;
  }

  public ServedEntitySpecPb setExternalModel(ExternalModel externalModel) {
    this.externalModel = externalModel;
    return this;
  }

  public ExternalModel getExternalModel() {
    return externalModel;
  }

  public ServedEntitySpecPb setFoundationModel(FoundationModel foundationModel) {
    this.foundationModel = foundationModel;
    return this;
  }

  public FoundationModel getFoundationModel() {
    return foundationModel;
  }

  public ServedEntitySpecPb setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ServedEntitySpecPb that = (ServedEntitySpecPb) o;
    return Objects.equals(entityName, that.entityName)
        && Objects.equals(entityVersion, that.entityVersion)
        && Objects.equals(externalModel, that.externalModel)
        && Objects.equals(foundationModel, that.foundationModel)
        && Objects.equals(name, that.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(entityName, entityVersion, externalModel, foundationModel, name);
  }

  @Override
  public String toString() {
    return new ToStringer(ServedEntitySpecPb.class)
        .add("entityName", entityName)
        .add("entityVersion", entityVersion)
        .add("externalModel", externalModel)
        .add("foundationModel", foundationModel)
        .add("name", name)
        .toString();
  }
}
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ServedModelInput.ServedModelInputSerializer.class) +@JsonDeserialize(using = ServedModelInput.ServedModelInputDeserializer.class) public class ServedModelInput { /** * An object containing a set of optional, user-specified environment variable key-value pairs @@ -16,41 +27,33 @@ public class ServedModelInput { * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": * "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}` */ - @JsonProperty("environment_vars") private Map environmentVars; /** ARN of the instance profile that the served entity uses to access AWS resources. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** * The maximum provisioned concurrency that the endpoint can scale up to. Do not use if * workload_size is specified. */ - @JsonProperty("max_provisioned_concurrency") private Long maxProvisionedConcurrency; /** The maximum tokens per second that the endpoint can scale up to. */ - @JsonProperty("max_provisioned_throughput") private Long maxProvisionedThroughput; /** * The minimum provisioned concurrency that the endpoint can scale down to. Do not use if * workload_size is specified. */ - @JsonProperty("min_provisioned_concurrency") private Long minProvisionedConcurrency; /** The minimum tokens per second that the endpoint can scale down to. 
*/ - @JsonProperty("min_provisioned_throughput") private Long minProvisionedThroughput; /** */ - @JsonProperty("model_name") private String modelName; /** */ - @JsonProperty("model_version") private String modelVersion; /** @@ -59,15 +62,12 @@ public class ServedModelInput { * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if * not specified for other entities, it defaults to entity_name-entity_version. */ - @JsonProperty("name") private String name; /** The number of model units provisioned. */ - @JsonProperty("provisioned_model_units") private Long provisionedModelUnits; /** Whether the compute resources for the served entity should scale down to zero. */ - @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; /** @@ -80,7 +80,6 @@ public class ServedModelInput { * workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency * are specified. */ - @JsonProperty("workload_size") private String workloadSize; /** @@ -92,7 +91,6 @@ public class ServedModelInput { *

[GPU types]: * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types */ - @JsonProperty("workload_type") private ServedModelInputWorkloadType workloadType; public ServedModelInput setEnvironmentVars(Map environmentVars) { @@ -268,4 +266,62 @@ public String toString() { .add("workloadType", workloadType) .toString(); } + + ServedModelInputPb toPb() { + ServedModelInputPb pb = new ServedModelInputPb(); + pb.setEnvironmentVars(environmentVars); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxProvisionedConcurrency(maxProvisionedConcurrency); + pb.setMaxProvisionedThroughput(maxProvisionedThroughput); + pb.setMinProvisionedConcurrency(minProvisionedConcurrency); + pb.setMinProvisionedThroughput(minProvisionedThroughput); + pb.setModelName(modelName); + pb.setModelVersion(modelVersion); + pb.setName(name); + pb.setProvisionedModelUnits(provisionedModelUnits); + pb.setScaleToZeroEnabled(scaleToZeroEnabled); + pb.setWorkloadSize(workloadSize); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ServedModelInput fromPb(ServedModelInputPb pb) { + ServedModelInput model = new ServedModelInput(); + model.setEnvironmentVars(pb.getEnvironmentVars()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxProvisionedConcurrency(pb.getMaxProvisionedConcurrency()); + model.setMaxProvisionedThroughput(pb.getMaxProvisionedThroughput()); + model.setMinProvisionedConcurrency(pb.getMinProvisionedConcurrency()); + model.setMinProvisionedThroughput(pb.getMinProvisionedThroughput()); + model.setModelName(pb.getModelName()); + model.setModelVersion(pb.getModelVersion()); + model.setName(pb.getName()); + model.setProvisionedModelUnits(pb.getProvisionedModelUnits()); + model.setScaleToZeroEnabled(pb.getScaleToZeroEnabled()); + model.setWorkloadSize(pb.getWorkloadSize()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class 
ServedModelInputSerializer extends JsonSerializer { + @Override + public void serialize(ServedModelInput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedModelInputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedModelInputDeserializer extends JsonDeserializer { + @Override + public ServedModelInput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedModelInputPb pb = mapper.readValue(p, ServedModelInputPb.class); + return ServedModelInput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputPb.java new file mode 100755 index 000000000..1ff992811 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ServedModelInputPb { + @JsonProperty("environment_vars") + private Map environmentVars; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("max_provisioned_concurrency") + private Long maxProvisionedConcurrency; + + @JsonProperty("max_provisioned_throughput") + private Long maxProvisionedThroughput; + + @JsonProperty("min_provisioned_concurrency") + private Long minProvisionedConcurrency; + + @JsonProperty("min_provisioned_throughput") + private Long minProvisionedThroughput; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("model_version") + private String modelVersion; + + @JsonProperty("name") + private String name; + + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + + @JsonProperty("scale_to_zero_enabled") + private Boolean scaleToZeroEnabled; + + @JsonProperty("workload_size") + private String workloadSize; + + @JsonProperty("workload_type") + private ServedModelInputWorkloadType workloadType; + + public ServedModelInputPb setEnvironmentVars(Map environmentVars) { + this.environmentVars = environmentVars; + return this; + } + + public Map getEnvironmentVars() { + return environmentVars; + } + + public ServedModelInputPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public ServedModelInputPb setMaxProvisionedConcurrency(Long maxProvisionedConcurrency) { + this.maxProvisionedConcurrency = maxProvisionedConcurrency; + return this; + } + + public Long getMaxProvisionedConcurrency() { + return maxProvisionedConcurrency; + } + + public 
ServedModelInputPb setMaxProvisionedThroughput(Long maxProvisionedThroughput) { + this.maxProvisionedThroughput = maxProvisionedThroughput; + return this; + } + + public Long getMaxProvisionedThroughput() { + return maxProvisionedThroughput; + } + + public ServedModelInputPb setMinProvisionedConcurrency(Long minProvisionedConcurrency) { + this.minProvisionedConcurrency = minProvisionedConcurrency; + return this; + } + + public Long getMinProvisionedConcurrency() { + return minProvisionedConcurrency; + } + + public ServedModelInputPb setMinProvisionedThroughput(Long minProvisionedThroughput) { + this.minProvisionedThroughput = minProvisionedThroughput; + return this; + } + + public Long getMinProvisionedThroughput() { + return minProvisionedThroughput; + } + + public ServedModelInputPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ServedModelInputPb setModelVersion(String modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public String getModelVersion() { + return modelVersion; + } + + public ServedModelInputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ServedModelInputPb setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + + public ServedModelInputPb setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { + this.scaleToZeroEnabled = scaleToZeroEnabled; + return this; + } + + public Boolean getScaleToZeroEnabled() { + return scaleToZeroEnabled; + } + + public ServedModelInputPb setWorkloadSize(String workloadSize) { + this.workloadSize = workloadSize; + return this; + } + + public String getWorkloadSize() { + return workloadSize; + } + + public ServedModelInputPb setWorkloadType(ServedModelInputWorkloadType 
workloadType) { + this.workloadType = workloadType; + return this; + } + + public ServedModelInputWorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServedModelInputPb that = (ServedModelInputPb) o; + return Objects.equals(environmentVars, that.environmentVars) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(maxProvisionedConcurrency, that.maxProvisionedConcurrency) + && Objects.equals(maxProvisionedThroughput, that.maxProvisionedThroughput) + && Objects.equals(minProvisionedConcurrency, that.minProvisionedConcurrency) + && Objects.equals(minProvisionedThroughput, that.minProvisionedThroughput) + && Objects.equals(modelName, that.modelName) + && Objects.equals(modelVersion, that.modelVersion) + && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) + && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) + && Objects.equals(workloadSize, that.workloadSize) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + environmentVars, + instanceProfileArn, + maxProvisionedConcurrency, + maxProvisionedThroughput, + minProvisionedConcurrency, + minProvisionedThroughput, + modelName, + modelVersion, + name, + provisionedModelUnits, + scaleToZeroEnabled, + workloadSize, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(ServedModelInputPb.class) + .add("environmentVars", environmentVars) + .add("instanceProfileArn", instanceProfileArn) + .add("maxProvisionedConcurrency", maxProvisionedConcurrency) + .add("maxProvisionedThroughput", maxProvisionedThroughput) + .add("minProvisionedConcurrency", minProvisionedConcurrency) + .add("minProvisionedThroughput", minProvisionedThroughput) + .add("modelName", modelName) + 
.add("modelVersion", modelVersion) + .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) + .add("scaleToZeroEnabled", scaleToZeroEnabled) + .add("workloadSize", workloadSize) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java index dfdc57241..7d68284c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ServedModelOutput.ServedModelOutputSerializer.class) +@JsonDeserialize(using = ServedModelOutput.ServedModelOutputDeserializer.class) public class ServedModelOutput { /** */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** */ - @JsonProperty("creator") private String creator; /** @@ -24,33 +33,27 @@ public class ServedModelOutput { * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": * "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": 
"{{secrets/my_scope2/my_key2}}"}` */ - @JsonProperty("environment_vars") private Map environmentVars; /** ARN of the instance profile that the served entity uses to access AWS resources. */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** * The maximum provisioned concurrency that the endpoint can scale up to. Do not use if * workload_size is specified. */ - @JsonProperty("max_provisioned_concurrency") private Long maxProvisionedConcurrency; /** * The minimum provisioned concurrency that the endpoint can scale down to. Do not use if * workload_size is specified. */ - @JsonProperty("min_provisioned_concurrency") private Long minProvisionedConcurrency; /** */ - @JsonProperty("model_name") private String modelName; /** */ - @JsonProperty("model_version") private String modelVersion; /** @@ -59,19 +62,15 @@ public class ServedModelOutput { * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if * not specified for other entities, it defaults to entity_name-entity_version. */ - @JsonProperty("name") private String name; /** The number of model units provisioned. */ - @JsonProperty("provisioned_model_units") private Long provisionedModelUnits; /** Whether the compute resources for the served entity should scale down to zero. */ - @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; /** */ - @JsonProperty("state") private ServedModelState state; /** @@ -84,7 +83,6 @@ public class ServedModelOutput { * workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency * are specified. */ - @JsonProperty("workload_size") private String workloadSize; /** @@ -96,7 +94,6 @@ public class ServedModelOutput { *

[GPU types]: * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types */ - @JsonProperty("workload_type") private ServingModelWorkloadType workloadType; public ServedModelOutput setCreationTimestamp(Long creationTimestamp) { @@ -284,4 +281,64 @@ public String toString() { .add("workloadType", workloadType) .toString(); } + + ServedModelOutputPb toPb() { + ServedModelOutputPb pb = new ServedModelOutputPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setCreator(creator); + pb.setEnvironmentVars(environmentVars); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxProvisionedConcurrency(maxProvisionedConcurrency); + pb.setMinProvisionedConcurrency(minProvisionedConcurrency); + pb.setModelName(modelName); + pb.setModelVersion(modelVersion); + pb.setName(name); + pb.setProvisionedModelUnits(provisionedModelUnits); + pb.setScaleToZeroEnabled(scaleToZeroEnabled); + pb.setState(state); + pb.setWorkloadSize(workloadSize); + pb.setWorkloadType(workloadType); + + return pb; + } + + static ServedModelOutput fromPb(ServedModelOutputPb pb) { + ServedModelOutput model = new ServedModelOutput(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCreator(pb.getCreator()); + model.setEnvironmentVars(pb.getEnvironmentVars()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxProvisionedConcurrency(pb.getMaxProvisionedConcurrency()); + model.setMinProvisionedConcurrency(pb.getMinProvisionedConcurrency()); + model.setModelName(pb.getModelName()); + model.setModelVersion(pb.getModelVersion()); + model.setName(pb.getName()); + model.setProvisionedModelUnits(pb.getProvisionedModelUnits()); + model.setScaleToZeroEnabled(pb.getScaleToZeroEnabled()); + model.setState(pb.getState()); + model.setWorkloadSize(pb.getWorkloadSize()); + model.setWorkloadType(pb.getWorkloadType()); + + return model; + } + + public static class ServedModelOutputSerializer extends JsonSerializer { + 
@Override + public void serialize(ServedModelOutput value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedModelOutputPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedModelOutputDeserializer extends JsonDeserializer { + @Override + public ServedModelOutput deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedModelOutputPb pb = mapper.readValue(p, ServedModelOutputPb.class); + return ServedModelOutput.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutputPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutputPb.java new file mode 100755 index 000000000..301f0c98d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutputPb.java @@ -0,0 +1,240 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ServedModelOutputPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("environment_vars") + private Map environmentVars; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("max_provisioned_concurrency") + private Long maxProvisionedConcurrency; + + @JsonProperty("min_provisioned_concurrency") + private Long minProvisionedConcurrency; + + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("model_version") + private String modelVersion; + + @JsonProperty("name") + private String name; + + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + + @JsonProperty("scale_to_zero_enabled") + private Boolean scaleToZeroEnabled; + + @JsonProperty("state") + private ServedModelState state; + + @JsonProperty("workload_size") + private String workloadSize; + + @JsonProperty("workload_type") + private ServingModelWorkloadType workloadType; + + public ServedModelOutputPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ServedModelOutputPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public ServedModelOutputPb setEnvironmentVars(Map environmentVars) { + this.environmentVars = environmentVars; + return this; + } + + public Map getEnvironmentVars() { + return environmentVars; + } + + public ServedModelOutputPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = 
instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public ServedModelOutputPb setMaxProvisionedConcurrency(Long maxProvisionedConcurrency) { + this.maxProvisionedConcurrency = maxProvisionedConcurrency; + return this; + } + + public Long getMaxProvisionedConcurrency() { + return maxProvisionedConcurrency; + } + + public ServedModelOutputPb setMinProvisionedConcurrency(Long minProvisionedConcurrency) { + this.minProvisionedConcurrency = minProvisionedConcurrency; + return this; + } + + public Long getMinProvisionedConcurrency() { + return minProvisionedConcurrency; + } + + public ServedModelOutputPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ServedModelOutputPb setModelVersion(String modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public String getModelVersion() { + return modelVersion; + } + + public ServedModelOutputPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ServedModelOutputPb setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + + public ServedModelOutputPb setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { + this.scaleToZeroEnabled = scaleToZeroEnabled; + return this; + } + + public Boolean getScaleToZeroEnabled() { + return scaleToZeroEnabled; + } + + public ServedModelOutputPb setState(ServedModelState state) { + this.state = state; + return this; + } + + public ServedModelState getState() { + return state; + } + + public ServedModelOutputPb setWorkloadSize(String workloadSize) { + this.workloadSize = workloadSize; + return this; + } + + public String getWorkloadSize() { + return workloadSize; + } + + public 
ServedModelOutputPb setWorkloadType(ServingModelWorkloadType workloadType) { + this.workloadType = workloadType; + return this; + } + + public ServingModelWorkloadType getWorkloadType() { + return workloadType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServedModelOutputPb that = (ServedModelOutputPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(creator, that.creator) + && Objects.equals(environmentVars, that.environmentVars) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(maxProvisionedConcurrency, that.maxProvisionedConcurrency) + && Objects.equals(minProvisionedConcurrency, that.minProvisionedConcurrency) + && Objects.equals(modelName, that.modelName) + && Objects.equals(modelVersion, that.modelVersion) + && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) + && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) + && Objects.equals(state, that.state) + && Objects.equals(workloadSize, that.workloadSize) + && Objects.equals(workloadType, that.workloadType); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + creator, + environmentVars, + instanceProfileArn, + maxProvisionedConcurrency, + minProvisionedConcurrency, + modelName, + modelVersion, + name, + provisionedModelUnits, + scaleToZeroEnabled, + state, + workloadSize, + workloadType); + } + + @Override + public String toString() { + return new ToStringer(ServedModelOutputPb.class) + .add("creationTimestamp", creationTimestamp) + .add("creator", creator) + .add("environmentVars", environmentVars) + .add("instanceProfileArn", instanceProfileArn) + .add("maxProvisionedConcurrency", maxProvisionedConcurrency) + .add("minProvisionedConcurrency", minProvisionedConcurrency) + .add("modelName", modelName) + .add("modelVersion", 
modelVersion) + .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) + .add("scaleToZeroEnabled", scaleToZeroEnabled) + .add("state", state) + .add("workloadSize", workloadSize) + .add("workloadType", workloadType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java index 233618bd9..89facc659 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ServedModelSpec.ServedModelSpecSerializer.class) +@JsonDeserialize(using = ServedModelSpec.ServedModelSpecDeserializer.class) public class ServedModelSpec { /** Only one of model_name and entity_name should be populated */ - @JsonProperty("model_name") private String modelName; /** Only one of model_version and entity_version should be populated */ - @JsonProperty("model_version") private String modelVersion; /** */ - @JsonProperty("name") private String name; public ServedModelSpec setModelName(String modelName) { @@ -71,4 +79,42 @@ public 
String toString() { .add("name", name) .toString(); } + + ServedModelSpecPb toPb() { + ServedModelSpecPb pb = new ServedModelSpecPb(); + pb.setModelName(modelName); + pb.setModelVersion(modelVersion); + pb.setName(name); + + return pb; + } + + static ServedModelSpec fromPb(ServedModelSpecPb pb) { + ServedModelSpec model = new ServedModelSpec(); + model.setModelName(pb.getModelName()); + model.setModelVersion(pb.getModelVersion()); + model.setName(pb.getName()); + + return model; + } + + public static class ServedModelSpecSerializer extends JsonSerializer { + @Override + public void serialize(ServedModelSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedModelSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedModelSpecDeserializer extends JsonDeserializer { + @Override + public ServedModelSpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedModelSpecPb pb = mapper.readValue(p, ServedModelSpecPb.class); + return ServedModelSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpecPb.java new file mode 100755 index 000000000..9a60b26be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpecPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServedModelSpecPb { + @JsonProperty("model_name") + private String modelName; + + @JsonProperty("model_version") + private String modelVersion; + + @JsonProperty("name") + private String name; + + public ServedModelSpecPb setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ServedModelSpecPb setModelVersion(String modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public String getModelVersion() { + return modelVersion; + } + + public ServedModelSpecPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServedModelSpecPb that = (ServedModelSpecPb) o; + return Objects.equals(modelName, that.modelName) + && Objects.equals(modelVersion, that.modelVersion) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(modelName, modelVersion, name); + } + + @Override + public String toString() { + return new ToStringer(ServedModelSpecPb.class) + .add("modelName", modelName) + .add("modelVersion", modelVersion) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java index 4c71f5360..4f63dad7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java 
@@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ServedModelState.ServedModelStateSerializer.class) +@JsonDeserialize(using = ServedModelState.ServedModelStateDeserializer.class) public class ServedModelState { /** */ - @JsonProperty("deployment") private ServedModelStateDeployment deployment; /** */ - @JsonProperty("deployment_state_message") private String deploymentStateMessage; public ServedModelState setDeployment(ServedModelStateDeployment deployment) { @@ -56,4 +65,40 @@ public String toString() { .add("deploymentStateMessage", deploymentStateMessage) .toString(); } + + ServedModelStatePb toPb() { + ServedModelStatePb pb = new ServedModelStatePb(); + pb.setDeployment(deployment); + pb.setDeploymentStateMessage(deploymentStateMessage); + + return pb; + } + + static ServedModelState fromPb(ServedModelStatePb pb) { + ServedModelState model = new ServedModelState(); + model.setDeployment(pb.getDeployment()); + model.setDeploymentStateMessage(pb.getDeploymentStateMessage()); + + return model; + } + + public static class ServedModelStateSerializer extends JsonSerializer { + @Override + public void serialize(ServedModelState value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServedModelStatePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServedModelStateDeserializer extends JsonDeserializer { + @Override + public ServedModelState deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServedModelStatePb pb = mapper.readValue(p, ServedModelStatePb.class); + return ServedModelState.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStatePb.java new file mode 100755 index 000000000..3f95955d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStatePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServedModelStatePb { + @JsonProperty("deployment") + private ServedModelStateDeployment deployment; + + @JsonProperty("deployment_state_message") + private String deploymentStateMessage; + + public ServedModelStatePb setDeployment(ServedModelStateDeployment deployment) { + this.deployment = deployment; + return this; + } + + public ServedModelStateDeployment getDeployment() { + return deployment; + } + + public ServedModelStatePb setDeploymentStateMessage(String deploymentStateMessage) { + this.deploymentStateMessage = deploymentStateMessage; + return this; + } + + public String getDeploymentStateMessage() { + return deploymentStateMessage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ServedModelStatePb that = (ServedModelStatePb) o; + return Objects.equals(deployment, that.deployment) + && Objects.equals(deploymentStateMessage, that.deploymentStateMessage); + } + + @Override + public int hashCode() { + return Objects.hash(deployment, deploymentStateMessage); + } + + @Override + public String toString() { + return new ToStringer(ServedModelStatePb.class) + .add("deployment", deployment) + .add("deploymentStateMessage", deploymentStateMessage) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponse.java index 91991ae5e..21c03ecbb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ServerLogsResponse.ServerLogsResponseSerializer.class) +@JsonDeserialize(using = ServerLogsResponse.ServerLogsResponseDeserializer.class) public class ServerLogsResponse { /** The most recent log lines of the model server processing invocation requests. 
*/ - @JsonProperty("logs") private String logs; public ServerLogsResponse setLogs(String logs) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(ServerLogsResponse.class).add("logs", logs).toString(); } + + ServerLogsResponsePb toPb() { + ServerLogsResponsePb pb = new ServerLogsResponsePb(); + pb.setLogs(logs); + + return pb; + } + + static ServerLogsResponse fromPb(ServerLogsResponsePb pb) { + ServerLogsResponse model = new ServerLogsResponse(); + model.setLogs(pb.getLogs()); + + return model; + } + + public static class ServerLogsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ServerLogsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServerLogsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServerLogsResponseDeserializer extends JsonDeserializer { + @Override + public ServerLogsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServerLogsResponsePb pb = mapper.readValue(p, ServerLogsResponsePb.class); + return ServerLogsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponsePb.java new file mode 100755 index 000000000..93f6fa62c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServerLogsResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServerLogsResponsePb { + @JsonProperty("logs") + private String logs; + + public ServerLogsResponsePb setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServerLogsResponsePb that = (ServerLogsResponsePb) o; + return Objects.equals(logs, that.logs); + } + + @Override + public int hashCode() { + return Objects.hash(logs); + } + + @Override + public String toString() { + return new ToStringer(ServerLogsResponsePb.class).add("logs", logs).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java index 9a0da78e1..4eeb65d8e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java @@ -4,58 +4,58 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ServingEndpoint.ServingEndpointSerializer.class) +@JsonDeserialize(using = ServingEndpoint.ServingEndpointDeserializer.class) public class ServingEndpoint { /** * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only * support inference tables. */ - @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; /** The budget policy associated with the endpoint. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** The config that is currently being served by the endpoint. */ - @JsonProperty("config") private EndpointCoreConfigSummary config; /** The timestamp when the endpoint was created in Unix time. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** The email of the user who created the serving endpoint. */ - @JsonProperty("creator") private String creator; /** System-generated ID of the endpoint, included to be used by the Permissions API. */ - @JsonProperty("id") private String id; /** The timestamp when the endpoint was last updated by a user in Unix time. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** The name of the serving endpoint. */ - @JsonProperty("name") private String name; /** Information corresponding to the state of the serving endpoint. */ - @JsonProperty("state") private EndpointState state; /** Tags attached to the serving endpoint. */ - @JsonProperty("tags") private Collection tags; /** The task type of the serving endpoint. 
*/ - @JsonProperty("task") private String task; public ServingEndpoint setAiGateway(AiGatewayConfig aiGateway) { @@ -207,4 +207,58 @@ public String toString() { .add("task", task) .toString(); } + + ServingEndpointPb toPb() { + ServingEndpointPb pb = new ServingEndpointPb(); + pb.setAiGateway(aiGateway); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setConfig(config); + pb.setCreationTimestamp(creationTimestamp); + pb.setCreator(creator); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setName(name); + pb.setState(state); + pb.setTags(tags); + pb.setTask(task); + + return pb; + } + + static ServingEndpoint fromPb(ServingEndpointPb pb) { + ServingEndpoint model = new ServingEndpoint(); + model.setAiGateway(pb.getAiGateway()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setConfig(pb.getConfig()); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCreator(pb.getCreator()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setName(pb.getName()); + model.setState(pb.getState()); + model.setTags(pb.getTags()); + model.setTask(pb.getTask()); + + return model; + } + + public static class ServingEndpointSerializer extends JsonSerializer { + @Override + public void serialize(ServingEndpoint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointDeserializer extends JsonDeserializer { + @Override + public ServingEndpoint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointPb pb = mapper.readValue(p, ServingEndpointPb.class); + return ServingEndpoint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java index a1c66a4f3..2f9b18c40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java @@ -4,25 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ServingEndpointAccessControlRequest.ServingEndpointAccessControlRequestSerializer.class) +@JsonDeserialize( + using = + ServingEndpointAccessControlRequest.ServingEndpointAccessControlRequestDeserializer.class) public class ServingEndpointAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private 
String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ServingEndpointAccessControlRequest setGroupName(String groupName) { @@ -87,4 +97,48 @@ public String toString() { .add("userName", userName) .toString(); } + + ServingEndpointAccessControlRequestPb toPb() { + ServingEndpointAccessControlRequestPb pb = new ServingEndpointAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ServingEndpointAccessControlRequest fromPb(ServingEndpointAccessControlRequestPb pb) { + ServingEndpointAccessControlRequest model = new ServingEndpointAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ServingEndpointAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointAccessControlRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointAccessControlRequestPb pb = + mapper.readValue(p, ServingEndpointAccessControlRequestPb.class); + return ServingEndpointAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequestPb.java new file mode 100755 index 000000000..04b245141 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServingEndpointAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private ServingEndpointPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ServingEndpointAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public ServingEndpointAccessControlRequestPb setPermissionLevel( + ServingEndpointPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ServingEndpointPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ServingEndpointAccessControlRequestPb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String 
getServicePrincipalName() { + return servicePrincipalName; + } + + public ServingEndpointAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointAccessControlRequestPb that = (ServingEndpointAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponse.java index 092475ef8..4c241f756 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponse.java @@ -4,30 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ServingEndpointAccessControlResponse.ServingEndpointAccessControlResponseSerializer.class) +@JsonDeserialize( + using = + ServingEndpointAccessControlResponse.ServingEndpointAccessControlResponseDeserializer.class) public class ServingEndpointAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public ServingEndpointAccessControlResponse setAllPermissions( @@ -103,4 +113,50 @@ public String toString() { .add("userName", userName) .toString(); } + + ServingEndpointAccessControlResponsePb toPb() { + ServingEndpointAccessControlResponsePb pb = new ServingEndpointAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static ServingEndpointAccessControlResponse fromPb(ServingEndpointAccessControlResponsePb pb) { + ServingEndpointAccessControlResponse model = new ServingEndpointAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class ServingEndpointAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointAccessControlResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointAccessControlResponsePb pb = + mapper.readValue(p, ServingEndpointAccessControlResponsePb.class); + return ServingEndpointAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponsePb.java new file mode 100755 index 000000000..72f67c721 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlResponsePb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public ServingEndpointAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public ServingEndpointAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ServingEndpointAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String 
getGroupName() { + return groupName; + } + + public ServingEndpointAccessControlResponsePb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public ServingEndpointAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointAccessControlResponsePb that = (ServingEndpointAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java index b01c6e572..02395be66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java @@ -4,81 +4,76 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ServingEndpointDetailed.ServingEndpointDetailedSerializer.class) +@JsonDeserialize(using = ServingEndpointDetailed.ServingEndpointDetailedDeserializer.class) public class ServingEndpointDetailed { /** * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only * support inference tables. */ - @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; /** The budget policy associated with the endpoint. */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** The config that is currently being served by the endpoint. */ - @JsonProperty("config") private EndpointCoreConfigOutput config; /** The timestamp when the endpoint was created in Unix time. */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** The email of the user who created the serving endpoint. */ - @JsonProperty("creator") private String creator; /** Information required to query DataPlane APIs. 
*/ - @JsonProperty("data_plane_info") private ModelDataPlaneInfo dataPlaneInfo; /** Endpoint invocation url if route optimization is enabled for endpoint */ - @JsonProperty("endpoint_url") private String endpointUrl; /** * System-generated ID of the endpoint. This is used to refer to the endpoint in the Permissions * API */ - @JsonProperty("id") private String id; /** The timestamp when the endpoint was last updated by a user in Unix time. */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** The name of the serving endpoint. */ - @JsonProperty("name") private String name; /** The config that the endpoint is attempting to update to. */ - @JsonProperty("pending_config") private EndpointPendingConfig pendingConfig; /** The permission level of the principal making the request. */ - @JsonProperty("permission_level") private ServingEndpointDetailedPermissionLevel permissionLevel; /** Boolean representing if route optimization has been enabled for the endpoint */ - @JsonProperty("route_optimized") private Boolean routeOptimized; /** Information corresponding to the state of the serving endpoint. */ - @JsonProperty("state") private EndpointState state; /** Tags attached to the serving endpoint. */ - @JsonProperty("tags") private Collection tags; /** The task type of the serving endpoint. 
*/ - @JsonProperty("task") private String task; public ServingEndpointDetailed setAiGateway(AiGatewayConfig aiGateway) { @@ -291,4 +286,71 @@ public String toString() { .add("task", task) .toString(); } + + ServingEndpointDetailedPb toPb() { + ServingEndpointDetailedPb pb = new ServingEndpointDetailedPb(); + pb.setAiGateway(aiGateway); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setConfig(config); + pb.setCreationTimestamp(creationTimestamp); + pb.setCreator(creator); + pb.setDataPlaneInfo(dataPlaneInfo); + pb.setEndpointUrl(endpointUrl); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setName(name); + pb.setPendingConfig(pendingConfig); + pb.setPermissionLevel(permissionLevel); + pb.setRouteOptimized(routeOptimized); + pb.setState(state); + pb.setTags(tags); + pb.setTask(task); + + return pb; + } + + static ServingEndpointDetailed fromPb(ServingEndpointDetailedPb pb) { + ServingEndpointDetailed model = new ServingEndpointDetailed(); + model.setAiGateway(pb.getAiGateway()); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setConfig(pb.getConfig()); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCreator(pb.getCreator()); + model.setDataPlaneInfo(pb.getDataPlaneInfo()); + model.setEndpointUrl(pb.getEndpointUrl()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setName(pb.getName()); + model.setPendingConfig(pb.getPendingConfig()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setRouteOptimized(pb.getRouteOptimized()); + model.setState(pb.getState()); + model.setTags(pb.getTags()); + model.setTask(pb.getTask()); + + return model; + } + + public static class ServingEndpointDetailedSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointDetailed value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointDetailedPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } 
+ } + + public static class ServingEndpointDetailedDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointDetailed deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointDetailedPb pb = mapper.readValue(p, ServingEndpointDetailedPb.class); + return ServingEndpointDetailed.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPb.java new file mode 100755 index 000000000..3608317ac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPb.java @@ -0,0 +1,271 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointDetailedPb { + @JsonProperty("ai_gateway") + private AiGatewayConfig aiGateway; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("config") + private EndpointCoreConfigOutput config; + + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("data_plane_info") + private ModelDataPlaneInfo dataPlaneInfo; + + @JsonProperty("endpoint_url") + private String endpointUrl; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("name") + private String name; + + @JsonProperty("pending_config") + private EndpointPendingConfig 
pendingConfig; + + @JsonProperty("permission_level") + private ServingEndpointDetailedPermissionLevel permissionLevel; + + @JsonProperty("route_optimized") + private Boolean routeOptimized; + + @JsonProperty("state") + private EndpointState state; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("task") + private String task; + + public ServingEndpointDetailedPb setAiGateway(AiGatewayConfig aiGateway) { + this.aiGateway = aiGateway; + return this; + } + + public AiGatewayConfig getAiGateway() { + return aiGateway; + } + + public ServingEndpointDetailedPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public ServingEndpointDetailedPb setConfig(EndpointCoreConfigOutput config) { + this.config = config; + return this; + } + + public EndpointCoreConfigOutput getConfig() { + return config; + } + + public ServingEndpointDetailedPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ServingEndpointDetailedPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public ServingEndpointDetailedPb setDataPlaneInfo(ModelDataPlaneInfo dataPlaneInfo) { + this.dataPlaneInfo = dataPlaneInfo; + return this; + } + + public ModelDataPlaneInfo getDataPlaneInfo() { + return dataPlaneInfo; + } + + public ServingEndpointDetailedPb setEndpointUrl(String endpointUrl) { + this.endpointUrl = endpointUrl; + return this; + } + + public String getEndpointUrl() { + return endpointUrl; + } + + public ServingEndpointDetailedPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ServingEndpointDetailedPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + 
this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ServingEndpointDetailedPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ServingEndpointDetailedPb setPendingConfig(EndpointPendingConfig pendingConfig) { + this.pendingConfig = pendingConfig; + return this; + } + + public EndpointPendingConfig getPendingConfig() { + return pendingConfig; + } + + public ServingEndpointDetailedPb setPermissionLevel( + ServingEndpointDetailedPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ServingEndpointDetailedPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public ServingEndpointDetailedPb setRouteOptimized(Boolean routeOptimized) { + this.routeOptimized = routeOptimized; + return this; + } + + public Boolean getRouteOptimized() { + return routeOptimized; + } + + public ServingEndpointDetailedPb setState(EndpointState state) { + this.state = state; + return this; + } + + public EndpointState getState() { + return state; + } + + public ServingEndpointDetailedPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ServingEndpointDetailedPb setTask(String task) { + this.task = task; + return this; + } + + public String getTask() { + return task; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointDetailedPb that = (ServingEndpointDetailedPb) o; + return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(config, that.config) + && Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(creator, that.creator) + && Objects.equals(dataPlaneInfo, 
that.dataPlaneInfo) + && Objects.equals(endpointUrl, that.endpointUrl) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(name, that.name) + && Objects.equals(pendingConfig, that.pendingConfig) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(routeOptimized, that.routeOptimized) + && Objects.equals(state, that.state) + && Objects.equals(tags, that.tags) + && Objects.equals(task, that.task); + } + + @Override + public int hashCode() { + return Objects.hash( + aiGateway, + budgetPolicyId, + config, + creationTimestamp, + creator, + dataPlaneInfo, + endpointUrl, + id, + lastUpdatedTimestamp, + name, + pendingConfig, + permissionLevel, + routeOptimized, + state, + tags, + task); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointDetailedPb.class) + .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) + .add("config", config) + .add("creationTimestamp", creationTimestamp) + .add("creator", creator) + .add("dataPlaneInfo", dataPlaneInfo) + .add("endpointUrl", endpointUrl) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("name", name) + .add("pendingConfig", pendingConfig) + .add("permissionLevel", permissionLevel) + .add("routeOptimized", routeOptimized) + .add("state", state) + .add("tags", tags) + .add("task", task) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPb.java new file mode 100755 index 000000000..13b60e61f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointPb { + @JsonProperty("ai_gateway") + private AiGatewayConfig aiGateway; + + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("config") + private EndpointCoreConfigSummary config; + + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("name") + private String name; + + @JsonProperty("state") + private EndpointState state; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("task") + private String task; + + public ServingEndpointPb setAiGateway(AiGatewayConfig aiGateway) { + this.aiGateway = aiGateway; + return this; + } + + public AiGatewayConfig getAiGateway() { + return aiGateway; + } + + public ServingEndpointPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public ServingEndpointPb setConfig(EndpointCoreConfigSummary config) { + this.config = config; + return this; + } + + public EndpointCoreConfigSummary getConfig() { + return config; + } + + public ServingEndpointPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public ServingEndpointPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public ServingEndpointPb setId(String id) { + this.id = id; + return this; + 
} + + public String getId() { + return id; + } + + public ServingEndpointPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public ServingEndpointPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ServingEndpointPb setState(EndpointState state) { + this.state = state; + return this; + } + + public EndpointState getState() { + return state; + } + + public ServingEndpointPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ServingEndpointPb setTask(String task) { + this.task = task; + return this; + } + + public String getTask() { + return task; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointPb that = (ServingEndpointPb) o; + return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(config, that.config) + && Objects.equals(creationTimestamp, that.creationTimestamp) + && Objects.equals(creator, that.creator) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(name, that.name) + && Objects.equals(state, that.state) + && Objects.equals(tags, that.tags) + && Objects.equals(task, that.task); + } + + @Override + public int hashCode() { + return Objects.hash( + aiGateway, + budgetPolicyId, + config, + creationTimestamp, + creator, + id, + lastUpdatedTimestamp, + name, + state, + tags, + task); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointPb.class) + .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) + .add("config", config) + .add("creationTimestamp", 
creationTimestamp) + .add("creator", creator) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("name", name) + .add("state", state) + .add("tags", tags) + .add("task", task) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java index 5f2f50a35..d53e41b45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ServingEndpointPermission.ServingEndpointPermissionSerializer.class) +@JsonDeserialize(using = ServingEndpointPermission.ServingEndpointPermissionDeserializer.class) public class ServingEndpointPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; public ServingEndpointPermission 
setInherited(Boolean inherited) { @@ -73,4 +81,45 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ServingEndpointPermissionPb toPb() { + ServingEndpointPermissionPb pb = new ServingEndpointPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ServingEndpointPermission fromPb(ServingEndpointPermissionPb pb) { + ServingEndpointPermission model = new ServingEndpointPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ServingEndpointPermissionSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointPermissionDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointPermissionPb pb = mapper.readValue(p, ServingEndpointPermissionPb.class); + return ServingEndpointPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionPb.java new file mode 100755 index 000000000..f882c8c87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private ServingEndpointPermissionLevel permissionLevel; + + public ServingEndpointPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public ServingEndpointPermissionPb setInheritedFromObject( + Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public ServingEndpointPermissionPb setPermissionLevel( + ServingEndpointPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ServingEndpointPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) 
return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointPermissionPb that = (ServingEndpointPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissions.java index 4b5ca056d..9d209a89b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ServingEndpointPermissions.ServingEndpointPermissionsSerializer.class) +@JsonDeserialize(using = ServingEndpointPermissions.ServingEndpointPermissionsDeserializer.class) public class ServingEndpointPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public ServingEndpointPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + ServingEndpointPermissionsPb toPb() { + ServingEndpointPermissionsPb pb = new ServingEndpointPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static ServingEndpointPermissions fromPb(ServingEndpointPermissionsPb pb) { + ServingEndpointPermissions model = new ServingEndpointPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class ServingEndpointPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointPermissionsDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointPermissionsPb pb = mapper.readValue(p, ServingEndpointPermissionsPb.class); + return ServingEndpointPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java index 6d4100238..6c4c0af8e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java @@ -4,17 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + ServingEndpointPermissionsDescription.ServingEndpointPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = + ServingEndpointPermissionsDescription.ServingEndpointPermissionsDescriptionDeserializer + .class) public class ServingEndpointPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; public ServingEndpointPermissionsDescription 
setDescription(String description) { @@ -57,4 +71,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + ServingEndpointPermissionsDescriptionPb toPb() { + ServingEndpointPermissionsDescriptionPb pb = new ServingEndpointPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static ServingEndpointPermissionsDescription fromPb(ServingEndpointPermissionsDescriptionPb pb) { + ServingEndpointPermissionsDescription model = new ServingEndpointPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class ServingEndpointPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointPermissionsDescription deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointPermissionsDescriptionPb pb = + mapper.readValue(p, ServingEndpointPermissionsDescriptionPb.class); + return ServingEndpointPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescriptionPb.java new file mode 100755 index 000000000..a3f046d23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServingEndpointPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private ServingEndpointPermissionLevel permissionLevel; + + public ServingEndpointPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ServingEndpointPermissionsDescriptionPb setPermissionLevel( + ServingEndpointPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public ServingEndpointPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointPermissionsDescriptionPb that = (ServingEndpointPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && 
Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsPb.java new file mode 100755 index 000000000..d921369df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public ServingEndpointPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ServingEndpointPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ServingEndpointPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointPermissionsPb that = (ServingEndpointPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequest.java index d3c6e7454..6ea97d265 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequest.java @@ -4,19 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; 
@Generated +@JsonSerialize( + using = ServingEndpointPermissionsRequest.ServingEndpointPermissionsRequestSerializer.class) +@JsonDeserialize( + using = ServingEndpointPermissionsRequest.ServingEndpointPermissionsRequestDeserializer.class) public class ServingEndpointPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The serving endpoint for which to get or manage permissions. */ - @JsonIgnore private String servingEndpointId; + private String servingEndpointId; public ServingEndpointPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +69,44 @@ public String toString() { .add("servingEndpointId", servingEndpointId) .toString(); } + + ServingEndpointPermissionsRequestPb toPb() { + ServingEndpointPermissionsRequestPb pb = new ServingEndpointPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setServingEndpointId(servingEndpointId); + + return pb; + } + + static ServingEndpointPermissionsRequest fromPb(ServingEndpointPermissionsRequestPb pb) { + ServingEndpointPermissionsRequest model = new ServingEndpointPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setServingEndpointId(pb.getServingEndpointId()); + + return model; + } + + public static class ServingEndpointPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ServingEndpointPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServingEndpointPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServingEndpointPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public ServingEndpointPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServingEndpointPermissionsRequestPb pb = + mapper.readValue(p, ServingEndpointPermissionsRequestPb.class); + return ServingEndpointPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequestPb.java new file mode 100755 index 000000000..637d5d994 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ServingEndpointPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String servingEndpointId; + + public ServingEndpointPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public ServingEndpointPermissionsRequestPb setServingEndpointId(String servingEndpointId) { + this.servingEndpointId = servingEndpointId; + return this; + } + + public String getServingEndpointId() { + return servingEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServingEndpointPermissionsRequestPb that = (ServingEndpointPermissionsRequestPb) o; + return Objects.equals(accessControlList, 
that.accessControlList) + && Objects.equals(servingEndpointId, that.servingEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, servingEndpointId); + } + + @Override + public String toString() { + return new ToStringer(ServingEndpointPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("servingEndpointId", servingEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java index a6506b9dc..1940d9cc5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java @@ -24,7 +24,7 @@ public BuildLogsResponse buildLogs(BuildLogsRequest request) { request.getName(), request.getServedModelName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, BuildLogsResponse.class); } catch (IOException e) { @@ -37,7 +37,7 @@ public ServingEndpointDetailed create(CreateServingEndpoint request) { String path = "/api/2.0/serving-endpoints"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointDetailed.class); @@ -52,7 +52,7 @@ public ServingEndpointDetailed createProvisionedThroughputEndpoint( String path = "/api/2.0/serving-endpoints/pt"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointDetailed.class); @@ -66,7 +66,7 @@ public void delete(DeleteServingEndpointRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -78,7 +78,7 @@ public ExportMetricsResponse exportMetrics(ExportMetricsRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s/metrics", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "text/plain"); return apiClient.execute(req, ExportMetricsResponse.class); } catch (IOException e) { @@ -91,7 +91,7 @@ public ServingEndpointDetailed get(GetServingEndpointRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ServingEndpointDetailed.class); } catch (IOException e) { @@ -104,7 +104,7 @@ public GetOpenApiResponse getOpenApi(GetOpenApiRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s/openapi", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "text/plain"); return apiClient.execute(req, GetOpenApiResponse.class); } catch (IOException e) { @@ -121,7 +121,7 @@ public GetServingEndpointPermissionLevelsResponse getPermissionLevels( request.getServingEndpointId()); try { 
Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetServingEndpointPermissionLevelsResponse.class); } catch (IOException e) { @@ -135,7 +135,7 @@ public ServingEndpointPermissions getPermissions(GetServingEndpointPermissionsRe String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ServingEndpointPermissions.class); } catch (IOException e) { @@ -148,7 +148,7 @@ public HttpRequestResponse httpRequest(ExternalFunctionRequest request) { String path = "/api/2.0/external-function"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "text/plain"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, HttpRequestResponse.class); @@ -177,7 +177,7 @@ public ServerLogsResponse logs(LogsRequest request) { request.getName(), request.getServedModelName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ServerLogsResponse.class); } catch (IOException e) { @@ -190,7 +190,7 @@ public EndpointTags patch(PatchServingEndpointTags request) { String path = String.format("/api/2.0/serving-endpoints/%s/tags", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return 
apiClient.execute(req, EndpointTags.class); @@ -204,7 +204,7 @@ public PutResponse put(PutRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s/rate-limits", request.getName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PutResponse.class); @@ -218,7 +218,7 @@ public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) { String path = String.format("/api/2.0/serving-endpoints/%s/ai-gateway", request.getName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PutAiGatewayResponse.class); @@ -232,7 +232,7 @@ public QueryEndpointResponse query(QueryEndpointInput request) { String path = String.format("/serving-endpoints/%s/invocations", request.getName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, QueryEndpointResponse.class); @@ -247,7 +247,7 @@ public ServingEndpointPermissions setPermissions(ServingEndpointPermissionsReque String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointPermissions.class); @@ 
-261,7 +261,7 @@ public ServingEndpointDetailed updateConfig(EndpointCoreConfigInput request) { String path = String.format("/api/2.0/serving-endpoints/%s/config", request.getName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointDetailed.class); @@ -276,7 +276,7 @@ public ServingEndpointPermissions updatePermissions(ServingEndpointPermissionsRe String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointPermissions.class); @@ -291,7 +291,7 @@ public ServingEndpointDetailed updateProvisionedThroughputEndpointConfig( String path = String.format("/api/2.0/serving-endpoints/pt/%s/config", request.getName()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ServingEndpointDetailed.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java index f90f40712..7cfd4b310 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TrafficConfig.TrafficConfigSerializer.class) +@JsonDeserialize(using = TrafficConfig.TrafficConfigDeserializer.class) public class TrafficConfig { /** The list of routes that define traffic to each served entity. */ - @JsonProperty("routes") private Collection routes; public TrafficConfig setRoutes(Collection routes) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(TrafficConfig.class).add("routes", routes).toString(); } + + TrafficConfigPb toPb() { + TrafficConfigPb pb = new TrafficConfigPb(); + pb.setRoutes(routes); + + return pb; + } + + static TrafficConfig fromPb(TrafficConfigPb pb) { + TrafficConfig model = new TrafficConfig(); + model.setRoutes(pb.getRoutes()); + + return model; + } + + public static class TrafficConfigSerializer extends JsonSerializer { + @Override + public void serialize(TrafficConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrafficConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrafficConfigDeserializer extends JsonDeserializer { + @Override + public TrafficConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrafficConfigPb pb = mapper.readValue(p, TrafficConfigPb.class); + return TrafficConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfigPb.java new file mode 100755 index 000000000..99528ee5e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfigPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TrafficConfigPb { + @JsonProperty("routes") + private Collection routes; + + public TrafficConfigPb setRoutes(Collection routes) { + this.routes = routes; + return this; + } + + public Collection getRoutes() { + return routes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrafficConfigPb that = (TrafficConfigPb) o; + return Objects.equals(routes, that.routes); + } + + @Override + public int hashCode() { + return Objects.hash(routes); + } + + @Override + public String toString() { + return new ToStringer(TrafficConfigPb.class).add("routes", routes).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java index 997aa5fbb..d891359bb 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java @@ -4,18 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateProvisionedThroughputEndpointConfigRequest + .UpdateProvisionedThroughputEndpointConfigRequestSerializer.class) +@JsonDeserialize( + using = + UpdateProvisionedThroughputEndpointConfigRequest + .UpdateProvisionedThroughputEndpointConfigRequestDeserializer.class) public class UpdateProvisionedThroughputEndpointConfigRequest { /** */ - @JsonProperty("config") private PtEndpointCoreConfig config; /** The name of the pt endpoint to update. This field is required. 
*/ - @JsonIgnore private String name; + private String name; public UpdateProvisionedThroughputEndpointConfigRequest setConfig(PtEndpointCoreConfig config) { this.config = config; @@ -56,4 +71,49 @@ public String toString() { .add("name", name) .toString(); } + + UpdateProvisionedThroughputEndpointConfigRequestPb toPb() { + UpdateProvisionedThroughputEndpointConfigRequestPb pb = + new UpdateProvisionedThroughputEndpointConfigRequestPb(); + pb.setConfig(config); + pb.setName(name); + + return pb; + } + + static UpdateProvisionedThroughputEndpointConfigRequest fromPb( + UpdateProvisionedThroughputEndpointConfigRequestPb pb) { + UpdateProvisionedThroughputEndpointConfigRequest model = + new UpdateProvisionedThroughputEndpointConfigRequest(); + model.setConfig(pb.getConfig()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateProvisionedThroughputEndpointConfigRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateProvisionedThroughputEndpointConfigRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateProvisionedThroughputEndpointConfigRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProvisionedThroughputEndpointConfigRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateProvisionedThroughputEndpointConfigRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProvisionedThroughputEndpointConfigRequestPb pb = + mapper.readValue(p, UpdateProvisionedThroughputEndpointConfigRequestPb.class); + return UpdateProvisionedThroughputEndpointConfigRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequestPb.java new file mode 100755 index 000000000..14cde6584 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProvisionedThroughputEndpointConfigRequestPb { + @JsonProperty("config") + private PtEndpointCoreConfig config; + + @JsonIgnore private String name; + + public UpdateProvisionedThroughputEndpointConfigRequestPb setConfig(PtEndpointCoreConfig config) { + this.config = config; + return this; + } + + public PtEndpointCoreConfig getConfig() { + return config; + } + + public UpdateProvisionedThroughputEndpointConfigRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProvisionedThroughputEndpointConfigRequestPb that = + (UpdateProvisionedThroughputEndpointConfigRequestPb) o; + return Objects.equals(config, that.config) && Objects.equals(name, 
that.name); + } + + @Override + public int hashCode() { + return Objects.hash(config, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateProvisionedThroughputEndpointConfigRequestPb.class) + .add("config", config) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java index d35f628e3..2de98bb9f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = V1ResponseChoiceElement.V1ResponseChoiceElementSerializer.class) +@JsonDeserialize(using = V1ResponseChoiceElement.V1ResponseChoiceElementDeserializer.class) public class V1ResponseChoiceElement { /** The finish reason returned by the endpoint. */ - @JsonProperty("finishReason") private String finishReason; /** The index of the choice in the __chat or completions__ response. 
*/ - @JsonProperty("index") private Long index; /** The logprobs returned only by the __completions__ endpoint. */ - @JsonProperty("logprobs") private Long logprobs; /** The message response from the __chat__ endpoint. */ - @JsonProperty("message") private ChatMessage message; /** The text response from the __completions__ endpoint. */ - @JsonProperty("text") private String text; public V1ResponseChoiceElement setFinishReason(String finishReason) { @@ -101,4 +107,49 @@ public String toString() { .add("text", text) .toString(); } + + V1ResponseChoiceElementPb toPb() { + V1ResponseChoiceElementPb pb = new V1ResponseChoiceElementPb(); + pb.setFinishReason(finishReason); + pb.setIndex(index); + pb.setLogprobs(logprobs); + pb.setMessage(message); + pb.setText(text); + + return pb; + } + + static V1ResponseChoiceElement fromPb(V1ResponseChoiceElementPb pb) { + V1ResponseChoiceElement model = new V1ResponseChoiceElement(); + model.setFinishReason(pb.getFinishReason()); + model.setIndex(pb.getIndex()); + model.setLogprobs(pb.getLogprobs()); + model.setMessage(pb.getMessage()); + model.setText(pb.getText()); + + return model; + } + + public static class V1ResponseChoiceElementSerializer + extends JsonSerializer { + @Override + public void serialize( + V1ResponseChoiceElement value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + V1ResponseChoiceElementPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class V1ResponseChoiceElementDeserializer + extends JsonDeserializer { + @Override + public V1ResponseChoiceElement deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + V1ResponseChoiceElementPb pb = mapper.readValue(p, V1ResponseChoiceElementPb.class); + return V1ResponseChoiceElement.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElementPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElementPb.java new file mode 100755 index 000000000..d61d74499 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElementPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class V1ResponseChoiceElementPb { + @JsonProperty("finishReason") + private String finishReason; + + @JsonProperty("index") + private Long index; + + @JsonProperty("logprobs") + private Long logprobs; + + @JsonProperty("message") + private ChatMessage message; + + @JsonProperty("text") + private String text; + + public V1ResponseChoiceElementPb setFinishReason(String finishReason) { + this.finishReason = finishReason; + return this; + } + + public String getFinishReason() { + return finishReason; + } + + public V1ResponseChoiceElementPb setIndex(Long index) { + this.index = index; + return this; + } + + public Long getIndex() { + return index; + } + + public V1ResponseChoiceElementPb setLogprobs(Long logprobs) { + this.logprobs = logprobs; + return this; + } + + public Long getLogprobs() { + return logprobs; + } + + public V1ResponseChoiceElementPb setMessage(ChatMessage message) { + this.message = message; + return this; + } + + public ChatMessage getMessage() { + return message; + } + + public V1ResponseChoiceElementPb setText(String text) { + this.text = text; + 
return this; + } + + public String getText() { + return text; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + V1ResponseChoiceElementPb that = (V1ResponseChoiceElementPb) o; + return Objects.equals(finishReason, that.finishReason) + && Objects.equals(index, that.index) + && Objects.equals(logprobs, that.logprobs) + && Objects.equals(message, that.message) + && Objects.equals(text, that.text); + } + + @Override + public int hashCode() { + return Objects.hash(finishReason, index, logprobs, message, text); + } + + @Override + public String toString() { + return new ToStringer(V1ResponseChoiceElementPb.class) + .add("finishReason", finishReason) + .add("index", index) + .add("logprobs", logprobs) + .add("message", message) + .add("text", text) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java index 4b2251f62..2a40bd1a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountIpAccessEnable.AccountIpAccessEnableSerializer.class) +@JsonDeserialize(using = AccountIpAccessEnable.AccountIpAccessEnableDeserializer.class) public class AccountIpAccessEnable { /** */ - @JsonProperty("acct_ip_acl_enable") private BooleanMessage acctIpAclEnable; /** @@ -21,7 +31,6 @@ public class AccountIpAccessEnable { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class AccountIpAccessEnable { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public AccountIpAccessEnable setAcctIpAclEnable(BooleanMessage acctIpAclEnable) { @@ -83,4 +91,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + AccountIpAccessEnablePb toPb() { + AccountIpAccessEnablePb pb = new AccountIpAccessEnablePb(); + pb.setAcctIpAclEnable(acctIpAclEnable); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static AccountIpAccessEnable fromPb(AccountIpAccessEnablePb pb) { + AccountIpAccessEnable model = new AccountIpAccessEnable(); + model.setAcctIpAclEnable(pb.getAcctIpAclEnable()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class AccountIpAccessEnableSerializer + extends JsonSerializer { + @Override + public void serialize( + AccountIpAccessEnable value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountIpAccessEnablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountIpAccessEnableDeserializer + extends JsonDeserializer { + @Override + public AccountIpAccessEnable deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountIpAccessEnablePb pb = mapper.readValue(p, AccountIpAccessEnablePb.class); + return AccountIpAccessEnable.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnablePb.java new file mode 100755 index 000000000..3f38ee144 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnablePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountIpAccessEnablePb { + @JsonProperty("acct_ip_acl_enable") + private BooleanMessage acctIpAclEnable; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public AccountIpAccessEnablePb setAcctIpAclEnable(BooleanMessage acctIpAclEnable) { + this.acctIpAclEnable = acctIpAclEnable; + return this; + } + + public BooleanMessage getAcctIpAclEnable() { + return acctIpAclEnable; + } + + public AccountIpAccessEnablePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AccountIpAccessEnablePb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountIpAccessEnablePb that = (AccountIpAccessEnablePb) o; + 
return Objects.equals(acctIpAclEnable, that.acctIpAclEnable) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(acctIpAclEnable, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AccountIpAccessEnablePb.class) + .add("acctIpAclEnable", acctIpAclEnable) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java index eedc75e39..bacc535be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java @@ -22,7 +22,7 @@ public CreateIpAccessListResponse create(CreateIpAccessList request) { String.format("/api/2.0/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateIpAccessListResponse.class); @@ -39,7 +39,7 @@ public void delete(DeleteAccountIpAccessListRequest request) { apiClient.configuredAccountID(), request.getIpAccessListId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -54,7 +54,7 @@ public GetIpAccessListResponse get(GetAccountIpAccessListRequest request) { apiClient.configuredAccountID(), request.getIpAccessListId()); 
try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetIpAccessListResponse.class); } catch (IOException e) { @@ -83,7 +83,7 @@ public void replace(ReplaceIpAccessList request) { apiClient.configuredAccountID(), request.getIpAccessListId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ReplaceResponse.class); } catch (IOException e) { @@ -99,7 +99,7 @@ public void update(UpdateIpAccessList request) { apiClient.configuredAccountID(), request.getIpAccessListId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java index d2516835c..44406e40c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccountNetworkPolicy.AccountNetworkPolicySerializer.class) +@JsonDeserialize(using = AccountNetworkPolicy.AccountNetworkPolicyDeserializer.class) public class AccountNetworkPolicy { /** The associated account ID for this Network Policy object. */ - @JsonProperty("account_id") private String accountId; /** The network policies applying for egress traffic. */ - @JsonProperty("egress") private NetworkPolicyEgress egress; /** The unique identifier for the network policy. */ - @JsonProperty("network_policy_id") private String networkPolicyId; public AccountNetworkPolicy setAccountId(String accountId) { @@ -71,4 +79,44 @@ public String toString() { .add("networkPolicyId", networkPolicyId) .toString(); } + + AccountNetworkPolicyPb toPb() { + AccountNetworkPolicyPb pb = new AccountNetworkPolicyPb(); + pb.setAccountId(accountId); + pb.setEgress(egress); + pb.setNetworkPolicyId(networkPolicyId); + + return pb; + } + + static AccountNetworkPolicy fromPb(AccountNetworkPolicyPb pb) { + AccountNetworkPolicy model = new AccountNetworkPolicy(); + model.setAccountId(pb.getAccountId()); + model.setEgress(pb.getEgress()); + model.setNetworkPolicyId(pb.getNetworkPolicyId()); + + return model; + } + + public static class AccountNetworkPolicySerializer extends JsonSerializer { + @Override + public void serialize( + AccountNetworkPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccountNetworkPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccountNetworkPolicyDeserializer + extends JsonDeserializer { + @Override + public AccountNetworkPolicy deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccountNetworkPolicyPb pb = mapper.readValue(p, AccountNetworkPolicyPb.class); + return AccountNetworkPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyPb.java new file mode 100755 index 000000000..e18037bfc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccountNetworkPolicyPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("egress") + private NetworkPolicyEgress egress; + + @JsonProperty("network_policy_id") + private String networkPolicyId; + + public AccountNetworkPolicyPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public AccountNetworkPolicyPb setEgress(NetworkPolicyEgress egress) { + this.egress = egress; + return this; + } + + public NetworkPolicyEgress getEgress() { + return egress; + } + + public AccountNetworkPolicyPb setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountNetworkPolicyPb that 
= (AccountNetworkPolicyPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(egress, that.egress) + && Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(accountId, egress, networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(AccountNetworkPolicyPb.class) + .add("accountId", accountId) + .add("egress", egress) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java index 698c78634..b8171cfd6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AibiDashboardEmbeddingAccessPolicy.AibiDashboardEmbeddingAccessPolicySerializer.class) +@JsonDeserialize( + using = AibiDashboardEmbeddingAccessPolicy.AibiDashboardEmbeddingAccessPolicyDeserializer.class) public class 
AibiDashboardEmbeddingAccessPolicy { /** */ - @JsonProperty("access_policy_type") private AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType; public AibiDashboardEmbeddingAccessPolicy setAccessPolicyType( @@ -42,4 +54,42 @@ public String toString() { .add("accessPolicyType", accessPolicyType) .toString(); } + + AibiDashboardEmbeddingAccessPolicyPb toPb() { + AibiDashboardEmbeddingAccessPolicyPb pb = new AibiDashboardEmbeddingAccessPolicyPb(); + pb.setAccessPolicyType(accessPolicyType); + + return pb; + } + + static AibiDashboardEmbeddingAccessPolicy fromPb(AibiDashboardEmbeddingAccessPolicyPb pb) { + AibiDashboardEmbeddingAccessPolicy model = new AibiDashboardEmbeddingAccessPolicy(); + model.setAccessPolicyType(pb.getAccessPolicyType()); + + return model; + } + + public static class AibiDashboardEmbeddingAccessPolicySerializer + extends JsonSerializer { + @Override + public void serialize( + AibiDashboardEmbeddingAccessPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AibiDashboardEmbeddingAccessPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AibiDashboardEmbeddingAccessPolicyDeserializer + extends JsonDeserializer { + @Override + public AibiDashboardEmbeddingAccessPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AibiDashboardEmbeddingAccessPolicyPb pb = + mapper.readValue(p, AibiDashboardEmbeddingAccessPolicyPb.class); + return AibiDashboardEmbeddingAccessPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java index 460c056e0..8192bab1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java @@ -22,7 +22,7 @@ public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse delete( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.class); } catch (IOException e) { @@ -36,7 +36,7 @@ public AibiDashboardEmbeddingAccessPolicySetting get( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AibiDashboardEmbeddingAccessPolicySetting.class); } catch (IOException e) { @@ -50,7 +50,7 @@ public AibiDashboardEmbeddingAccessPolicySetting update( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", 
"application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AibiDashboardEmbeddingAccessPolicySetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyPb.java new file mode 100755 index 000000000..b7265cf2b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AibiDashboardEmbeddingAccessPolicyPb { + @JsonProperty("access_policy_type") + private AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType; + + public AibiDashboardEmbeddingAccessPolicyPb setAccessPolicyType( + AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType) { + this.accessPolicyType = accessPolicyType; + return this; + } + + public AibiDashboardEmbeddingAccessPolicyAccessPolicyType getAccessPolicyType() { + return accessPolicyType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicyPb that = (AibiDashboardEmbeddingAccessPolicyPb) o; + return Objects.equals(accessPolicyType, that.accessPolicyType); + } + + @Override + public int hashCode() { + return Objects.hash(accessPolicyType); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingAccessPolicyPb.class) + .add("accessPolicyType", accessPolicyType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java index ead0e1b14..bdacbb866 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java @@ -4,13 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + AibiDashboardEmbeddingAccessPolicySetting + .AibiDashboardEmbeddingAccessPolicySettingSerializer.class) +@JsonDeserialize( + using = + AibiDashboardEmbeddingAccessPolicySetting + .AibiDashboardEmbeddingAccessPolicySettingDeserializer.class) public class AibiDashboardEmbeddingAccessPolicySetting { /** */ - @JsonProperty("aibi_dashboard_embedding_access_policy") private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy; /** @@ -21,7 +37,6 @@ public class AibiDashboardEmbeddingAccessPolicySetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. 
*/ - @JsonProperty("etag") private String etag; /** @@ -30,7 +45,6 @@ public class AibiDashboardEmbeddingAccessPolicySetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public AibiDashboardEmbeddingAccessPolicySetting setAibiDashboardEmbeddingAccessPolicy( @@ -85,4 +99,51 @@ public String toString() { .add("settingName", settingName) .toString(); } + + AibiDashboardEmbeddingAccessPolicySettingPb toPb() { + AibiDashboardEmbeddingAccessPolicySettingPb pb = + new AibiDashboardEmbeddingAccessPolicySettingPb(); + pb.setAibiDashboardEmbeddingAccessPolicy(aibiDashboardEmbeddingAccessPolicy); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static AibiDashboardEmbeddingAccessPolicySetting fromPb( + AibiDashboardEmbeddingAccessPolicySettingPb pb) { + AibiDashboardEmbeddingAccessPolicySetting model = + new AibiDashboardEmbeddingAccessPolicySetting(); + model.setAibiDashboardEmbeddingAccessPolicy(pb.getAibiDashboardEmbeddingAccessPolicy()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class AibiDashboardEmbeddingAccessPolicySettingSerializer + extends JsonSerializer { + @Override + public void serialize( + AibiDashboardEmbeddingAccessPolicySetting value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + AibiDashboardEmbeddingAccessPolicySettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AibiDashboardEmbeddingAccessPolicySettingDeserializer + extends JsonDeserializer { + @Override + public AibiDashboardEmbeddingAccessPolicySetting deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AibiDashboardEmbeddingAccessPolicySettingPb pb = + mapper.readValue(p, AibiDashboardEmbeddingAccessPolicySettingPb.class); + return AibiDashboardEmbeddingAccessPolicySetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySettingPb.java new file mode 100755 index 000000000..003ade656 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySettingPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AibiDashboardEmbeddingAccessPolicySettingPb { + @JsonProperty("aibi_dashboard_embedding_access_policy") + private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingAccessPolicySettingPb setAibiDashboardEmbeddingAccessPolicy( + AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy) { + this.aibiDashboardEmbeddingAccessPolicy = aibiDashboardEmbeddingAccessPolicy; + return this; + } + + public AibiDashboardEmbeddingAccessPolicy getAibiDashboardEmbeddingAccessPolicy() { + return aibiDashboardEmbeddingAccessPolicy; + } + + public AibiDashboardEmbeddingAccessPolicySettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AibiDashboardEmbeddingAccessPolicySettingPb setSettingName(String settingName) { + 
this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicySettingPb that = + (AibiDashboardEmbeddingAccessPolicySettingPb) o; + return Objects.equals( + aibiDashboardEmbeddingAccessPolicy, that.aibiDashboardEmbeddingAccessPolicy) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingAccessPolicy, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingAccessPolicySettingPb.class) + .add("aibiDashboardEmbeddingAccessPolicy", aibiDashboardEmbeddingAccessPolicy) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java index ecfa50971..9dc094dd2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + AibiDashboardEmbeddingApprovedDomains.AibiDashboardEmbeddingApprovedDomainsSerializer.class) +@JsonDeserialize( + using = + AibiDashboardEmbeddingApprovedDomains.AibiDashboardEmbeddingApprovedDomainsDeserializer + .class) public class AibiDashboardEmbeddingApprovedDomains { /** */ - @JsonProperty("approved_domains") private Collection approvedDomains; public AibiDashboardEmbeddingApprovedDomains setApprovedDomains( @@ -43,4 +58,42 @@ public String toString() { .add("approvedDomains", approvedDomains) .toString(); } + + AibiDashboardEmbeddingApprovedDomainsPb toPb() { + AibiDashboardEmbeddingApprovedDomainsPb pb = new AibiDashboardEmbeddingApprovedDomainsPb(); + pb.setApprovedDomains(approvedDomains); + + return pb; + } + + static AibiDashboardEmbeddingApprovedDomains fromPb(AibiDashboardEmbeddingApprovedDomainsPb pb) { + AibiDashboardEmbeddingApprovedDomains model = new AibiDashboardEmbeddingApprovedDomains(); + model.setApprovedDomains(pb.getApprovedDomains()); + + return model; + } + + public static class AibiDashboardEmbeddingApprovedDomainsSerializer + extends JsonSerializer { + @Override + public void serialize( + AibiDashboardEmbeddingApprovedDomains value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AibiDashboardEmbeddingApprovedDomainsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AibiDashboardEmbeddingApprovedDomainsDeserializer + extends JsonDeserializer { + @Override + public AibiDashboardEmbeddingApprovedDomains deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AibiDashboardEmbeddingApprovedDomainsPb pb = + mapper.readValue(p, AibiDashboardEmbeddingApprovedDomainsPb.class); + return AibiDashboardEmbeddingApprovedDomains.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java index 983226018..fc8a11346 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java @@ -23,7 +23,7 @@ public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse delete( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute( req, DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.class); @@ -38,7 +38,7 @@ public AibiDashboardEmbeddingApprovedDomainsSetting get( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AibiDashboardEmbeddingApprovedDomainsSetting.class); } catch (IOException e) { @@ -52,7 +52,7 @@ public AibiDashboardEmbeddingApprovedDomainsSetting update( String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AibiDashboardEmbeddingApprovedDomainsSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsPb.java new file mode 100755 index 000000000..828ebabb2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AibiDashboardEmbeddingApprovedDomainsPb { + @JsonProperty("approved_domains") + private Collection approvedDomains; + + public AibiDashboardEmbeddingApprovedDomainsPb setApprovedDomains( + Collection approvedDomains) { + this.approvedDomains = approvedDomains; + return this; + } + + public Collection getApprovedDomains() { + return approvedDomains; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomainsPb that = (AibiDashboardEmbeddingApprovedDomainsPb) o; + return Objects.equals(approvedDomains, that.approvedDomains); + } + + @Override + public int hashCode() { + return Objects.hash(approvedDomains); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomainsPb.class) + .add("approvedDomains", approvedDomains) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java index 14c060819..67e826849 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java @@ -4,13 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + AibiDashboardEmbeddingApprovedDomainsSetting + .AibiDashboardEmbeddingApprovedDomainsSettingSerializer.class) +@JsonDeserialize( + using = + AibiDashboardEmbeddingApprovedDomainsSetting + .AibiDashboardEmbeddingApprovedDomainsSettingDeserializer.class) public class AibiDashboardEmbeddingApprovedDomainsSetting { /** */ - @JsonProperty("aibi_dashboard_embedding_approved_domains") private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains; /** @@ -21,7 +37,6 @@ public class AibiDashboardEmbeddingApprovedDomainsSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. 
*/ - @JsonProperty("etag") private String etag; /** @@ -30,7 +45,6 @@ public class AibiDashboardEmbeddingApprovedDomainsSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public AibiDashboardEmbeddingApprovedDomainsSetting setAibiDashboardEmbeddingApprovedDomains( @@ -86,4 +100,51 @@ public String toString() { .add("settingName", settingName) .toString(); } + + AibiDashboardEmbeddingApprovedDomainsSettingPb toPb() { + AibiDashboardEmbeddingApprovedDomainsSettingPb pb = + new AibiDashboardEmbeddingApprovedDomainsSettingPb(); + pb.setAibiDashboardEmbeddingApprovedDomains(aibiDashboardEmbeddingApprovedDomains); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static AibiDashboardEmbeddingApprovedDomainsSetting fromPb( + AibiDashboardEmbeddingApprovedDomainsSettingPb pb) { + AibiDashboardEmbeddingApprovedDomainsSetting model = + new AibiDashboardEmbeddingApprovedDomainsSetting(); + model.setAibiDashboardEmbeddingApprovedDomains(pb.getAibiDashboardEmbeddingApprovedDomains()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class AibiDashboardEmbeddingApprovedDomainsSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + AibiDashboardEmbeddingApprovedDomainsSetting value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + AibiDashboardEmbeddingApprovedDomainsSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AibiDashboardEmbeddingApprovedDomainsSettingDeserializer + extends JsonDeserializer { + @Override + public AibiDashboardEmbeddingApprovedDomainsSetting deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AibiDashboardEmbeddingApprovedDomainsSettingPb pb = + mapper.readValue(p, AibiDashboardEmbeddingApprovedDomainsSettingPb.class); + return AibiDashboardEmbeddingApprovedDomainsSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSettingPb.java new file mode 100755 index 000000000..47dc38268 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSettingPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AibiDashboardEmbeddingApprovedDomainsSettingPb { + @JsonProperty("aibi_dashboard_embedding_approved_domains") + private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingApprovedDomainsSettingPb setAibiDashboardEmbeddingApprovedDomains( + AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains) { + this.aibiDashboardEmbeddingApprovedDomains = aibiDashboardEmbeddingApprovedDomains; + return this; + } + + public AibiDashboardEmbeddingApprovedDomains getAibiDashboardEmbeddingApprovedDomains() { + return aibiDashboardEmbeddingApprovedDomains; + } + + public AibiDashboardEmbeddingApprovedDomainsSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public 
AibiDashboardEmbeddingApprovedDomainsSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomainsSettingPb that = + (AibiDashboardEmbeddingApprovedDomainsSettingPb) o; + return Objects.equals( + aibiDashboardEmbeddingApprovedDomains, that.aibiDashboardEmbeddingApprovedDomains) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingApprovedDomains, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomainsSettingPb.class) + .add("aibiDashboardEmbeddingApprovedDomains", aibiDashboardEmbeddingApprovedDomains) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java index 381e6963e..8beafb683 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java @@ -21,7 +21,7 @@ public AutomaticClusterUpdateSetting get(GetAutomaticClusterUpdateSettingRequest String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AutomaticClusterUpdateSetting.class); } catch (IOException e) { @@ -34,7 +34,7 @@ 
public AutomaticClusterUpdateSetting update(UpdateAutomaticClusterUpdateSettingR String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AutomaticClusterUpdateSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java index 18645e76b..1118e0afa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AutomaticClusterUpdateSetting.AutomaticClusterUpdateSettingSerializer.class) +@JsonDeserialize( + using = AutomaticClusterUpdateSetting.AutomaticClusterUpdateSettingDeserializer.class) public class AutomaticClusterUpdateSetting { /** */ - 
@JsonProperty("automatic_cluster_update_workspace") private ClusterAutoRestartMessage automaticClusterUpdateWorkspace; /** @@ -21,7 +32,6 @@ public class AutomaticClusterUpdateSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +40,6 @@ public class AutomaticClusterUpdateSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public AutomaticClusterUpdateSetting setAutomaticClusterUpdateWorkspace( @@ -84,4 +93,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + AutomaticClusterUpdateSettingPb toPb() { + AutomaticClusterUpdateSettingPb pb = new AutomaticClusterUpdateSettingPb(); + pb.setAutomaticClusterUpdateWorkspace(automaticClusterUpdateWorkspace); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static AutomaticClusterUpdateSetting fromPb(AutomaticClusterUpdateSettingPb pb) { + AutomaticClusterUpdateSetting model = new AutomaticClusterUpdateSetting(); + model.setAutomaticClusterUpdateWorkspace(pb.getAutomaticClusterUpdateWorkspace()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class AutomaticClusterUpdateSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + AutomaticClusterUpdateSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AutomaticClusterUpdateSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AutomaticClusterUpdateSettingDeserializer + extends JsonDeserializer { + @Override + public AutomaticClusterUpdateSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and 
it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AutomaticClusterUpdateSettingPb pb = + mapper.readValue(p, AutomaticClusterUpdateSettingPb.class); + return AutomaticClusterUpdateSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSettingPb.java new file mode 100755 index 000000000..58e4eee9b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSettingPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AutomaticClusterUpdateSettingPb { + @JsonProperty("automatic_cluster_update_workspace") + private ClusterAutoRestartMessage automaticClusterUpdateWorkspace; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public AutomaticClusterUpdateSettingPb setAutomaticClusterUpdateWorkspace( + ClusterAutoRestartMessage automaticClusterUpdateWorkspace) { + this.automaticClusterUpdateWorkspace = automaticClusterUpdateWorkspace; + return this; + } + + public ClusterAutoRestartMessage getAutomaticClusterUpdateWorkspace() { + return automaticClusterUpdateWorkspace; + } + + public AutomaticClusterUpdateSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AutomaticClusterUpdateSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutomaticClusterUpdateSettingPb that = (AutomaticClusterUpdateSettingPb) o; + return Objects.equals(automaticClusterUpdateWorkspace, that.automaticClusterUpdateWorkspace) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(automaticClusterUpdateWorkspace, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AutomaticClusterUpdateSettingPb.class) + .add("automaticClusterUpdateWorkspace", automaticClusterUpdateWorkspace) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java index 6eed151d3..44dd38dcd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = BooleanMessage.BooleanMessageSerializer.class) 
+@JsonDeserialize(using = BooleanMessage.BooleanMessageDeserializer.class) public class BooleanMessage { /** */ - @JsonProperty("value") private Boolean value; public BooleanMessage setValue(Boolean value) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(BooleanMessage.class).add("value", value).toString(); } + + BooleanMessagePb toPb() { + BooleanMessagePb pb = new BooleanMessagePb(); + pb.setValue(value); + + return pb; + } + + static BooleanMessage fromPb(BooleanMessagePb pb) { + BooleanMessage model = new BooleanMessage(); + model.setValue(pb.getValue()); + + return model; + } + + public static class BooleanMessageSerializer extends JsonSerializer { + @Override + public void serialize(BooleanMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BooleanMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BooleanMessageDeserializer extends JsonDeserializer { + @Override + public BooleanMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BooleanMessagePb pb = mapper.readValue(p, BooleanMessagePb.class); + return BooleanMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessagePb.java new file mode 100755 index 000000000..cb30835b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessagePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class BooleanMessagePb { + @JsonProperty("value") + private Boolean value; + + public BooleanMessagePb setValue(Boolean value) { + this.value = value; + return this; + } + + public Boolean getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BooleanMessagePb that = (BooleanMessagePb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(BooleanMessagePb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java index 751529e58..0054a0ca1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClusterAutoRestartMessage.ClusterAutoRestartMessageSerializer.class) +@JsonDeserialize(using = ClusterAutoRestartMessage.ClusterAutoRestartMessageDeserializer.class) public class ClusterAutoRestartMessage { /** */ - @JsonProperty("can_toggle") private Boolean canToggle; /** */ - @JsonProperty("enabled") private Boolean enabled; /** @@ -24,15 +33,12 @@ public class ClusterAutoRestartMessage { * message to the customer with the additional details. For example, using these details we can * check why exactly the feature is disabled for this customer. */ - @JsonProperty("enablement_details") private ClusterAutoRestartMessageEnablementDetails enablementDetails; /** */ - @JsonProperty("maintenance_window") private ClusterAutoRestartMessageMaintenanceWindow maintenanceWindow; /** */ - @JsonProperty("restart_even_if_no_updates_available") private Boolean restartEvenIfNoUpdatesAvailable; public ClusterAutoRestartMessage setCanToggle(Boolean canToggle) { @@ -111,4 +117,49 @@ public String toString() { .add("restartEvenIfNoUpdatesAvailable", restartEvenIfNoUpdatesAvailable) .toString(); } + + ClusterAutoRestartMessagePb toPb() { + ClusterAutoRestartMessagePb pb = new ClusterAutoRestartMessagePb(); + pb.setCanToggle(canToggle); + pb.setEnabled(enabled); + pb.setEnablementDetails(enablementDetails); + pb.setMaintenanceWindow(maintenanceWindow); + pb.setRestartEvenIfNoUpdatesAvailable(restartEvenIfNoUpdatesAvailable); + + return pb; + } + + static ClusterAutoRestartMessage fromPb(ClusterAutoRestartMessagePb pb) { + ClusterAutoRestartMessage model = new ClusterAutoRestartMessage(); + model.setCanToggle(pb.getCanToggle()); + model.setEnabled(pb.getEnabled()); + model.setEnablementDetails(pb.getEnablementDetails()); + model.setMaintenanceWindow(pb.getMaintenanceWindow()); + 
model.setRestartEvenIfNoUpdatesAvailable(pb.getRestartEvenIfNoUpdatesAvailable()); + + return model; + } + + public static class ClusterAutoRestartMessageSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAutoRestartMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClusterAutoRestartMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAutoRestartMessageDeserializer + extends JsonDeserializer { + @Override + public ClusterAutoRestartMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAutoRestartMessagePb pb = mapper.readValue(p, ClusterAutoRestartMessagePb.class); + return ClusterAutoRestartMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java index 9f8a4e2c6..a97dd41fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -15,20 +24,25 @@ * the feature is disabled for this customer. */ @Generated +@JsonSerialize( + using = + ClusterAutoRestartMessageEnablementDetails + .ClusterAutoRestartMessageEnablementDetailsSerializer.class) +@JsonDeserialize( + using = + ClusterAutoRestartMessageEnablementDetails + .ClusterAutoRestartMessageEnablementDetailsDeserializer.class) public class ClusterAutoRestartMessageEnablementDetails { /** The feature is force enabled if compliance mode is active */ - @JsonProperty("forced_for_compliance_mode") private Boolean forcedForComplianceMode; /** * The feature is unavailable if the corresponding entitlement disabled (see * getShieldEntitlementEnable) */ - @JsonProperty("unavailable_for_disabled_entitlement") private Boolean unavailableForDisabledEntitlement; /** The feature is unavailable if the customer doesn't have enterprise tier */ - @JsonProperty("unavailable_for_non_enterprise_tier") private Boolean unavailableForNonEnterpriseTier; public ClusterAutoRestartMessageEnablementDetails setForcedForComplianceMode( @@ -88,4 +102,51 @@ public String toString() { .add("unavailableForNonEnterpriseTier", unavailableForNonEnterpriseTier) .toString(); } + + ClusterAutoRestartMessageEnablementDetailsPb toPb() { + ClusterAutoRestartMessageEnablementDetailsPb pb = + new ClusterAutoRestartMessageEnablementDetailsPb(); + pb.setForcedForComplianceMode(forcedForComplianceMode); + pb.setUnavailableForDisabledEntitlement(unavailableForDisabledEntitlement); + pb.setUnavailableForNonEnterpriseTier(unavailableForNonEnterpriseTier); + + return pb; + } + + static ClusterAutoRestartMessageEnablementDetails fromPb( + ClusterAutoRestartMessageEnablementDetailsPb pb) { + ClusterAutoRestartMessageEnablementDetails model = + new 
ClusterAutoRestartMessageEnablementDetails(); + model.setForcedForComplianceMode(pb.getForcedForComplianceMode()); + model.setUnavailableForDisabledEntitlement(pb.getUnavailableForDisabledEntitlement()); + model.setUnavailableForNonEnterpriseTier(pb.getUnavailableForNonEnterpriseTier()); + + return model; + } + + public static class ClusterAutoRestartMessageEnablementDetailsSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAutoRestartMessageEnablementDetails value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ClusterAutoRestartMessageEnablementDetailsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAutoRestartMessageEnablementDetailsDeserializer + extends JsonDeserializer { + @Override + public ClusterAutoRestartMessageEnablementDetails deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAutoRestartMessageEnablementDetailsPb pb = + mapper.readValue(p, ClusterAutoRestartMessageEnablementDetailsPb.class); + return ClusterAutoRestartMessageEnablementDetails.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetailsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetailsPb.java new file mode 100755 index 000000000..caa3b9018 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetailsPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Contains an information about the enablement status judging (e.g. whether the enterprise tier is + * enabled) This is only additional information that MUST NOT be used to decide whether the setting + * is enabled or not. This is intended to use only for purposes like showing an error message to the + * customer with the additional details. For example, using these details we can check why exactly + * the feature is disabled for this customer. + */ +@Generated +class ClusterAutoRestartMessageEnablementDetailsPb { + @JsonProperty("forced_for_compliance_mode") + private Boolean forcedForComplianceMode; + + @JsonProperty("unavailable_for_disabled_entitlement") + private Boolean unavailableForDisabledEntitlement; + + @JsonProperty("unavailable_for_non_enterprise_tier") + private Boolean unavailableForNonEnterpriseTier; + + public ClusterAutoRestartMessageEnablementDetailsPb setForcedForComplianceMode( + Boolean forcedForComplianceMode) { + this.forcedForComplianceMode = forcedForComplianceMode; + return this; + } + + public Boolean getForcedForComplianceMode() { + return forcedForComplianceMode; + } + + public ClusterAutoRestartMessageEnablementDetailsPb setUnavailableForDisabledEntitlement( + Boolean unavailableForDisabledEntitlement) { + this.unavailableForDisabledEntitlement = unavailableForDisabledEntitlement; + return this; + } + + public Boolean getUnavailableForDisabledEntitlement() { + return unavailableForDisabledEntitlement; + } + + public ClusterAutoRestartMessageEnablementDetailsPb setUnavailableForNonEnterpriseTier( + Boolean unavailableForNonEnterpriseTier) { + this.unavailableForNonEnterpriseTier = unavailableForNonEnterpriseTier; + return this; + } + + public Boolean getUnavailableForNonEnterpriseTier() { + return 
unavailableForNonEnterpriseTier; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAutoRestartMessageEnablementDetailsPb that = + (ClusterAutoRestartMessageEnablementDetailsPb) o; + return Objects.equals(forcedForComplianceMode, that.forcedForComplianceMode) + && Objects.equals(unavailableForDisabledEntitlement, that.unavailableForDisabledEntitlement) + && Objects.equals(unavailableForNonEnterpriseTier, that.unavailableForNonEnterpriseTier); + } + + @Override + public int hashCode() { + return Objects.hash( + forcedForComplianceMode, + unavailableForDisabledEntitlement, + unavailableForNonEnterpriseTier); + } + + @Override + public String toString() { + return new ToStringer(ClusterAutoRestartMessageEnablementDetailsPb.class) + .add("forcedForComplianceMode", forcedForComplianceMode) + .add("unavailableForDisabledEntitlement", unavailableForDisabledEntitlement) + .add("unavailableForNonEnterpriseTier", unavailableForNonEnterpriseTier) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindow.java index cdef6ec8d..ca90b524f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindow.java @@ -4,13 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + ClusterAutoRestartMessageMaintenanceWindow + .ClusterAutoRestartMessageMaintenanceWindowSerializer.class) +@JsonDeserialize( + using = + ClusterAutoRestartMessageMaintenanceWindow + .ClusterAutoRestartMessageMaintenanceWindowDeserializer.class) public class ClusterAutoRestartMessageMaintenanceWindow { /** */ - @JsonProperty("week_day_based_schedule") private ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule weekDayBasedSchedule; public ClusterAutoRestartMessageMaintenanceWindow setWeekDayBasedSchedule( @@ -43,4 +59,47 @@ public String toString() { .add("weekDayBasedSchedule", weekDayBasedSchedule) .toString(); } + + ClusterAutoRestartMessageMaintenanceWindowPb toPb() { + ClusterAutoRestartMessageMaintenanceWindowPb pb = + new ClusterAutoRestartMessageMaintenanceWindowPb(); + pb.setWeekDayBasedSchedule(weekDayBasedSchedule); + + return pb; + } + + static ClusterAutoRestartMessageMaintenanceWindow fromPb( + ClusterAutoRestartMessageMaintenanceWindowPb pb) { + ClusterAutoRestartMessageMaintenanceWindow model = + new ClusterAutoRestartMessageMaintenanceWindow(); + model.setWeekDayBasedSchedule(pb.getWeekDayBasedSchedule()); + + return model; + } + + public static class ClusterAutoRestartMessageMaintenanceWindowSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAutoRestartMessageMaintenanceWindow value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ClusterAutoRestartMessageMaintenanceWindowPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
ClusterAutoRestartMessageMaintenanceWindowDeserializer + extends JsonDeserializer { + @Override + public ClusterAutoRestartMessageMaintenanceWindow deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAutoRestartMessageMaintenanceWindowPb pb = + mapper.readValue(p, ClusterAutoRestartMessageMaintenanceWindowPb.class); + return ClusterAutoRestartMessageMaintenanceWindow.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowPb.java new file mode 100755 index 000000000..7dc10aef7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterAutoRestartMessageMaintenanceWindowPb { + @JsonProperty("week_day_based_schedule") + private ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule weekDayBasedSchedule; + + public ClusterAutoRestartMessageMaintenanceWindowPb setWeekDayBasedSchedule( + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule weekDayBasedSchedule) { + this.weekDayBasedSchedule = weekDayBasedSchedule; + return this; + } + + public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule getWeekDayBasedSchedule() { + return weekDayBasedSchedule; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAutoRestartMessageMaintenanceWindowPb that = + (ClusterAutoRestartMessageMaintenanceWindowPb) o; + return Objects.equals(weekDayBasedSchedule, that.weekDayBasedSchedule); + } + + @Override + public int hashCode() { + return Objects.hash(weekDayBasedSchedule); + } + + @Override + public String toString() { + return new ToStringer(ClusterAutoRestartMessageMaintenanceWindowPb.class) + .add("weekDayBasedSchedule", weekDayBasedSchedule) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule.java index b4b7b3ced..6c7b3b2ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule + .ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedScheduleSerializer.class) +@JsonDeserialize( + using = + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule + .ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedScheduleDeserializer.class) public class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule { /** */ - @JsonProperty("day_of_week") private ClusterAutoRestartMessageMaintenanceWindowDayOfWeek dayOfWeek; /** */ - @JsonProperty("frequency") private ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency frequency; /** */ - @JsonProperty("window_start_time") private ClusterAutoRestartMessageMaintenanceWindowWindowStartTime windowStartTime; public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule setDayOfWeek( @@ -75,4 +89,52 @@ public String toString() { .add("windowStartTime", windowStartTime) .toString(); } + + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb toPb() { + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb 
pb = + new ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb(); + pb.setDayOfWeek(dayOfWeek); + pb.setFrequency(frequency); + pb.setWindowStartTime(windowStartTime); + + return pb; + } + + static ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule fromPb( + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb pb) { + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule model = + new ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule(); + model.setDayOfWeek(pb.getDayOfWeek()); + model.setFrequency(pb.getFrequency()); + model.setWindowStartTime(pb.getWindowStartTime()); + + return model; + } + + public static class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedScheduleSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedScheduleDeserializer + extends JsonDeserializer { + @Override + public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb pb = + mapper.readValue( + p, ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb.class); + return ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb.java new file mode 100755 index 000000000..36e0f3812 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb { + @JsonProperty("day_of_week") + private ClusterAutoRestartMessageMaintenanceWindowDayOfWeek dayOfWeek; + + @JsonProperty("frequency") + private ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency frequency; + + @JsonProperty("window_start_time") + private ClusterAutoRestartMessageMaintenanceWindowWindowStartTime windowStartTime; + + public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb setDayOfWeek( + ClusterAutoRestartMessageMaintenanceWindowDayOfWeek dayOfWeek) { + this.dayOfWeek = dayOfWeek; + return this; + } + + public ClusterAutoRestartMessageMaintenanceWindowDayOfWeek getDayOfWeek() { + return dayOfWeek; + } + + public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb setFrequency( + 
ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency frequency) { + this.frequency = frequency; + return this; + } + + public ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency getFrequency() { + return frequency; + } + + public ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb setWindowStartTime( + ClusterAutoRestartMessageMaintenanceWindowWindowStartTime windowStartTime) { + this.windowStartTime = windowStartTime; + return this; + } + + public ClusterAutoRestartMessageMaintenanceWindowWindowStartTime getWindowStartTime() { + return windowStartTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb that = + (ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb) o; + return Objects.equals(dayOfWeek, that.dayOfWeek) + && Objects.equals(frequency, that.frequency) + && Objects.equals(windowStartTime, that.windowStartTime); + } + + @Override + public int hashCode() { + return Objects.hash(dayOfWeek, frequency, windowStartTime); + } + + @Override + public String toString() { + return new ToStringer(ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedulePb.class) + .add("dayOfWeek", dayOfWeek) + .add("frequency", frequency) + .add("windowStartTime", windowStartTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTime.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTime.java index 35107cdc0..b232abbc1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTime.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTime.java @@ -4,17 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + ClusterAutoRestartMessageMaintenanceWindowWindowStartTime + .ClusterAutoRestartMessageMaintenanceWindowWindowStartTimeSerializer.class) +@JsonDeserialize( + using = + ClusterAutoRestartMessageMaintenanceWindowWindowStartTime + .ClusterAutoRestartMessageMaintenanceWindowWindowStartTimeDeserializer.class) public class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime { /** */ - @JsonProperty("hours") private Long hours; /** */ - @JsonProperty("minutes") private Long minutes; public ClusterAutoRestartMessageMaintenanceWindowWindowStartTime setHours(Long hours) { @@ -56,4 +71,49 @@ public String toString() { .add("minutes", minutes) .toString(); } + + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb toPb() { + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb pb = + new ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb(); + pb.setHours(hours); + pb.setMinutes(minutes); + + return pb; + } + + static ClusterAutoRestartMessageMaintenanceWindowWindowStartTime fromPb( + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb pb) { + 
ClusterAutoRestartMessageMaintenanceWindowWindowStartTime model = + new ClusterAutoRestartMessageMaintenanceWindowWindowStartTime(); + model.setHours(pb.getHours()); + model.setMinutes(pb.getMinutes()); + + return model; + } + + public static class ClusterAutoRestartMessageMaintenanceWindowWindowStartTimeSerializer + extends JsonSerializer { + @Override + public void serialize( + ClusterAutoRestartMessageMaintenanceWindowWindowStartTime value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClusterAutoRestartMessageMaintenanceWindowWindowStartTimeDeserializer + extends JsonDeserializer { + @Override + public ClusterAutoRestartMessageMaintenanceWindowWindowStartTime deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb pb = + mapper.readValue(p, ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb.class); + return ClusterAutoRestartMessageMaintenanceWindowWindowStartTime.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb.java new file mode 100755 index 000000000..bf14b2716 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb { + @JsonProperty("hours") + private Long hours; + + @JsonProperty("minutes") + private Long minutes; + + public ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb setHours(Long hours) { + this.hours = hours; + return this; + } + + public Long getHours() { + return hours; + } + + public ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb setMinutes(Long minutes) { + this.minutes = minutes; + return this; + } + + public Long getMinutes() { + return minutes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb that = + (ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb) o; + return Objects.equals(hours, that.hours) && Objects.equals(minutes, that.minutes); + } + + @Override + public int hashCode() { + return Objects.hash(hours, minutes); + } + + @Override + public String toString() { + return new ToStringer(ClusterAutoRestartMessageMaintenanceWindowWindowStartTimePb.class) + .add("hours", hours) + .add("minutes", minutes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessagePb.java new file mode 100755 index 000000000..42d4d00e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessagePb.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClusterAutoRestartMessagePb { + @JsonProperty("can_toggle") + private Boolean canToggle; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("enablement_details") + private ClusterAutoRestartMessageEnablementDetails enablementDetails; + + @JsonProperty("maintenance_window") + private ClusterAutoRestartMessageMaintenanceWindow maintenanceWindow; + + @JsonProperty("restart_even_if_no_updates_available") + private Boolean restartEvenIfNoUpdatesAvailable; + + public ClusterAutoRestartMessagePb setCanToggle(Boolean canToggle) { + this.canToggle = canToggle; + return this; + } + + public Boolean getCanToggle() { + return canToggle; + } + + public ClusterAutoRestartMessagePb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public ClusterAutoRestartMessagePb setEnablementDetails( + ClusterAutoRestartMessageEnablementDetails enablementDetails) { + this.enablementDetails = enablementDetails; + return this; + } + + public ClusterAutoRestartMessageEnablementDetails getEnablementDetails() { + return enablementDetails; + } + + public ClusterAutoRestartMessagePb setMaintenanceWindow( + ClusterAutoRestartMessageMaintenanceWindow maintenanceWindow) { + this.maintenanceWindow = maintenanceWindow; + return this; + } + + public ClusterAutoRestartMessageMaintenanceWindow getMaintenanceWindow() { + return maintenanceWindow; + } + + public ClusterAutoRestartMessagePb setRestartEvenIfNoUpdatesAvailable( + Boolean restartEvenIfNoUpdatesAvailable) { + this.restartEvenIfNoUpdatesAvailable = restartEvenIfNoUpdatesAvailable; + return this; + } + + public Boolean getRestartEvenIfNoUpdatesAvailable() { + return restartEvenIfNoUpdatesAvailable; + } 
+ + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterAutoRestartMessagePb that = (ClusterAutoRestartMessagePb) o; + return Objects.equals(canToggle, that.canToggle) + && Objects.equals(enabled, that.enabled) + && Objects.equals(enablementDetails, that.enablementDetails) + && Objects.equals(maintenanceWindow, that.maintenanceWindow) + && Objects.equals(restartEvenIfNoUpdatesAvailable, that.restartEvenIfNoUpdatesAvailable); + } + + @Override + public int hashCode() { + return Objects.hash( + canToggle, enabled, enablementDetails, maintenanceWindow, restartEvenIfNoUpdatesAvailable); + } + + @Override + public String toString() { + return new ToStringer(ClusterAutoRestartMessagePb.class) + .add("canToggle", canToggle) + .add("enabled", enabled) + .add("enablementDetails", enablementDetails) + .add("maintenanceWindow", maintenanceWindow) + .add("restartEvenIfNoUpdatesAvailable", restartEvenIfNoUpdatesAvailable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java index 53a333c2b..617fbf692 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfile.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** SHIELD feature: CSP */ @Generated +@JsonSerialize(using = ComplianceSecurityProfile.ComplianceSecurityProfileSerializer.class) +@JsonDeserialize(using = ComplianceSecurityProfile.ComplianceSecurityProfileDeserializer.class) public class ComplianceSecurityProfile { /** Set by customers when they request Compliance Security Profile (CSP) */ - @JsonProperty("compliance_standards") private Collection complianceStandards; /** */ - @JsonProperty("is_enabled") private Boolean isEnabled; public ComplianceSecurityProfile setComplianceStandards( @@ -59,4 +68,43 @@ public String toString() { .add("isEnabled", isEnabled) .toString(); } + + ComplianceSecurityProfilePb toPb() { + ComplianceSecurityProfilePb pb = new ComplianceSecurityProfilePb(); + pb.setComplianceStandards(complianceStandards); + pb.setIsEnabled(isEnabled); + + return pb; + } + + static ComplianceSecurityProfile fromPb(ComplianceSecurityProfilePb pb) { + ComplianceSecurityProfile model = new ComplianceSecurityProfile(); + model.setComplianceStandards(pb.getComplianceStandards()); + model.setIsEnabled(pb.getIsEnabled()); + + return model; + } + + public static class ComplianceSecurityProfileSerializer + extends JsonSerializer { + @Override + public void serialize( + ComplianceSecurityProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComplianceSecurityProfilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComplianceSecurityProfileDeserializer + extends JsonDeserializer { + @Override + public ComplianceSecurityProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComplianceSecurityProfilePb pb = mapper.readValue(p, ComplianceSecurityProfilePb.class); + return ComplianceSecurityProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java index 59531da21..f797fb18d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java @@ -21,7 +21,7 @@ public ComplianceSecurityProfileSetting get(GetComplianceSecurityProfileSettingR String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ComplianceSecurityProfileSetting.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public ComplianceSecurityProfileSetting update( String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ComplianceSecurityProfileSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfilePb.java new file mode 100755 index 000000000..0a5fa3f8d --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfilePb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** SHIELD feature: CSP */ +@Generated +class ComplianceSecurityProfilePb { + @JsonProperty("compliance_standards") + private Collection complianceStandards; + + @JsonProperty("is_enabled") + private Boolean isEnabled; + + public ComplianceSecurityProfilePb setComplianceStandards( + Collection complianceStandards) { + this.complianceStandards = complianceStandards; + return this; + } + + public Collection getComplianceStandards() { + return complianceStandards; + } + + public ComplianceSecurityProfilePb setIsEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled; + return this; + } + + public Boolean getIsEnabled() { + return isEnabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComplianceSecurityProfilePb that = (ComplianceSecurityProfilePb) o; + return Objects.equals(complianceStandards, that.complianceStandards) + && Objects.equals(isEnabled, that.isEnabled); + } + + @Override + public int hashCode() { + return Objects.hash(complianceStandards, isEnabled); + } + + @Override + public String toString() { + return new ToStringer(ComplianceSecurityProfilePb.class) + .add("complianceStandards", complianceStandards) + .add("isEnabled", isEnabled) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java index 
266faa291..fe781e202 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ComplianceSecurityProfileSetting.ComplianceSecurityProfileSettingSerializer.class) +@JsonDeserialize( + using = ComplianceSecurityProfileSetting.ComplianceSecurityProfileSettingDeserializer.class) public class ComplianceSecurityProfileSetting { /** SHIELD feature: CSP */ - @JsonProperty("compliance_security_profile_workspace") private ComplianceSecurityProfile complianceSecurityProfileWorkspace; /** @@ -21,7 +33,6 @@ public class ComplianceSecurityProfileSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +41,6 @@ public class ComplianceSecurityProfileSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public ComplianceSecurityProfileSetting setComplianceSecurityProfileWorkspace( @@ -85,4 +95,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + ComplianceSecurityProfileSettingPb toPb() { + ComplianceSecurityProfileSettingPb pb = new ComplianceSecurityProfileSettingPb(); + pb.setComplianceSecurityProfileWorkspace(complianceSecurityProfileWorkspace); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static ComplianceSecurityProfileSetting fromPb(ComplianceSecurityProfileSettingPb pb) { + ComplianceSecurityProfileSetting model = new ComplianceSecurityProfileSetting(); + model.setComplianceSecurityProfileWorkspace(pb.getComplianceSecurityProfileWorkspace()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class ComplianceSecurityProfileSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + ComplianceSecurityProfileSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ComplianceSecurityProfileSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ComplianceSecurityProfileSettingDeserializer + extends JsonDeserializer { + @Override + public ComplianceSecurityProfileSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ComplianceSecurityProfileSettingPb pb = + mapper.readValue(p, ComplianceSecurityProfileSettingPb.class); + return ComplianceSecurityProfileSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSettingPb.java new file mode 100755 index 000000000..c1db5175d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSettingPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ComplianceSecurityProfileSettingPb { + @JsonProperty("compliance_security_profile_workspace") + private ComplianceSecurityProfile complianceSecurityProfileWorkspace; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public ComplianceSecurityProfileSettingPb setComplianceSecurityProfileWorkspace( + ComplianceSecurityProfile complianceSecurityProfileWorkspace) { + this.complianceSecurityProfileWorkspace = complianceSecurityProfileWorkspace; + return this; + } + + public ComplianceSecurityProfile getComplianceSecurityProfileWorkspace() { + return complianceSecurityProfileWorkspace; + } + + public ComplianceSecurityProfileSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public ComplianceSecurityProfileSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComplianceSecurityProfileSettingPb that = (ComplianceSecurityProfileSettingPb) o; + return Objects.equals( + complianceSecurityProfileWorkspace, that.complianceSecurityProfileWorkspace) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(complianceSecurityProfileWorkspace, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(ComplianceSecurityProfileSettingPb.class) + .add("complianceSecurityProfileWorkspace", complianceSecurityProfileWorkspace) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.java index cb98e699f..735570bff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Config.ConfigSerializer.class) 
+@JsonDeserialize(using = Config.ConfigDeserializer.class) public class Config { /** */ - @JsonProperty("email") private EmailConfig email; /** */ - @JsonProperty("generic_webhook") private GenericWebhookConfig genericWebhook; /** */ - @JsonProperty("microsoft_teams") private MicrosoftTeamsConfig microsoftTeams; /** */ - @JsonProperty("pagerduty") private PagerdutyConfig pagerduty; /** */ - @JsonProperty("slack") private SlackConfig slack; public Config setEmail(EmailConfig email) { @@ -101,4 +107,45 @@ public String toString() { .add("slack", slack) .toString(); } + + ConfigPb toPb() { + ConfigPb pb = new ConfigPb(); + pb.setEmail(email); + pb.setGenericWebhook(genericWebhook); + pb.setMicrosoftTeams(microsoftTeams); + pb.setPagerduty(pagerduty); + pb.setSlack(slack); + + return pb; + } + + static Config fromPb(ConfigPb pb) { + Config model = new Config(); + model.setEmail(pb.getEmail()); + model.setGenericWebhook(pb.getGenericWebhook()); + model.setMicrosoftTeams(pb.getMicrosoftTeams()); + model.setPagerduty(pb.getPagerduty()); + model.setSlack(pb.getSlack()); + + return model; + } + + public static class ConfigSerializer extends JsonSerializer { + @Override + public void serialize(Config value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ConfigDeserializer extends JsonDeserializer { + @Override + public Config deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ConfigPb pb = mapper.readValue(p, ConfigPb.class); + return Config.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ConfigPb.java new file mode 100755 index 000000000..ee854154c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ConfigPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ConfigPb { + @JsonProperty("email") + private EmailConfig email; + + @JsonProperty("generic_webhook") + private GenericWebhookConfig genericWebhook; + + @JsonProperty("microsoft_teams") + private MicrosoftTeamsConfig microsoftTeams; + + @JsonProperty("pagerduty") + private PagerdutyConfig pagerduty; + + @JsonProperty("slack") + private SlackConfig slack; + + public ConfigPb setEmail(EmailConfig email) { + this.email = email; + return this; + } + + public EmailConfig getEmail() { + return email; + } + + public ConfigPb setGenericWebhook(GenericWebhookConfig genericWebhook) { + this.genericWebhook = genericWebhook; + return this; + } + + public GenericWebhookConfig getGenericWebhook() { + return genericWebhook; + } + + public ConfigPb setMicrosoftTeams(MicrosoftTeamsConfig microsoftTeams) { + this.microsoftTeams = microsoftTeams; + return this; + } + + public MicrosoftTeamsConfig getMicrosoftTeams() { + return microsoftTeams; + } + + public ConfigPb setPagerduty(PagerdutyConfig pagerduty) { + this.pagerduty = pagerduty; + return this; + } + + public PagerdutyConfig getPagerduty() { + return pagerduty; + } + + public ConfigPb setSlack(SlackConfig slack) { + 
this.slack = slack; + return this; + } + + public SlackConfig getSlack() { + return slack; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConfigPb that = (ConfigPb) o; + return Objects.equals(email, that.email) + && Objects.equals(genericWebhook, that.genericWebhook) + && Objects.equals(microsoftTeams, that.microsoftTeams) + && Objects.equals(pagerduty, that.pagerduty) + && Objects.equals(slack, that.slack); + } + + @Override + public int hashCode() { + return Objects.hash(email, genericWebhook, microsoftTeams, pagerduty, slack); + } + + @Override + public String toString() { + return new ToStringer(ConfigPb.class) + .add("email", email) + .add("genericWebhook", genericWebhook) + .add("microsoftTeams", microsoftTeams) + .add("pagerduty", pagerduty) + .add("slack", slack) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Converters.java new file mode 100755 index 000000000..b70e3a394 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.settings; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // 
Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java index 0cfbedf56..ee36e4835 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Details required to configure a block list or allow list. */ @Generated +@JsonSerialize(using = CreateIpAccessList.CreateIpAccessListSerializer.class) +@JsonDeserialize(using = CreateIpAccessList.CreateIpAccessListDeserializer.class) public class CreateIpAccessList { /** */ - @JsonProperty("ip_addresses") private Collection ipAddresses; /** Label for the IP access list. This **cannot** be empty. */ - @JsonProperty("label") private String label; /** @@ -26,7 +35,6 @@ public class CreateIpAccessList { * or range. IP addresses in the block list are excluded even if they are included in an allow * list. 
*/ - @JsonProperty("list_type") private ListType listType; public CreateIpAccessList setIpAddresses(Collection ipAddresses) { @@ -79,4 +87,42 @@ public String toString() { .add("listType", listType) .toString(); } + + CreateIpAccessListPb toPb() { + CreateIpAccessListPb pb = new CreateIpAccessListPb(); + pb.setIpAddresses(ipAddresses); + pb.setLabel(label); + pb.setListType(listType); + + return pb; + } + + static CreateIpAccessList fromPb(CreateIpAccessListPb pb) { + CreateIpAccessList model = new CreateIpAccessList(); + model.setIpAddresses(pb.getIpAddresses()); + model.setLabel(pb.getLabel()); + model.setListType(pb.getListType()); + + return model; + } + + public static class CreateIpAccessListSerializer extends JsonSerializer { + @Override + public void serialize(CreateIpAccessList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateIpAccessListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateIpAccessListDeserializer extends JsonDeserializer { + @Override + public CreateIpAccessList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateIpAccessListPb pb = mapper.readValue(p, CreateIpAccessListPb.class); + return CreateIpAccessList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListPb.java new file mode 100755 index 000000000..7733fd100 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Details required to configure a block list or allow list. */ +@Generated +class CreateIpAccessListPb { + @JsonProperty("ip_addresses") + private Collection ipAddresses; + + @JsonProperty("label") + private String label; + + @JsonProperty("list_type") + private ListType listType; + + public CreateIpAccessListPb setIpAddresses(Collection ipAddresses) { + this.ipAddresses = ipAddresses; + return this; + } + + public Collection getIpAddresses() { + return ipAddresses; + } + + public CreateIpAccessListPb setLabel(String label) { + this.label = label; + return this; + } + + public String getLabel() { + return label; + } + + public CreateIpAccessListPb setListType(ListType listType) { + this.listType = listType; + return this; + } + + public ListType getListType() { + return listType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateIpAccessListPb that = (CreateIpAccessListPb) o; + return Objects.equals(ipAddresses, that.ipAddresses) + && Objects.equals(label, that.label) + && Objects.equals(listType, that.listType); + } + + @Override + public int hashCode() { + return Objects.hash(ipAddresses, label, listType); + } + + @Override + public String toString() { + return new ToStringer(CreateIpAccessListPb.class) + .add("ipAddresses", ipAddresses) + .add("label", label) + .add("listType", listType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java index dba141ff5..63f6b6203 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** An IP access list was successfully created. */ @Generated +@JsonSerialize(using = CreateIpAccessListResponse.CreateIpAccessListResponseSerializer.class) +@JsonDeserialize(using = CreateIpAccessListResponse.CreateIpAccessListResponseDeserializer.class) public class CreateIpAccessListResponse { /** Definition of an IP Access list */ - @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; public CreateIpAccessListResponse setIpAccessList(IpAccessListInfo ipAccessList) { @@ -42,4 +52,41 @@ public String toString() { .add("ipAccessList", ipAccessList) .toString(); } + + CreateIpAccessListResponsePb toPb() { + CreateIpAccessListResponsePb pb = new CreateIpAccessListResponsePb(); + pb.setIpAccessList(ipAccessList); + + return pb; + } + + static CreateIpAccessListResponse fromPb(CreateIpAccessListResponsePb pb) { + CreateIpAccessListResponse model = new CreateIpAccessListResponse(); + model.setIpAccessList(pb.getIpAccessList()); + + return model; + } + + public static class CreateIpAccessListResponseSerializer + extends 
JsonSerializer { + @Override + public void serialize( + CreateIpAccessListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateIpAccessListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateIpAccessListResponseDeserializer + extends JsonDeserializer { + @Override + public CreateIpAccessListResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateIpAccessListResponsePb pb = mapper.readValue(p, CreateIpAccessListResponsePb.class); + return CreateIpAccessListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponsePb.java new file mode 100755 index 000000000..08f1ffe18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** An IP access list was successfully created. 
*/ +@Generated +class CreateIpAccessListResponsePb { + @JsonProperty("ip_access_list") + private IpAccessListInfo ipAccessList; + + public CreateIpAccessListResponsePb setIpAccessList(IpAccessListInfo ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessListInfo getIpAccessList() { + return ipAccessList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateIpAccessListResponsePb that = (CreateIpAccessListResponsePb) o; + return Objects.equals(ipAccessList, that.ipAccessList); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessList); + } + + @Override + public String toString() { + return new ToStringer(CreateIpAccessListResponsePb.class) + .add("ipAccessList", ipAccessList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java index 3188d16c2..996ddf9f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create a network connectivity configuration */ @Generated +@JsonSerialize( + using = + CreateNetworkConnectivityConfigRequest.CreateNetworkConnectivityConfigRequestSerializer + .class) +@JsonDeserialize( + using = + CreateNetworkConnectivityConfigRequest.CreateNetworkConnectivityConfigRequestDeserializer + .class) public class CreateNetworkConnectivityConfigRequest { /** Properties of the new network connectivity configuration. */ - @JsonProperty("network_connectivity_config") private CreateNetworkConnectivityConfiguration networkConnectivityConfig; public CreateNetworkConnectivityConfigRequest setNetworkConnectivityConfig( @@ -43,4 +59,45 @@ public String toString() { .add("networkConnectivityConfig", networkConnectivityConfig) .toString(); } + + CreateNetworkConnectivityConfigRequestPb toPb() { + CreateNetworkConnectivityConfigRequestPb pb = new CreateNetworkConnectivityConfigRequestPb(); + pb.setNetworkConnectivityConfig(networkConnectivityConfig); + + return pb; + } + + static CreateNetworkConnectivityConfigRequest fromPb( + CreateNetworkConnectivityConfigRequestPb pb) { + CreateNetworkConnectivityConfigRequest model = new CreateNetworkConnectivityConfigRequest(); + model.setNetworkConnectivityConfig(pb.getNetworkConnectivityConfig()); + + return model; + } + + public static class CreateNetworkConnectivityConfigRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateNetworkConnectivityConfigRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateNetworkConnectivityConfigRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateNetworkConnectivityConfigRequestDeserializer + extends JsonDeserializer { + @Override + public CreateNetworkConnectivityConfigRequest deserialize( + JsonParser p, DeserializationContext 
ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateNetworkConnectivityConfigRequestPb pb = + mapper.readValue(p, CreateNetworkConnectivityConfigRequestPb.class); + return CreateNetworkConnectivityConfigRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequestPb.java new file mode 100755 index 000000000..9593bbcb1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a network connectivity configuration */ +@Generated +class CreateNetworkConnectivityConfigRequestPb { + @JsonProperty("network_connectivity_config") + private CreateNetworkConnectivityConfiguration networkConnectivityConfig; + + public CreateNetworkConnectivityConfigRequestPb setNetworkConnectivityConfig( + CreateNetworkConnectivityConfiguration networkConnectivityConfig) { + this.networkConnectivityConfig = networkConnectivityConfig; + return this; + } + + public CreateNetworkConnectivityConfiguration getNetworkConnectivityConfig() { + return networkConnectivityConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNetworkConnectivityConfigRequestPb that = (CreateNetworkConnectivityConfigRequestPb) o; + return Objects.equals(networkConnectivityConfig, 
that.networkConnectivityConfig); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfig); + } + + @Override + public String toString() { + return new ToStringer(CreateNetworkConnectivityConfigRequestPb.class) + .add("networkConnectivityConfig", networkConnectivityConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java index d8d868468..4a1b41ebe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java @@ -4,25 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Properties of the new network connectivity configuration. 
*/ @Generated +@JsonSerialize( + using = + CreateNetworkConnectivityConfiguration.CreateNetworkConnectivityConfigurationSerializer + .class) +@JsonDeserialize( + using = + CreateNetworkConnectivityConfiguration.CreateNetworkConnectivityConfigurationDeserializer + .class) public class CreateNetworkConnectivityConfiguration { /** * The name of the network connectivity configuration. The name can contain alphanumeric * characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name * must match the regular expression ^[0-9a-zA-Z-_]{3,30}$ */ - @JsonProperty("name") private String name; /** * The region for the network connectivity configuration. Only workspaces in the same region can * be attached to the network connectivity configuration. */ - @JsonProperty("region") private String region; public CreateNetworkConnectivityConfiguration setName(String name) { @@ -63,4 +78,47 @@ public String toString() { .add("region", region) .toString(); } + + CreateNetworkConnectivityConfigurationPb toPb() { + CreateNetworkConnectivityConfigurationPb pb = new CreateNetworkConnectivityConfigurationPb(); + pb.setName(name); + pb.setRegion(region); + + return pb; + } + + static CreateNetworkConnectivityConfiguration fromPb( + CreateNetworkConnectivityConfigurationPb pb) { + CreateNetworkConnectivityConfiguration model = new CreateNetworkConnectivityConfiguration(); + model.setName(pb.getName()); + model.setRegion(pb.getRegion()); + + return model; + } + + public static class CreateNetworkConnectivityConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateNetworkConnectivityConfiguration value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateNetworkConnectivityConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateNetworkConnectivityConfigurationDeserializer + extends JsonDeserializer { + @Override + public 
CreateNetworkConnectivityConfiguration deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateNetworkConnectivityConfigurationPb pb = + mapper.readValue(p, CreateNetworkConnectivityConfigurationPb.class); + return CreateNetworkConnectivityConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigurationPb.java new file mode 100755 index 000000000..5238d3ecf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigurationPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Properties of the new network connectivity configuration. 
*/ +@Generated +class CreateNetworkConnectivityConfigurationPb { + @JsonProperty("name") + private String name; + + @JsonProperty("region") + private String region; + + public CreateNetworkConnectivityConfigurationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateNetworkConnectivityConfigurationPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNetworkConnectivityConfigurationPb that = (CreateNetworkConnectivityConfigurationPb) o; + return Objects.equals(name, that.name) && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash(name, region); + } + + @Override + public String toString() { + return new ToStringer(CreateNetworkConnectivityConfigurationPb.class) + .add("name", name) + .add("region", region) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java index 8afee90dd..8ada941e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create a network policy */ @Generated +@JsonSerialize(using = CreateNetworkPolicyRequest.CreateNetworkPolicyRequestSerializer.class) +@JsonDeserialize(using = CreateNetworkPolicyRequest.CreateNetworkPolicyRequestDeserializer.class) public class CreateNetworkPolicyRequest { /** */ - @JsonProperty("network_policy") private AccountNetworkPolicy networkPolicy; public CreateNetworkPolicyRequest setNetworkPolicy(AccountNetworkPolicy networkPolicy) { @@ -42,4 +52,41 @@ public String toString() { .add("networkPolicy", networkPolicy) .toString(); } + + CreateNetworkPolicyRequestPb toPb() { + CreateNetworkPolicyRequestPb pb = new CreateNetworkPolicyRequestPb(); + pb.setNetworkPolicy(networkPolicy); + + return pb; + } + + static CreateNetworkPolicyRequest fromPb(CreateNetworkPolicyRequestPb pb) { + CreateNetworkPolicyRequest model = new CreateNetworkPolicyRequest(); + model.setNetworkPolicy(pb.getNetworkPolicy()); + + return model; + } + + public static class CreateNetworkPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateNetworkPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateNetworkPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateNetworkPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateNetworkPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateNetworkPolicyRequestPb pb = mapper.readValue(p, CreateNetworkPolicyRequestPb.class); + return CreateNetworkPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequestPb.java new file mode 100755 index 000000000..6bf7d3a75 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a network policy */ +@Generated +class CreateNetworkPolicyRequestPb { + @JsonProperty("network_policy") + private AccountNetworkPolicy networkPolicy; + + public CreateNetworkPolicyRequestPb setNetworkPolicy(AccountNetworkPolicy networkPolicy) { + this.networkPolicy = networkPolicy; + return this; + } + + public AccountNetworkPolicy getNetworkPolicy() { + return networkPolicy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNetworkPolicyRequestPb that = (CreateNetworkPolicyRequestPb) o; + return Objects.equals(networkPolicy, that.networkPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicy); + } + + @Override + public String toString() { + return new ToStringer(CreateNetworkPolicyRequestPb.class) + .add("networkPolicy", networkPolicy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java index 068175aec..73a2ee5db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java @@ -4,20 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateNotificationDestinationRequest.CreateNotificationDestinationRequestSerializer.class) +@JsonDeserialize( + using = + CreateNotificationDestinationRequest.CreateNotificationDestinationRequestDeserializer.class) public class CreateNotificationDestinationRequest { /** * The configuration for the notification destination. Must wrap EXACTLY one of the nested * configs. */ - @JsonProperty("config") private Config config; /** The display name for the notification destination. 
*/ - @JsonProperty("display_name") private String displayName; public CreateNotificationDestinationRequest setConfig(Config config) { @@ -58,4 +71,44 @@ public String toString() { .add("displayName", displayName) .toString(); } + + CreateNotificationDestinationRequestPb toPb() { + CreateNotificationDestinationRequestPb pb = new CreateNotificationDestinationRequestPb(); + pb.setConfig(config); + pb.setDisplayName(displayName); + + return pb; + } + + static CreateNotificationDestinationRequest fromPb(CreateNotificationDestinationRequestPb pb) { + CreateNotificationDestinationRequest model = new CreateNotificationDestinationRequest(); + model.setConfig(pb.getConfig()); + model.setDisplayName(pb.getDisplayName()); + + return model; + } + + public static class CreateNotificationDestinationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateNotificationDestinationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateNotificationDestinationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateNotificationDestinationRequestDeserializer + extends JsonDeserializer { + @Override + public CreateNotificationDestinationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateNotificationDestinationRequestPb pb = + mapper.readValue(p, CreateNotificationDestinationRequestPb.class); + return CreateNotificationDestinationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequestPb.java new file mode 100755 index 000000000..8d5c5cde8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateNotificationDestinationRequestPb { + @JsonProperty("config") + private Config config; + + @JsonProperty("display_name") + private String displayName; + + public CreateNotificationDestinationRequestPb setConfig(Config config) { + this.config = config; + return this; + } + + public Config getConfig() { + return config; + } + + public CreateNotificationDestinationRequestPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNotificationDestinationRequestPb that = (CreateNotificationDestinationRequestPb) o; + return Objects.equals(config, that.config) && Objects.equals(displayName, that.displayName); + } + + @Override + public int hashCode() { + return Objects.hash(config, displayName); + } + + @Override + public String toString() { 
+ return new ToStringer(CreateNotificationDestinationRequestPb.class) + .add("config", config) + .add("displayName", displayName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java index 4ae7809b5..792b3f797 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Configuration details for creating on-behalf tokens. */ @Generated +@JsonSerialize(using = CreateOboTokenRequest.CreateOboTokenRequestSerializer.class) +@JsonDeserialize(using = CreateOboTokenRequest.CreateOboTokenRequestDeserializer.class) public class CreateOboTokenRequest { /** Application ID of the service principal. */ - @JsonProperty("application_id") private String applicationId; /** Comment that describes the purpose of the token. */ - @JsonProperty("comment") private String comment; /** The number of seconds before the token expires. 
*/ - @JsonProperty("lifetime_seconds") private Long lifetimeSeconds; public CreateOboTokenRequest setApplicationId(String applicationId) { @@ -72,4 +80,45 @@ public String toString() { .add("lifetimeSeconds", lifetimeSeconds) .toString(); } + + CreateOboTokenRequestPb toPb() { + CreateOboTokenRequestPb pb = new CreateOboTokenRequestPb(); + pb.setApplicationId(applicationId); + pb.setComment(comment); + pb.setLifetimeSeconds(lifetimeSeconds); + + return pb; + } + + static CreateOboTokenRequest fromPb(CreateOboTokenRequestPb pb) { + CreateOboTokenRequest model = new CreateOboTokenRequest(); + model.setApplicationId(pb.getApplicationId()); + model.setComment(pb.getComment()); + model.setLifetimeSeconds(pb.getLifetimeSeconds()); + + return model; + } + + public static class CreateOboTokenRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateOboTokenRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateOboTokenRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateOboTokenRequestDeserializer + extends JsonDeserializer { + @Override + public CreateOboTokenRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateOboTokenRequestPb pb = mapper.readValue(p, CreateOboTokenRequestPb.class); + return CreateOboTokenRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequestPb.java new file mode 100755 index 000000000..357036691 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configuration details for creating on-behalf tokens. */ +@Generated +class CreateOboTokenRequestPb { + @JsonProperty("application_id") + private String applicationId; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("lifetime_seconds") + private Long lifetimeSeconds; + + public CreateOboTokenRequestPb setApplicationId(String applicationId) { + this.applicationId = applicationId; + return this; + } + + public String getApplicationId() { + return applicationId; + } + + public CreateOboTokenRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateOboTokenRequestPb setLifetimeSeconds(Long lifetimeSeconds) { + this.lifetimeSeconds = lifetimeSeconds; + return this; + } + + public Long getLifetimeSeconds() { + return lifetimeSeconds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateOboTokenRequestPb that = (CreateOboTokenRequestPb) o; + return 
Objects.equals(applicationId, that.applicationId) + && Objects.equals(comment, that.comment) + && Objects.equals(lifetimeSeconds, that.lifetimeSeconds); + } + + @Override + public int hashCode() { + return Objects.hash(applicationId, comment, lifetimeSeconds); + } + + @Override + public String toString() { + return new ToStringer(CreateOboTokenRequestPb.class) + .add("applicationId", applicationId) + .add("comment", comment) + .add("lifetimeSeconds", lifetimeSeconds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java index 2b9157a11..6de5006ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** An on-behalf token was successfully created for the service principal. 
*/ @Generated +@JsonSerialize(using = CreateOboTokenResponse.CreateOboTokenResponseSerializer.class) +@JsonDeserialize(using = CreateOboTokenResponse.CreateOboTokenResponseDeserializer.class) public class CreateOboTokenResponse { /** */ - @JsonProperty("token_info") private TokenInfo tokenInfo; /** Value of the token. */ - @JsonProperty("token_value") private String tokenValue; public CreateOboTokenResponse setTokenInfo(TokenInfo tokenInfo) { @@ -56,4 +65,43 @@ public String toString() { .add("tokenValue", tokenValue) .toString(); } + + CreateOboTokenResponsePb toPb() { + CreateOboTokenResponsePb pb = new CreateOboTokenResponsePb(); + pb.setTokenInfo(tokenInfo); + pb.setTokenValue(tokenValue); + + return pb; + } + + static CreateOboTokenResponse fromPb(CreateOboTokenResponsePb pb) { + CreateOboTokenResponse model = new CreateOboTokenResponse(); + model.setTokenInfo(pb.getTokenInfo()); + model.setTokenValue(pb.getTokenValue()); + + return model; + } + + public static class CreateOboTokenResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateOboTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateOboTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateOboTokenResponseDeserializer + extends JsonDeserializer { + @Override + public CreateOboTokenResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateOboTokenResponsePb pb = mapper.readValue(p, CreateOboTokenResponsePb.class); + return CreateOboTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponsePb.java new file mode 100755 index 000000000..ae58fd1fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** An on-behalf token was successfully created for the service principal. */ +@Generated +class CreateOboTokenResponsePb { + @JsonProperty("token_info") + private TokenInfo tokenInfo; + + @JsonProperty("token_value") + private String tokenValue; + + public CreateOboTokenResponsePb setTokenInfo(TokenInfo tokenInfo) { + this.tokenInfo = tokenInfo; + return this; + } + + public TokenInfo getTokenInfo() { + return tokenInfo; + } + + public CreateOboTokenResponsePb setTokenValue(String tokenValue) { + this.tokenValue = tokenValue; + return this; + } + + public String getTokenValue() { + return tokenValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateOboTokenResponsePb that = (CreateOboTokenResponsePb) o; + return Objects.equals(tokenInfo, that.tokenInfo) && Objects.equals(tokenValue, that.tokenValue); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfo, tokenValue); + } + + @Override + public String toString() { + return new 
ToStringer(CreateOboTokenResponsePb.class) + .add("tokenInfo", tokenInfo) + .add("tokenValue", tokenValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java index ea50df387..7415db684 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -13,30 +22,44 @@ * portal after initialization. */ @Generated +@JsonSerialize(using = CreatePrivateEndpointRule.CreatePrivateEndpointRuleSerializer.class) +@JsonDeserialize(using = CreatePrivateEndpointRule.CreatePrivateEndpointRuleDeserializer.class) public class CreatePrivateEndpointRule { /** - * Only used by private endpoints to customer-managed resources. + * Only used by private endpoints to customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. */ - @JsonProperty("domain_names") private Collection domainNames; /** - * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer | - * mysqlServer + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + private String endpointService; + + /** + * Not used by customer-managed private endpoint services. * *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. */ - @JsonProperty("group_id") private String groupId; /** The Azure resource ID of the target resource. */ - @JsonProperty("resource_id") private String resourceId; + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + private Collection resourceNames; + public CreatePrivateEndpointRule setDomainNames(Collection domainNames) { this.domainNames = domainNames; return this; @@ -46,6 +69,15 @@ public Collection getDomainNames() { return domainNames; } + public CreatePrivateEndpointRule setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + public CreatePrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -64,27 +96,85 @@ public String getResourceId() { return resourceId; } + public CreatePrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreatePrivateEndpointRule that = (CreatePrivateEndpointRule) o; return Objects.equals(domainNames, that.domainNames) + && Objects.equals(endpointService, that.endpointService) && Objects.equals(groupId, that.groupId) - && Objects.equals(resourceId, that.resourceId); + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames, groupId, resourceId); + return Objects.hash(domainNames, endpointService, groupId, resourceId, resourceNames); } @Override public String toString() { return new ToStringer(CreatePrivateEndpointRule.class) .add("domainNames", domainNames) + .add("endpointService", endpointService) 
.add("groupId", groupId) .add("resourceId", resourceId) + .add("resourceNames", resourceNames) .toString(); } + + CreatePrivateEndpointRulePb toPb() { + CreatePrivateEndpointRulePb pb = new CreatePrivateEndpointRulePb(); + pb.setDomainNames(domainNames); + pb.setEndpointService(endpointService); + pb.setGroupId(groupId); + pb.setResourceId(resourceId); + pb.setResourceNames(resourceNames); + + return pb; + } + + static CreatePrivateEndpointRule fromPb(CreatePrivateEndpointRulePb pb) { + CreatePrivateEndpointRule model = new CreatePrivateEndpointRule(); + model.setDomainNames(pb.getDomainNames()); + model.setEndpointService(pb.getEndpointService()); + model.setGroupId(pb.getGroupId()); + model.setResourceId(pb.getResourceId()); + model.setResourceNames(pb.getResourceNames()); + + return model; + } + + public static class CreatePrivateEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePrivateEndpointRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePrivateEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePrivateEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public CreatePrivateEndpointRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePrivateEndpointRulePb pb = mapper.readValue(p, CreatePrivateEndpointRulePb.class); + return CreatePrivateEndpointRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRulePb.java new file mode 100755 index 000000000..b1894203e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRulePb.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. 
+ */ +@Generated +class CreatePrivateEndpointRulePb { + @JsonProperty("domain_names") + private Collection domainNames; + + @JsonProperty("endpoint_service") + private String endpointService; + + @JsonProperty("group_id") + private String groupId; + + @JsonProperty("resource_id") + private String resourceId; + + @JsonProperty("resource_names") + private Collection resourceNames; + + public CreatePrivateEndpointRulePb setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public CreatePrivateEndpointRulePb setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public CreatePrivateEndpointRulePb setGroupId(String groupId) { + this.groupId = groupId; + return this; + } + + public String getGroupId() { + return groupId; + } + + public CreatePrivateEndpointRulePb setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public CreatePrivateEndpointRulePb setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePrivateEndpointRulePb that = (CreatePrivateEndpointRulePb) o; + return Objects.equals(domainNames, that.domainNames) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(groupId, that.groupId) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames); + } + + @Override + public int hashCode() { + return Objects.hash(domainNames, endpointService, groupId, resourceId, resourceNames); + } + + @Override + public 
String toString() { + return new ToStringer(CreatePrivateEndpointRulePb.class) + .add("domainNames", domainNames) + .add("endpointService", endpointService) + .add("groupId", groupId) + .add("resourceId", resourceId) + .add("resourceNames", resourceNames) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java index 1afe88442..bd9af8183 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java @@ -4,21 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create a private endpoint rule */ @Generated +@JsonSerialize( + using = CreatePrivateEndpointRuleRequest.CreatePrivateEndpointRuleRequestSerializer.class) +@JsonDeserialize( + using = CreatePrivateEndpointRuleRequest.CreatePrivateEndpointRuleRequestDeserializer.class) public class CreatePrivateEndpointRuleRequest { /** Your Network Connectivity Configuration ID. 
*/ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; /** * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure * portal after initialization. */ - @JsonProperty("private_endpoint_rule") private CreatePrivateEndpointRule privateEndpointRule; public CreatePrivateEndpointRuleRequest setNetworkConnectivityConfigId( @@ -62,4 +73,44 @@ public String toString() { .add("privateEndpointRule", privateEndpointRule) .toString(); } + + CreatePrivateEndpointRuleRequestPb toPb() { + CreatePrivateEndpointRuleRequestPb pb = new CreatePrivateEndpointRuleRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setPrivateEndpointRule(privateEndpointRule); + + return pb; + } + + static CreatePrivateEndpointRuleRequest fromPb(CreatePrivateEndpointRuleRequestPb pb) { + CreatePrivateEndpointRuleRequest model = new CreatePrivateEndpointRuleRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setPrivateEndpointRule(pb.getPrivateEndpointRule()); + + return model; + } + + public static class CreatePrivateEndpointRuleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreatePrivateEndpointRuleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreatePrivateEndpointRuleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreatePrivateEndpointRuleRequestDeserializer + extends JsonDeserializer { + @Override + public CreatePrivateEndpointRuleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreatePrivateEndpointRuleRequestPb pb = + mapper.readValue(p, CreatePrivateEndpointRuleRequestPb.class); + return CreatePrivateEndpointRuleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestPb.java new file mode 100755 index 000000000..9b1a3dff4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a private endpoint rule */ +@Generated +class CreatePrivateEndpointRuleRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + @JsonProperty("private_endpoint_rule") + private CreatePrivateEndpointRule privateEndpointRule; + + public CreatePrivateEndpointRuleRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public CreatePrivateEndpointRuleRequestPb setPrivateEndpointRule( + CreatePrivateEndpointRule privateEndpointRule) { + this.privateEndpointRule = privateEndpointRule; + return this; + } + + public CreatePrivateEndpointRule getPrivateEndpointRule() { + return privateEndpointRule; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CreatePrivateEndpointRuleRequestPb that = (CreatePrivateEndpointRuleRequestPb) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(privateEndpointRule, that.privateEndpointRule); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId, privateEndpointRule); + } + + @Override + public String toString() { + return new ToStringer(CreatePrivateEndpointRuleRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("privateEndpointRule", privateEndpointRule) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java index 79f4feea2..574649ce7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateTokenRequest.CreateTokenRequestSerializer.class) +@JsonDeserialize(using = CreateTokenRequest.CreateTokenRequestDeserializer.class) public class CreateTokenRequest { /** Optional 
description to attach to the token. */ - @JsonProperty("comment") private String comment; /** @@ -18,7 +28,6 @@ public class CreateTokenRequest { * *

If the lifetime is not specified, this token remains valid indefinitely. */ - @JsonProperty("lifetime_seconds") private Long lifetimeSeconds; public CreateTokenRequest setComment(String comment) { @@ -60,4 +69,40 @@ public String toString() { .add("lifetimeSeconds", lifetimeSeconds) .toString(); } + + CreateTokenRequestPb toPb() { + CreateTokenRequestPb pb = new CreateTokenRequestPb(); + pb.setComment(comment); + pb.setLifetimeSeconds(lifetimeSeconds); + + return pb; + } + + static CreateTokenRequest fromPb(CreateTokenRequestPb pb) { + CreateTokenRequest model = new CreateTokenRequest(); + model.setComment(pb.getComment()); + model.setLifetimeSeconds(pb.getLifetimeSeconds()); + + return model; + } + + public static class CreateTokenRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateTokenRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateTokenRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateTokenRequestDeserializer extends JsonDeserializer { + @Override + public CreateTokenRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateTokenRequestPb pb = mapper.readValue(p, CreateTokenRequestPb.class); + return CreateTokenRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequestPb.java new file mode 100755 index 000000000..77ea4a921 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateTokenRequestPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("lifetime_seconds") + private Long lifetimeSeconds; + + public CreateTokenRequestPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateTokenRequestPb setLifetimeSeconds(Long lifetimeSeconds) { + this.lifetimeSeconds = lifetimeSeconds; + return this; + } + + public Long getLifetimeSeconds() { + return lifetimeSeconds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTokenRequestPb that = (CreateTokenRequestPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(lifetimeSeconds, that.lifetimeSeconds); + } + + @Override + public int hashCode() { + return Objects.hash(comment, lifetimeSeconds); + } + + @Override + public String toString() { + return new ToStringer(CreateTokenRequestPb.class) + .add("comment", comment) + .add("lifetimeSeconds", lifetimeSeconds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponse.java index 278798566..f79aa4baa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateTokenResponse.CreateTokenResponseSerializer.class) +@JsonDeserialize(using = CreateTokenResponse.CreateTokenResponseDeserializer.class) public class CreateTokenResponse { /** The information for the new token. */ - @JsonProperty("token_info") private PublicTokenInfo tokenInfo; /** The value of the new token. */ - @JsonProperty("token_value") private String tokenValue; public CreateTokenResponse setTokenInfo(PublicTokenInfo tokenInfo) { @@ -55,4 +64,41 @@ public String toString() { .add("tokenValue", tokenValue) .toString(); } + + CreateTokenResponsePb toPb() { + CreateTokenResponsePb pb = new CreateTokenResponsePb(); + pb.setTokenInfo(tokenInfo); + pb.setTokenValue(tokenValue); + + return pb; + } + + static CreateTokenResponse fromPb(CreateTokenResponsePb pb) { + CreateTokenResponse model = new CreateTokenResponse(); + model.setTokenInfo(pb.getTokenInfo()); + model.setTokenValue(pb.getTokenValue()); + + return model; + } + + public static class CreateTokenResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateTokenResponseDeserializer + extends JsonDeserializer { + @Override + public CreateTokenResponse deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateTokenResponsePb pb = mapper.readValue(p, CreateTokenResponsePb.class); + return CreateTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponsePb.java new file mode 100755 index 000000000..5e9336b94 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateTokenResponsePb { + @JsonProperty("token_info") + private PublicTokenInfo tokenInfo; + + @JsonProperty("token_value") + private String tokenValue; + + public CreateTokenResponsePb setTokenInfo(PublicTokenInfo tokenInfo) { + this.tokenInfo = tokenInfo; + return this; + } + + public PublicTokenInfo getTokenInfo() { + return tokenInfo; + } + + public CreateTokenResponsePb setTokenValue(String tokenValue) { + this.tokenValue = tokenValue; + return this; + } + + public String getTokenValue() { + return tokenValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTokenResponsePb that = (CreateTokenResponsePb) o; + return Objects.equals(tokenInfo, that.tokenInfo) && Objects.equals(tokenValue, that.tokenValue); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfo, tokenValue); + } + + @Override + public String toString() { + return 
new ToStringer(CreateTokenResponsePb.class) + .add("tokenInfo", tokenInfo) + .add("tokenValue", tokenValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java index 23639b484..b1f87b334 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java @@ -21,7 +21,7 @@ public ExchangeTokenResponse exchangeToken(ExchangeTokenRequest request) { String path = "/api/2.0/credentials-manager/exchange-tokens/token"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ExchangeTokenResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccount.java index 789cbe8ae..7f64d427b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccount.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccount.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Account level policy for CSP */ @Generated +@JsonSerialize(using = CspEnablementAccount.CspEnablementAccountSerializer.class) +@JsonDeserialize(using = CspEnablementAccount.CspEnablementAccountDeserializer.class) public class CspEnablementAccount { /** * Set by customers when they request Compliance Security Profile (CSP) Invariants are enforced in * Settings policy. */ - @JsonProperty("compliance_standards") private Collection complianceStandards; /** Enforced = it cannot be overriden at workspace level. */ - @JsonProperty("is_enforced") private Boolean isEnforced; public CspEnablementAccount setComplianceStandards( @@ -62,4 +71,42 @@ public String toString() { .add("isEnforced", isEnforced) .toString(); } + + CspEnablementAccountPb toPb() { + CspEnablementAccountPb pb = new CspEnablementAccountPb(); + pb.setComplianceStandards(complianceStandards); + pb.setIsEnforced(isEnforced); + + return pb; + } + + static CspEnablementAccount fromPb(CspEnablementAccountPb pb) { + CspEnablementAccount model = new CspEnablementAccount(); + model.setComplianceStandards(pb.getComplianceStandards()); + model.setIsEnforced(pb.getIsEnforced()); + + return model; + } + + public static class CspEnablementAccountSerializer extends JsonSerializer { + @Override + public void serialize( + CspEnablementAccount value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CspEnablementAccountPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CspEnablementAccountDeserializer + extends JsonDeserializer { + @Override + public CspEnablementAccount deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CspEnablementAccountPb pb = mapper.readValue(p, CspEnablementAccountPb.class); + return CspEnablementAccount.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java index c30f6ef62..fb2beca63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java @@ -24,7 +24,7 @@ public CspEnablementAccountSetting get(GetCspEnablementAccountSettingRequest req apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, CspEnablementAccountSetting.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public CspEnablementAccountSetting update(UpdateCspEnablementAccountSettingReque apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CspEnablementAccountSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountPb.java new file mode 100755 index 000000000..4c077dbcc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Account level policy for CSP */ +@Generated +class CspEnablementAccountPb { + @JsonProperty("compliance_standards") + private Collection complianceStandards; + + @JsonProperty("is_enforced") + private Boolean isEnforced; + + public CspEnablementAccountPb setComplianceStandards( + Collection complianceStandards) { + this.complianceStandards = complianceStandards; + return this; + } + + public Collection getComplianceStandards() { + return complianceStandards; + } + + public CspEnablementAccountPb setIsEnforced(Boolean isEnforced) { + this.isEnforced = isEnforced; + return this; + } + + public Boolean getIsEnforced() { + return isEnforced; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CspEnablementAccountPb that = (CspEnablementAccountPb) o; + return Objects.equals(complianceStandards, that.complianceStandards) + && Objects.equals(isEnforced, that.isEnforced); + } + + @Override + public int hashCode() { + return Objects.hash(complianceStandards, isEnforced); + } + + @Override + public String toString() { + return new ToStringer(CspEnablementAccountPb.class) + .add("complianceStandards", complianceStandards) + .add("isEnforced", isEnforced) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java index 83ee147bf..cc18d25f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CspEnablementAccountSetting.CspEnablementAccountSettingSerializer.class) +@JsonDeserialize(using = CspEnablementAccountSetting.CspEnablementAccountSettingDeserializer.class) public class CspEnablementAccountSetting { /** Account level policy for CSP */ - @JsonProperty("csp_enablement_account") private CspEnablementAccount cspEnablementAccount; /** @@ -21,7 +31,6 @@ public class CspEnablementAccountSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class CspEnablementAccountSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public CspEnablementAccountSetting setCspEnablementAccount( @@ -84,4 +92,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + CspEnablementAccountSettingPb toPb() { + CspEnablementAccountSettingPb pb = new CspEnablementAccountSettingPb(); + pb.setCspEnablementAccount(cspEnablementAccount); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static CspEnablementAccountSetting fromPb(CspEnablementAccountSettingPb pb) { + CspEnablementAccountSetting model = new CspEnablementAccountSetting(); + model.setCspEnablementAccount(pb.getCspEnablementAccount()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class CspEnablementAccountSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + CspEnablementAccountSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CspEnablementAccountSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CspEnablementAccountSettingDeserializer + extends JsonDeserializer { + @Override + public CspEnablementAccountSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CspEnablementAccountSettingPb pb = mapper.readValue(p, CspEnablementAccountSettingPb.class); + return CspEnablementAccountSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSettingPb.java new file mode 100755 index 000000000..fd5cfedef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSettingPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CspEnablementAccountSettingPb { + @JsonProperty("csp_enablement_account") + private CspEnablementAccount cspEnablementAccount; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public CspEnablementAccountSettingPb setCspEnablementAccount( + CspEnablementAccount cspEnablementAccount) { + this.cspEnablementAccount = cspEnablementAccount; + return this; + } + + public CspEnablementAccount getCspEnablementAccount() { + return cspEnablementAccount; + } + + public CspEnablementAccountSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public CspEnablementAccountSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CspEnablementAccountSettingPb that = 
(CspEnablementAccountSettingPb) o; + return Objects.equals(cspEnablementAccount, that.cspEnablementAccount) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(cspEnablementAccount, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(CspEnablementAccountSettingPb.class) + .add("cspEnablementAccount", cspEnablementAccount) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java new file mode 100755 index 000000000..ddae67219 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java @@ -0,0 +1,366 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. 
Note that for private endpoints towards a VPC + * endpoint service behind a customer-managed NLB, you must approve the endpoint in AWS console + * after initialization. + */ +@Generated +@JsonSerialize( + using = + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + .CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRuleSerializer.class) +@JsonDeserialize( + using = + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + .CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRuleDeserializer.class) +public class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule { + /** Databricks account ID. You can find your account ID from the Accounts Console. */ + private String accountId; + + /** + * The current status of this private endpoint. The private endpoint rules are effective only if + * the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + * resources in the AWS console before they take effect. The possible values are: - PENDING: The + * endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + * and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected + * by the private link resource owner. - DISCONNECTED: Connection was removed by the private link + * resource owner, the private endpoint becomes informative and should be deleted for clean-up. - + * EXPIRED: If the endpoint is created but not approved in 14 days, it is EXPIRED. + */ + private CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState; + + /** Time in epoch milliseconds when this object was created. */ + private Long creationTime; + + /** Whether this private endpoint is deactivated. */ + private Boolean deactivated; + + /** Time in epoch milliseconds when this object was deactivated. 
*/ + private Long deactivatedAt; + + /** + * Only used by private endpoints towards a VPC endpoint service for customer-managed VPC endpoint + * service. + * + *

The target AWS resource FQDNs accessible via the VPC endpoint service. When updating this + * field, we perform full update on this field. Please ensure a full list of desired domain_names + * is provided. + */ + private Collection domainNames; + + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + private Boolean enabled; + + /** + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + private String endpointService; + + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. + */ + private String networkConnectivityConfigId; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + private Collection resourceNames; + + /** The ID of a private endpoint rule. */ + private String ruleId; + + /** Time in epoch milliseconds when this object was updated. */ + private Long updatedTime; + + /** + * The AWS VPC endpoint ID. You can use this ID to identify VPC endpoint created by Databricks. + */ + private String vpcEndpointId; + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setAccountId( + String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setConnectionState( + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState) { + this.connectionState = connectionState; + return this; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + getConnectionState() { + return connectionState; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setCreationTime( + Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDeactivated( + Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDeactivatedAt( + Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public 
CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDomainNames( + Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setEndpointService( + String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setResourceNames( + Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setUpdatedTime( + Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setVpcEndpointId( + String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + 
if (o == null || getClass() != o.getClass()) return false; + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule that = + (CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointService, + networkConnectivityConfigId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("endpointService", endpointService) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } + + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb toPb() { + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb pb = + new 
CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb(); + pb.setAccountId(accountId); + pb.setConnectionState(connectionState); + pb.setCreationTime(creationTime); + pb.setDeactivated(deactivated); + pb.setDeactivatedAt(deactivatedAt); + pb.setDomainNames(domainNames); + pb.setEnabled(enabled); + pb.setEndpointService(endpointService); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setResourceNames(resourceNames); + pb.setRuleId(ruleId); + pb.setUpdatedTime(updatedTime); + pb.setVpcEndpointId(vpcEndpointId); + + return pb; + } + + static CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule fromPb( + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb pb) { + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule model = + new CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule(); + model.setAccountId(pb.getAccountId()); + model.setConnectionState(pb.getConnectionState()); + model.setCreationTime(pb.getCreationTime()); + model.setDeactivated(pb.getDeactivated()); + model.setDeactivatedAt(pb.getDeactivatedAt()); + model.setDomainNames(pb.getDomainNames()); + model.setEnabled(pb.getEnabled()); + model.setEndpointService(pb.getEndpointService()); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setResourceNames(pb.getResourceNames()); + model.setRuleId(pb.getRuleId()); + model.setUpdatedTime(pb.getUpdatedTime()); + model.setVpcEndpointId(pb.getVpcEndpointId()); + + return model; + } + + public static class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb pb = + mapper.readValue( + p, CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb.class); + return CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb.java new file mode 100755 index 000000000..c8372b280 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb.java @@ -0,0 +1,246 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that for private endpoints towards a VPC + * endpoint service behind a customer-managed NLB, you must approve the endpoint in AWS console + * after initialization. 
+ */ +@Generated +class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("connection_state") + private CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("deactivated") + private Boolean deactivated; + + @JsonProperty("deactivated_at") + private Long deactivatedAt; + + @JsonProperty("domain_names") + private Collection domainNames; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("endpoint_service") + private String endpointService; + + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + @JsonProperty("resource_names") + private Collection resourceNames; + + @JsonProperty("rule_id") + private String ruleId; + + @JsonProperty("updated_time") + private Long updatedTime; + + @JsonProperty("vpc_endpoint_id") + private String vpcEndpointId; + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setAccountId( + String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setConnectionState( + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState) { + this.connectionState = connectionState; + return this; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + getConnectionState() { + return connectionState; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setCreationTime( + Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb 
setDeactivated( + Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setDeactivatedAt( + Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setDomainNames( + Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setEnabled( + Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setEndpointService( + String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb + setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setResourceNames( + Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setUpdatedTime( + Long updatedTime) { + 
this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb setVpcEndpointId( + String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb that = + (CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointService, + networkConnectivityConfigId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePb.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + 
.add("enabled", enabled) + .add("endpointService", endpointService) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java new file mode 100755 index 000000000..54c96f842 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState { + DISCONNECTED, + ESTABLISHED, + EXPIRED, + PENDING, + REJECTED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java index 1ba9dcb49..dee09a8d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardEmailSubscriptions.DashboardEmailSubscriptionsSerializer.class) +@JsonDeserialize(using = DashboardEmailSubscriptions.DashboardEmailSubscriptionsDeserializer.class) public class DashboardEmailSubscriptions { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -21,7 +31,6 @@ public class DashboardEmailSubscriptions { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class DashboardEmailSubscriptions { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public DashboardEmailSubscriptions setBooleanVal(BooleanMessage booleanVal) { @@ -83,4 +91,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + DashboardEmailSubscriptionsPb toPb() { + DashboardEmailSubscriptionsPb pb = new DashboardEmailSubscriptionsPb(); + pb.setBooleanVal(booleanVal); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static DashboardEmailSubscriptions fromPb(DashboardEmailSubscriptionsPb pb) { + DashboardEmailSubscriptions model = new DashboardEmailSubscriptions(); + model.setBooleanVal(pb.getBooleanVal()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class DashboardEmailSubscriptionsSerializer + extends JsonSerializer { + @Override + public void serialize( + DashboardEmailSubscriptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardEmailSubscriptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardEmailSubscriptionsDeserializer + extends JsonDeserializer { + @Override + public DashboardEmailSubscriptions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardEmailSubscriptionsPb pb = mapper.readValue(p, DashboardEmailSubscriptionsPb.class); + return DashboardEmailSubscriptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java index 767cb5e75..591ce96e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java @@ -22,7 +22,7 @@ public DeleteDashboardEmailSubscriptionsResponse delete( String path = "/api/2.0/settings/types/dashboard_email_subscriptions/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDashboardEmailSubscriptionsResponse.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public DashboardEmailSubscriptions get(GetDashboardEmailSubscriptionsRequest req String path = "/api/2.0/settings/types/dashboard_email_subscriptions/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DashboardEmailSubscriptions.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public DashboardEmailSubscriptions update(UpdateDashboardEmailSubscriptionsReque String path = "/api/2.0/settings/types/dashboard_email_subscriptions/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DashboardEmailSubscriptions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsPb.java new file mode 100755 index 000000000..e19950f05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DashboardEmailSubscriptionsPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public DashboardEmailSubscriptionsPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public DashboardEmailSubscriptionsPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DashboardEmailSubscriptionsPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardEmailSubscriptionsPb that = (DashboardEmailSubscriptionsPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + 
public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(DashboardEmailSubscriptionsPb.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java index aefab63c0..b276cbebe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java @@ -22,7 +22,7 @@ public DeleteDefaultNamespaceSettingResponse delete( String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDefaultNamespaceSettingResponse.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public DefaultNamespaceSetting get(GetDefaultNamespaceSettingRequest request) { String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DefaultNamespaceSetting.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public DefaultNamespaceSetting update(UpdateDefaultNamespaceSettingRequest reque String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DefaultNamespaceSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java index eb12d3fd3..2dbc7029c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -17,6 +26,8 @@ * applies when using Unity Catalog-enabled compute. */ @Generated +@JsonSerialize(using = DefaultNamespaceSetting.DefaultNamespaceSettingSerializer.class) +@JsonDeserialize(using = DefaultNamespaceSetting.DefaultNamespaceSettingDeserializer.class) public class DefaultNamespaceSetting { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -26,11 +37,9 @@ public class DefaultNamespaceSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. 
*/ - @JsonProperty("etag") private String etag; /** */ - @JsonProperty("namespace") private StringMessage namespace; /** @@ -39,7 +48,6 @@ public class DefaultNamespaceSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public DefaultNamespaceSetting setEtag(String etag) { @@ -92,4 +100,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + DefaultNamespaceSettingPb toPb() { + DefaultNamespaceSettingPb pb = new DefaultNamespaceSettingPb(); + pb.setEtag(etag); + pb.setNamespace(namespace); + pb.setSettingName(settingName); + + return pb; + } + + static DefaultNamespaceSetting fromPb(DefaultNamespaceSettingPb pb) { + DefaultNamespaceSetting model = new DefaultNamespaceSetting(); + model.setEtag(pb.getEtag()); + model.setNamespace(pb.getNamespace()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class DefaultNamespaceSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + DefaultNamespaceSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DefaultNamespaceSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DefaultNamespaceSettingDeserializer + extends JsonDeserializer { + @Override + public DefaultNamespaceSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DefaultNamespaceSettingPb pb = mapper.readValue(p, DefaultNamespaceSettingPb.class); + return DefaultNamespaceSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSettingPb.java new file mode 100755 index 000000000..1578c25f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSettingPb.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * This represents the setting configuration for the default namespace in the Databricks workspace. + * Setting the default catalog for the workspace determines the catalog that is used when queries do + * not reference a fully qualified 3 level name. For example, if the default catalog is set to + * 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object + * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a + * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only + * applies when using Unity Catalog-enabled compute. 
+ */ +@Generated +class DefaultNamespaceSettingPb { + @JsonProperty("etag") + private String etag; + + @JsonProperty("namespace") + private StringMessage namespace; + + @JsonProperty("setting_name") + private String settingName; + + public DefaultNamespaceSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DefaultNamespaceSettingPb setNamespace(StringMessage namespace) { + this.namespace = namespace; + return this; + } + + public StringMessage getNamespace() { + return namespace; + } + + public DefaultNamespaceSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultNamespaceSettingPb that = (DefaultNamespaceSettingPb) o; + return Objects.equals(etag, that.etag) + && Objects.equals(namespace, that.namespace) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(etag, namespace, settingName); + } + + @Override + public String toString() { + return new ToStringer(DefaultNamespaceSettingPb.class) + .add("etag", etag) + .add("namespace", namespace) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java index ef3df304b..aa6664edd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import 
com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the account IP access toggle setting */ @Generated +@JsonSerialize( + using = DeleteAccountIpAccessEnableRequest.DeleteAccountIpAccessEnableRequestSerializer.class) +@JsonDeserialize( + using = DeleteAccountIpAccessEnableRequest.DeleteAccountIpAccessEnableRequestDeserializer.class) public class DeleteAccountIpAccessEnableRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class DeleteAccountIpAccessEnableRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteAccountIpAccessEnableRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAccountIpAccessEnableRequest.class).add("etag", etag).toString(); } + + DeleteAccountIpAccessEnableRequestPb toPb() { + DeleteAccountIpAccessEnableRequestPb pb = new DeleteAccountIpAccessEnableRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAccountIpAccessEnableRequest fromPb(DeleteAccountIpAccessEnableRequestPb pb) { + DeleteAccountIpAccessEnableRequest model = new DeleteAccountIpAccessEnableRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAccountIpAccessEnableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountIpAccessEnableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountIpAccessEnableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountIpAccessEnableRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountIpAccessEnableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountIpAccessEnableRequestPb pb = + mapper.readValue(p, DeleteAccountIpAccessEnableRequestPb.class); + return DeleteAccountIpAccessEnableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequestPb.java new file mode 100755 index 000000000..72d57025f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the account IP access toggle setting */ +@Generated +class DeleteAccountIpAccessEnableRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteAccountIpAccessEnableRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountIpAccessEnableRequestPb that = (DeleteAccountIpAccessEnableRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountIpAccessEnableRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java index 7151c3fec..7a510de98 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java @@ -4,11 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = DeleteAccountIpAccessEnableResponse.DeleteAccountIpAccessEnableResponseSerializer.class) +@JsonDeserialize( + using = + DeleteAccountIpAccessEnableResponse.DeleteAccountIpAccessEnableResponseDeserializer.class) public class DeleteAccountIpAccessEnableResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +32,6 @@ public class DeleteAccountIpAccessEnableResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteAccountIpAccessEnableResponse setEtag(String etag) { @@ -47,4 +60,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAccountIpAccessEnableResponse.class).add("etag", etag).toString(); } + + DeleteAccountIpAccessEnableResponsePb toPb() { + DeleteAccountIpAccessEnableResponsePb pb = new DeleteAccountIpAccessEnableResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAccountIpAccessEnableResponse fromPb(DeleteAccountIpAccessEnableResponsePb pb) { + DeleteAccountIpAccessEnableResponse model = new DeleteAccountIpAccessEnableResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAccountIpAccessEnableResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountIpAccessEnableResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountIpAccessEnableResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountIpAccessEnableResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountIpAccessEnableResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountIpAccessEnableResponsePb pb = + mapper.readValue(p, DeleteAccountIpAccessEnableResponsePb.class); + return DeleteAccountIpAccessEnableResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponsePb.java new file mode 100755 index 000000000..83bfe2403 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteAccountIpAccessEnableResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteAccountIpAccessEnableResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountIpAccessEnableResponsePb that = (DeleteAccountIpAccessEnableResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountIpAccessEnableResponsePb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java index 0a3852ec5..7807aad0f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** 
Delete access list */ @Generated +@JsonSerialize( + using = DeleteAccountIpAccessListRequest.DeleteAccountIpAccessListRequestSerializer.class) +@JsonDeserialize( + using = DeleteAccountIpAccessListRequest.DeleteAccountIpAccessListRequestDeserializer.class) public class DeleteAccountIpAccessListRequest { /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; public DeleteAccountIpAccessListRequest setIpAccessListId(String ipAccessListId) { this.ipAccessListId = ipAccessListId; @@ -41,4 +54,42 @@ public String toString() { .add("ipAccessListId", ipAccessListId) .toString(); } + + DeleteAccountIpAccessListRequestPb toPb() { + DeleteAccountIpAccessListRequestPb pb = new DeleteAccountIpAccessListRequestPb(); + pb.setIpAccessListId(ipAccessListId); + + return pb; + } + + static DeleteAccountIpAccessListRequest fromPb(DeleteAccountIpAccessListRequestPb pb) { + DeleteAccountIpAccessListRequest model = new DeleteAccountIpAccessListRequest(); + model.setIpAccessListId(pb.getIpAccessListId()); + + return model; + } + + public static class DeleteAccountIpAccessListRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAccountIpAccessListRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAccountIpAccessListRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAccountIpAccessListRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAccountIpAccessListRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAccountIpAccessListRequestPb pb = + mapper.readValue(p, DeleteAccountIpAccessListRequestPb.class); + return DeleteAccountIpAccessListRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequestPb.java new file mode 100755 index 000000000..5b0696f33 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete access list */ +@Generated +class DeleteAccountIpAccessListRequestPb { + @JsonIgnore private String ipAccessListId; + + public DeleteAccountIpAccessListRequestPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAccountIpAccessListRequestPb that = (DeleteAccountIpAccessListRequestPb) o; + return Objects.equals(ipAccessListId, that.ipAccessListId); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessListId); + } + + @Override + public String toString() { + return new ToStringer(DeleteAccountIpAccessListRequestPb.class) + .add("ipAccessListId", ipAccessListId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java index 468e464fb..53a9ad4b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the AI/BI dashboard embedding access policy */ @Generated +@JsonSerialize( + using = + DeleteAibiDashboardEmbeddingAccessPolicySettingRequest + .DeleteAibiDashboardEmbeddingAccessPolicySettingRequestSerializer.class) +@JsonDeserialize( + using = + DeleteAibiDashboardEmbeddingAccessPolicySettingRequest + .DeleteAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer.class) public class DeleteAibiDashboardEmbeddingAccessPolicySettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +35,6 @@ public class DeleteAibiDashboardEmbeddingAccessPolicySettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb toPb() { + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + new DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAibiDashboardEmbeddingAccessPolicySettingRequest fromPb( + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb pb) { + DeleteAibiDashboardEmbeddingAccessPolicySettingRequest model = + new DeleteAibiDashboardEmbeddingAccessPolicySettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAibiDashboardEmbeddingAccessPolicySettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAibiDashboardEmbeddingAccessPolicySettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAibiDashboardEmbeddingAccessPolicySettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + mapper.readValue(p, DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb.class); + return DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb.java new file mode 100755 index 000000000..809cd9bb1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the AI/BI dashboard embedding access policy */ +@Generated +class DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb that = + (DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteAibiDashboardEmbeddingAccessPolicySettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java index 6acb42ae6..2d891d4e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java @@ -4,11 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteAibiDashboardEmbeddingAccessPolicySettingResponse + .DeleteAibiDashboardEmbeddingAccessPolicySettingResponseSerializer.class) +@JsonDeserialize( + using = + DeleteAibiDashboardEmbeddingAccessPolicySettingResponse + .DeleteAibiDashboardEmbeddingAccessPolicySettingResponseDeserializer.class) public class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -18,7 +35,6 @@ public class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonProperty("etag") private String etag; public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse setEtag(String etag) { @@ -50,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb toPb() { + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb pb = + new DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAibiDashboardEmbeddingAccessPolicySettingResponse fromPb( + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb pb) { + DeleteAibiDashboardEmbeddingAccessPolicySettingResponse model = + new DeleteAibiDashboardEmbeddingAccessPolicySettingResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAibiDashboardEmbeddingAccessPolicySettingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAibiDashboardEmbeddingAccessPolicySettingResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAibiDashboardEmbeddingAccessPolicySettingResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb pb = + mapper.readValue(p, DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb.class); + return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb.java new file mode 100755 index 000000000..2ea050b6d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb that = + (DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteAibiDashboardEmbeddingAccessPolicySettingResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java index 686ba154e..ec86862df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete AI/BI dashboard embedding approved domains */ @Generated +@JsonSerialize( + using = + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest + .DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer.class) +@JsonDeserialize( + using = + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest + .DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer.class) public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb toPb() { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + new DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest fromPb( + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb) { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest model = + new DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + mapper.readValue(p, DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class); + return DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java new file mode 100755 index 000000000..b6844543d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete AI/BI dashboard embedding approved domains */ +@Generated +class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb that = + (DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java index 8ee982c58..63246b401 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java @@ -4,11 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse + .DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponseSerializer.class) +@JsonDeserialize( + using = + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse + .DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponseDeserializer.class) public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -18,7 +35,6 @@ public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonProperty("etag") private String etag; public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse setEtag(String etag) { @@ -50,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb toPb() { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb pb = + new DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse fromPb( + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb pb) { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse model = + new DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb pb = + mapper.readValue(p, DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb.class); + return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb.java new file mode 100755 index 000000000..d659e9b44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb that = + (DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequest.java index 8d3d36912..e628f39af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the Dashboard Email Subscriptions setting */ @Generated +@JsonSerialize( + using = + DeleteDashboardEmailSubscriptionsRequest.DeleteDashboardEmailSubscriptionsRequestSerializer + .class) +@JsonDeserialize( + using = + DeleteDashboardEmailSubscriptionsRequest + .DeleteDashboardEmailSubscriptionsRequestDeserializer.class) public class DeleteDashboardEmailSubscriptionsRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class DeleteDashboardEmailSubscriptionsRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteDashboardEmailSubscriptionsRequest setEtag(String etag) { @@ -51,4 +65,46 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteDashboardEmailSubscriptionsRequestPb toPb() { + DeleteDashboardEmailSubscriptionsRequestPb pb = + new DeleteDashboardEmailSubscriptionsRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDashboardEmailSubscriptionsRequest fromPb( + DeleteDashboardEmailSubscriptionsRequestPb pb) { + DeleteDashboardEmailSubscriptionsRequest model = new DeleteDashboardEmailSubscriptionsRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDashboardEmailSubscriptionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDashboardEmailSubscriptionsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteDashboardEmailSubscriptionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static 
class DeleteDashboardEmailSubscriptionsRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDashboardEmailSubscriptionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDashboardEmailSubscriptionsRequestPb pb = + mapper.readValue(p, DeleteDashboardEmailSubscriptionsRequestPb.class); + return DeleteDashboardEmailSubscriptionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequestPb.java new file mode 100755 index 000000000..367b21418 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the Dashboard Email Subscriptions setting */ +@Generated +class DeleteDashboardEmailSubscriptionsRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDashboardEmailSubscriptionsRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDashboardEmailSubscriptionsRequestPb that = + (DeleteDashboardEmailSubscriptionsRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDashboardEmailSubscriptionsRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java index 1cfa511ae..afbfe66f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java @@ -4,11 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteDashboardEmailSubscriptionsResponse + .DeleteDashboardEmailSubscriptionsResponseSerializer.class) +@JsonDeserialize( + using = + DeleteDashboardEmailSubscriptionsResponse + .DeleteDashboardEmailSubscriptionsResponseDeserializer.class) public class DeleteDashboardEmailSubscriptionsResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +35,6 @@ public class DeleteDashboardEmailSubscriptionsResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteDashboardEmailSubscriptionsResponse setEtag(String etag) { @@ -49,4 +65,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteDashboardEmailSubscriptionsResponsePb toPb() { + DeleteDashboardEmailSubscriptionsResponsePb pb = + new DeleteDashboardEmailSubscriptionsResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDashboardEmailSubscriptionsResponse fromPb( + DeleteDashboardEmailSubscriptionsResponsePb pb) { + DeleteDashboardEmailSubscriptionsResponse model = + new DeleteDashboardEmailSubscriptionsResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDashboardEmailSubscriptionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDashboardEmailSubscriptionsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteDashboardEmailSubscriptionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDashboardEmailSubscriptionsResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDashboardEmailSubscriptionsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDashboardEmailSubscriptionsResponsePb pb = + mapper.readValue(p, DeleteDashboardEmailSubscriptionsResponsePb.class); + return DeleteDashboardEmailSubscriptionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponsePb.java new file mode 100755 index 000000000..4161a7ba2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteDashboardEmailSubscriptionsResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteDashboardEmailSubscriptionsResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDashboardEmailSubscriptionsResponsePb that = + (DeleteDashboardEmailSubscriptionsResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDashboardEmailSubscriptionsResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java index 29aa023ed..1455b2f88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java @@ -3,13 +3,27 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the default namespace setting */ @Generated +@JsonSerialize( + using = + DeleteDefaultNamespaceSettingRequest.DeleteDefaultNamespaceSettingRequestSerializer.class) +@JsonDeserialize( + using = + DeleteDefaultNamespaceSettingRequest.DeleteDefaultNamespaceSettingRequestDeserializer.class) public class DeleteDefaultNamespaceSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +33,6 @@ public class DeleteDefaultNamespaceSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteDefaultNamespaceSettingRequest setEtag(String etag) { @@ -49,4 +61,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDefaultNamespaceSettingRequest.class).add("etag", etag).toString(); } + + DeleteDefaultNamespaceSettingRequestPb toPb() { + DeleteDefaultNamespaceSettingRequestPb pb = new DeleteDefaultNamespaceSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDefaultNamespaceSettingRequest fromPb(DeleteDefaultNamespaceSettingRequestPb pb) { + DeleteDefaultNamespaceSettingRequest model = new DeleteDefaultNamespaceSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDefaultNamespaceSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDefaultNamespaceSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDefaultNamespaceSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDefaultNamespaceSettingRequestDeserializer + extends 
JsonDeserializer { + @Override + public DeleteDefaultNamespaceSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDefaultNamespaceSettingRequestPb pb = + mapper.readValue(p, DeleteDefaultNamespaceSettingRequestPb.class); + return DeleteDefaultNamespaceSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequestPb.java new file mode 100755 index 000000000..6d44abc3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the default namespace setting */ +@Generated +class DeleteDefaultNamespaceSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDefaultNamespaceSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDefaultNamespaceSettingRequestPb that = (DeleteDefaultNamespaceSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteDefaultNamespaceSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java index 7cc259234..8807272ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java @@ -4,11 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteDefaultNamespaceSettingResponse.DeleteDefaultNamespaceSettingResponseSerializer.class) +@JsonDeserialize( + using = + DeleteDefaultNamespaceSettingResponse.DeleteDefaultNamespaceSettingResponseDeserializer + .class) public class DeleteDefaultNamespaceSettingResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +34,6 @@ public class DeleteDefaultNamespaceSettingResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteDefaultNamespaceSettingResponse setEtag(String etag) { @@ -47,4 +62,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDefaultNamespaceSettingResponse.class).add("etag", etag).toString(); } + + DeleteDefaultNamespaceSettingResponsePb toPb() { + DeleteDefaultNamespaceSettingResponsePb pb = new DeleteDefaultNamespaceSettingResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDefaultNamespaceSettingResponse fromPb(DeleteDefaultNamespaceSettingResponsePb pb) { + DeleteDefaultNamespaceSettingResponse model = new DeleteDefaultNamespaceSettingResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDefaultNamespaceSettingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDefaultNamespaceSettingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDefaultNamespaceSettingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDefaultNamespaceSettingResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDefaultNamespaceSettingResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDefaultNamespaceSettingResponsePb pb = + mapper.readValue(p, DeleteDefaultNamespaceSettingResponsePb.class); + return DeleteDefaultNamespaceSettingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponsePb.java new file mode 100755 index 000000000..deac64b2e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteDefaultNamespaceSettingResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteDefaultNamespaceSettingResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDefaultNamespaceSettingResponsePb that = (DeleteDefaultNamespaceSettingResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDefaultNamespaceSettingResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java index c8824e5ed..0c0b4697e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete Legacy Access Disablement Status */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyAccessRequest.DeleteDisableLegacyAccessRequestSerializer.class) +@JsonDeserialize( + using = DeleteDisableLegacyAccessRequest.DeleteDisableLegacyAccessRequestDeserializer.class) public class DeleteDisableLegacyAccessRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class DeleteDisableLegacyAccessRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteDisableLegacyAccessRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyAccessRequest.class).add("etag", etag).toString(); } + + DeleteDisableLegacyAccessRequestPb toPb() { + DeleteDisableLegacyAccessRequestPb pb = new DeleteDisableLegacyAccessRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyAccessRequest fromPb(DeleteDisableLegacyAccessRequestPb pb) { + DeleteDisableLegacyAccessRequest model = new DeleteDisableLegacyAccessRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyAccessRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyAccessRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyAccessRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyAccessRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyAccessRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyAccessRequestPb pb = + mapper.readValue(p, DeleteDisableLegacyAccessRequestPb.class); + return DeleteDisableLegacyAccessRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequestPb.java new file mode 100755 index 000000000..ef2fe5000 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete Legacy Access Disablement Status */ +@Generated +class DeleteDisableLegacyAccessRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDisableLegacyAccessRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyAccessRequestPb that = (DeleteDisableLegacyAccessRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyAccessRequestPb.class).add("etag", etag).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java index ad22d588f..c9ec95a28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java @@ -4,11 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyAccessResponse.DeleteDisableLegacyAccessResponseSerializer.class) +@JsonDeserialize( + using = DeleteDisableLegacyAccessResponse.DeleteDisableLegacyAccessResponseDeserializer.class) public class DeleteDisableLegacyAccessResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +31,6 @@ public class DeleteDisableLegacyAccessResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteDisableLegacyAccessResponse setEtag(String etag) { @@ -47,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyAccessResponse.class).add("etag", etag).toString(); } + + DeleteDisableLegacyAccessResponsePb toPb() { + DeleteDisableLegacyAccessResponsePb pb = new DeleteDisableLegacyAccessResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyAccessResponse fromPb(DeleteDisableLegacyAccessResponsePb pb) { + DeleteDisableLegacyAccessResponse model = new DeleteDisableLegacyAccessResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyAccessResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyAccessResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyAccessResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyAccessResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyAccessResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyAccessResponsePb pb = + mapper.readValue(p, DeleteDisableLegacyAccessResponsePb.class); + return DeleteDisableLegacyAccessResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponsePb.java new file mode 100755 index 000000000..77619bcae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +class DeleteDisableLegacyAccessResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteDisableLegacyAccessResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyAccessResponsePb that = (DeleteDisableLegacyAccessResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyAccessResponsePb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java index 
51e3b14b3..6d69d7a2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the disable legacy DBFS setting */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyDbfsRequest.DeleteDisableLegacyDbfsRequestSerializer.class) +@JsonDeserialize( + using = DeleteDisableLegacyDbfsRequest.DeleteDisableLegacyDbfsRequestDeserializer.class) public class DeleteDisableLegacyDbfsRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class DeleteDisableLegacyDbfsRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteDisableLegacyDbfsRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyDbfsRequest.class).add("etag", etag).toString(); } + + DeleteDisableLegacyDbfsRequestPb toPb() { + DeleteDisableLegacyDbfsRequestPb pb = new DeleteDisableLegacyDbfsRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyDbfsRequest fromPb(DeleteDisableLegacyDbfsRequestPb pb) { + DeleteDisableLegacyDbfsRequest model = new DeleteDisableLegacyDbfsRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyDbfsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyDbfsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyDbfsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyDbfsRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyDbfsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyDbfsRequestPb pb = + mapper.readValue(p, DeleteDisableLegacyDbfsRequestPb.class); + return DeleteDisableLegacyDbfsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequestPb.java new file mode 100755 index 000000000..12f850034 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the disable legacy DBFS setting */ +@Generated +class DeleteDisableLegacyDbfsRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDisableLegacyDbfsRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyDbfsRequestPb that = (DeleteDisableLegacyDbfsRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyDbfsRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java index 0bb408d38..750d6db9b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java @@ -4,11 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyDbfsResponse.DeleteDisableLegacyDbfsResponseSerializer.class) +@JsonDeserialize( + using = DeleteDisableLegacyDbfsResponse.DeleteDisableLegacyDbfsResponseDeserializer.class) public class DeleteDisableLegacyDbfsResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +31,6 @@ public class DeleteDisableLegacyDbfsResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteDisableLegacyDbfsResponse setEtag(String etag) { @@ -47,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyDbfsResponse.class).add("etag", etag).toString(); } + + DeleteDisableLegacyDbfsResponsePb toPb() { + DeleteDisableLegacyDbfsResponsePb pb = new DeleteDisableLegacyDbfsResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyDbfsResponse fromPb(DeleteDisableLegacyDbfsResponsePb pb) { + DeleteDisableLegacyDbfsResponse model = new DeleteDisableLegacyDbfsResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyDbfsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyDbfsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyDbfsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyDbfsResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyDbfsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyDbfsResponsePb pb = + mapper.readValue(p, DeleteDisableLegacyDbfsResponsePb.class); + return DeleteDisableLegacyDbfsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponsePb.java new file mode 100755 index 000000000..6cb108371 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +class DeleteDisableLegacyDbfsResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteDisableLegacyDbfsResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyDbfsResponsePb that = (DeleteDisableLegacyDbfsResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyDbfsResponsePb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java index 23fbc5949..bb179f657 100755 
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the disable legacy features setting */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyFeaturesRequest.DeleteDisableLegacyFeaturesRequestSerializer.class) +@JsonDeserialize( + using = DeleteDisableLegacyFeaturesRequest.DeleteDisableLegacyFeaturesRequestDeserializer.class) public class DeleteDisableLegacyFeaturesRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class DeleteDisableLegacyFeaturesRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteDisableLegacyFeaturesRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyFeaturesRequest.class).add("etag", etag).toString(); } + + DeleteDisableLegacyFeaturesRequestPb toPb() { + DeleteDisableLegacyFeaturesRequestPb pb = new DeleteDisableLegacyFeaturesRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyFeaturesRequest fromPb(DeleteDisableLegacyFeaturesRequestPb pb) { + DeleteDisableLegacyFeaturesRequest model = new DeleteDisableLegacyFeaturesRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyFeaturesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyFeaturesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyFeaturesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyFeaturesRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyFeaturesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyFeaturesRequestPb pb = + mapper.readValue(p, DeleteDisableLegacyFeaturesRequestPb.class); + return DeleteDisableLegacyFeaturesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequestPb.java new file mode 100755 index 000000000..71722fb3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the disable legacy features setting */ +@Generated +class DeleteDisableLegacyFeaturesRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDisableLegacyFeaturesRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyFeaturesRequestPb that = (DeleteDisableLegacyFeaturesRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyFeaturesRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java index dd7b8d8d3..99e649629 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java @@ -4,11 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = DeleteDisableLegacyFeaturesResponse.DeleteDisableLegacyFeaturesResponseSerializer.class) +@JsonDeserialize( + using = + DeleteDisableLegacyFeaturesResponse.DeleteDisableLegacyFeaturesResponseDeserializer.class) public class DeleteDisableLegacyFeaturesResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +32,6 @@ public class DeleteDisableLegacyFeaturesResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteDisableLegacyFeaturesResponse setEtag(String etag) { @@ -47,4 +60,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDisableLegacyFeaturesResponse.class).add("etag", etag).toString(); } + + DeleteDisableLegacyFeaturesResponsePb toPb() { + DeleteDisableLegacyFeaturesResponsePb pb = new DeleteDisableLegacyFeaturesResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteDisableLegacyFeaturesResponse fromPb(DeleteDisableLegacyFeaturesResponsePb pb) { + DeleteDisableLegacyFeaturesResponse model = new DeleteDisableLegacyFeaturesResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteDisableLegacyFeaturesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDisableLegacyFeaturesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDisableLegacyFeaturesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDisableLegacyFeaturesResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDisableLegacyFeaturesResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDisableLegacyFeaturesResponsePb pb = + mapper.readValue(p, DeleteDisableLegacyFeaturesResponsePb.class); + return DeleteDisableLegacyFeaturesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponsePb.java new file mode 100755 index 000000000..b59abeb7b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteDisableLegacyFeaturesResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteDisableLegacyFeaturesResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyFeaturesResponsePb that = (DeleteDisableLegacyFeaturesResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyFeaturesResponsePb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java index 1500e8355..124407a38 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete access list */ 
@Generated +@JsonSerialize(using = DeleteIpAccessListRequest.DeleteIpAccessListRequestSerializer.class) +@JsonDeserialize(using = DeleteIpAccessListRequest.DeleteIpAccessListRequestDeserializer.class) public class DeleteIpAccessListRequest { /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; public DeleteIpAccessListRequest setIpAccessListId(String ipAccessListId) { this.ipAccessListId = ipAccessListId; @@ -41,4 +52,41 @@ public String toString() { .add("ipAccessListId", ipAccessListId) .toString(); } + + DeleteIpAccessListRequestPb toPb() { + DeleteIpAccessListRequestPb pb = new DeleteIpAccessListRequestPb(); + pb.setIpAccessListId(ipAccessListId); + + return pb; + } + + static DeleteIpAccessListRequest fromPb(DeleteIpAccessListRequestPb pb) { + DeleteIpAccessListRequest model = new DeleteIpAccessListRequest(); + model.setIpAccessListId(pb.getIpAccessListId()); + + return model; + } + + public static class DeleteIpAccessListRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteIpAccessListRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteIpAccessListRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteIpAccessListRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteIpAccessListRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteIpAccessListRequestPb pb = mapper.readValue(p, DeleteIpAccessListRequestPb.class); + return DeleteIpAccessListRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequestPb.java new file mode 100755 index 000000000..876c5b028 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete access list */ +@Generated +class DeleteIpAccessListRequestPb { + @JsonIgnore private String ipAccessListId; + + public DeleteIpAccessListRequestPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteIpAccessListRequestPb that = (DeleteIpAccessListRequestPb) o; + return Objects.equals(ipAccessListId, that.ipAccessListId); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessListId); + } + + @Override + public String toString() { + return new ToStringer(DeleteIpAccessListRequestPb.class) + .add("ipAccessListId", ipAccessListId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java index b746ad732..1a002b9d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the enable partner powered AI features workspace setting */ @Generated +@JsonSerialize( + using = + DeleteLlmProxyPartnerPoweredWorkspaceRequest + .DeleteLlmProxyPartnerPoweredWorkspaceRequestSerializer.class) +@JsonDeserialize( + using = + DeleteLlmProxyPartnerPoweredWorkspaceRequest + .DeleteLlmProxyPartnerPoweredWorkspaceRequestDeserializer.class) public class DeleteLlmProxyPartnerPoweredWorkspaceRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class DeleteLlmProxyPartnerPoweredWorkspaceRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb toPb() { + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb pb = + new DeleteLlmProxyPartnerPoweredWorkspaceRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteLlmProxyPartnerPoweredWorkspaceRequest fromPb( + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb pb) { + DeleteLlmProxyPartnerPoweredWorkspaceRequest model = + new DeleteLlmProxyPartnerPoweredWorkspaceRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteLlmProxyPartnerPoweredWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLlmProxyPartnerPoweredWorkspaceRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteLlmProxyPartnerPoweredWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteLlmProxyPartnerPoweredWorkspaceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb pb = + mapper.readValue(p, DeleteLlmProxyPartnerPoweredWorkspaceRequestPb.class); + return DeleteLlmProxyPartnerPoweredWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequestPb.java new file mode 100755 index 000000000..253a05db3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the enable partner powered AI features workspace setting */ +@Generated +class DeleteLlmProxyPartnerPoweredWorkspaceRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteLlmProxyPartnerPoweredWorkspaceRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLlmProxyPartnerPoweredWorkspaceRequestPb that = + (DeleteLlmProxyPartnerPoweredWorkspaceRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java index 10524ea66..fefa37967 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java @@ -4,11 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteLlmProxyPartnerPoweredWorkspaceResponse + .DeleteLlmProxyPartnerPoweredWorkspaceResponseSerializer.class) +@JsonDeserialize( + using = + DeleteLlmProxyPartnerPoweredWorkspaceResponse + .DeleteLlmProxyPartnerPoweredWorkspaceResponseDeserializer.class) public class DeleteLlmProxyPartnerPoweredWorkspaceResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +35,6 @@ public class DeleteLlmProxyPartnerPoweredWorkspaceResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteLlmProxyPartnerPoweredWorkspaceResponse setEtag(String etag) { @@ -50,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb toPb() { + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb pb = + new DeleteLlmProxyPartnerPoweredWorkspaceResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteLlmProxyPartnerPoweredWorkspaceResponse fromPb( + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb pb) { + DeleteLlmProxyPartnerPoweredWorkspaceResponse model = + new DeleteLlmProxyPartnerPoweredWorkspaceResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteLlmProxyPartnerPoweredWorkspaceResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteLlmProxyPartnerPoweredWorkspaceResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteLlmProxyPartnerPoweredWorkspaceResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteLlmProxyPartnerPoweredWorkspaceResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb pb = + mapper.readValue(p, DeleteLlmProxyPartnerPoweredWorkspaceResponsePb.class); + return DeleteLlmProxyPartnerPoweredWorkspaceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponsePb.java new file mode 100755 index 000000000..19618b2b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteLlmProxyPartnerPoweredWorkspaceResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteLlmProxyPartnerPoweredWorkspaceResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLlmProxyPartnerPoweredWorkspaceResponsePb that = + (DeleteLlmProxyPartnerPoweredWorkspaceResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java index b4a5f6e09..c237694c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a network connectivity configuration */ @Generated +@JsonSerialize( + using = + DeleteNetworkConnectivityConfigurationRequest + .DeleteNetworkConnectivityConfigurationRequestSerializer.class) +@JsonDeserialize( + using = + DeleteNetworkConnectivityConfigurationRequest + .DeleteNetworkConnectivityConfigurationRequestDeserializer.class) public class DeleteNetworkConnectivityConfigurationRequest { /** Your Network Connectivity Configuration ID. */ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; public DeleteNetworkConnectivityConfigurationRequest setNetworkConnectivityConfigId( String networkConnectivityConfigId) { @@ -43,4 +60,47 @@ public String toString() { .add("networkConnectivityConfigId", networkConnectivityConfigId) .toString(); } + + DeleteNetworkConnectivityConfigurationRequestPb toPb() { + DeleteNetworkConnectivityConfigurationRequestPb pb = + new DeleteNetworkConnectivityConfigurationRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + + return pb; + } + + static DeleteNetworkConnectivityConfigurationRequest fromPb( + DeleteNetworkConnectivityConfigurationRequestPb pb) { + DeleteNetworkConnectivityConfigurationRequest model = + new DeleteNetworkConnectivityConfigurationRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + + return model; + } + + public static class DeleteNetworkConnectivityConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteNetworkConnectivityConfigurationRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteNetworkConnectivityConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteNetworkConnectivityConfigurationRequestDeserializer + extends 
JsonDeserializer { + @Override + public DeleteNetworkConnectivityConfigurationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNetworkConnectivityConfigurationRequestPb pb = + mapper.readValue(p, DeleteNetworkConnectivityConfigurationRequestPb.class); + return DeleteNetworkConnectivityConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequestPb.java new file mode 100755 index 000000000..980ae86fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a network connectivity configuration */ +@Generated +class DeleteNetworkConnectivityConfigurationRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + public DeleteNetworkConnectivityConfigurationRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteNetworkConnectivityConfigurationRequestPb that = + (DeleteNetworkConnectivityConfigurationRequestPb) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkConnectivityConfigurationRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java index 2a3b7a38e..1c8fc461e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java @@ -4,9 +4,27 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + DeleteNetworkConnectivityConfigurationResponse + .DeleteNetworkConnectivityConfigurationResponseSerializer.class) +@JsonDeserialize( + using = + DeleteNetworkConnectivityConfigurationResponse + .DeleteNetworkConnectivityConfigurationResponseDeserializer.class) public class DeleteNetworkConnectivityConfigurationResponse { @Override @@ -25,4 +43,45 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteNetworkConnectivityConfigurationResponse.class).toString(); } + + DeleteNetworkConnectivityConfigurationResponsePb toPb() { + DeleteNetworkConnectivityConfigurationResponsePb pb = + new DeleteNetworkConnectivityConfigurationResponsePb(); + + return pb; + } + + static DeleteNetworkConnectivityConfigurationResponse fromPb( + DeleteNetworkConnectivityConfigurationResponsePb pb) { + DeleteNetworkConnectivityConfigurationResponse model = + new DeleteNetworkConnectivityConfigurationResponse(); + + return model; + } + + public static class DeleteNetworkConnectivityConfigurationResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteNetworkConnectivityConfigurationResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteNetworkConnectivityConfigurationResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class DeleteNetworkConnectivityConfigurationResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteNetworkConnectivityConfigurationResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNetworkConnectivityConfigurationResponsePb pb = + mapper.readValue(p, DeleteNetworkConnectivityConfigurationResponsePb.class); + return DeleteNetworkConnectivityConfigurationResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponsePb.java new file mode 100755 index 000000000..46cfcf53a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteNetworkConnectivityConfigurationResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkConnectivityConfigurationResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java index cd646bba1..6f29c5fd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a network policy */ @Generated +@JsonSerialize(using = DeleteNetworkPolicyRequest.DeleteNetworkPolicyRequestSerializer.class) +@JsonDeserialize(using = 
DeleteNetworkPolicyRequest.DeleteNetworkPolicyRequestDeserializer.class) public class DeleteNetworkPolicyRequest { /** The unique identifier of the network policy to delete. */ - @JsonIgnore private String networkPolicyId; + private String networkPolicyId; public DeleteNetworkPolicyRequest setNetworkPolicyId(String networkPolicyId) { this.networkPolicyId = networkPolicyId; @@ -41,4 +52,41 @@ public String toString() { .add("networkPolicyId", networkPolicyId) .toString(); } + + DeleteNetworkPolicyRequestPb toPb() { + DeleteNetworkPolicyRequestPb pb = new DeleteNetworkPolicyRequestPb(); + pb.setNetworkPolicyId(networkPolicyId); + + return pb; + } + + static DeleteNetworkPolicyRequest fromPb(DeleteNetworkPolicyRequestPb pb) { + DeleteNetworkPolicyRequest model = new DeleteNetworkPolicyRequest(); + model.setNetworkPolicyId(pb.getNetworkPolicyId()); + + return model; + } + + public static class DeleteNetworkPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteNetworkPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteNetworkPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteNetworkPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteNetworkPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNetworkPolicyRequestPb pb = mapper.readValue(p, DeleteNetworkPolicyRequestPb.class); + return DeleteNetworkPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequestPb.java new file mode 100755 index 000000000..91e2ba596 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a network policy */ +@Generated +class DeleteNetworkPolicyRequestPb { + @JsonIgnore private String networkPolicyId; + + public DeleteNetworkPolicyRequestPb setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteNetworkPolicyRequestPb that = (DeleteNetworkPolicyRequestPb) o; + return Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkPolicyRequestPb.class) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java index 5d161f110..f26d16128 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java @@ -4,9 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = DeleteNetworkPolicyRpcResponse.DeleteNetworkPolicyRpcResponseSerializer.class) +@JsonDeserialize( + using = DeleteNetworkPolicyRpcResponse.DeleteNetworkPolicyRpcResponseDeserializer.class) public class DeleteNetworkPolicyRpcResponse { @Override @@ -25,4 +39,40 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteNetworkPolicyRpcResponse.class).toString(); } + + DeleteNetworkPolicyRpcResponsePb toPb() { + DeleteNetworkPolicyRpcResponsePb pb = new DeleteNetworkPolicyRpcResponsePb(); + + return pb; + } + + static DeleteNetworkPolicyRpcResponse fromPb(DeleteNetworkPolicyRpcResponsePb pb) { + DeleteNetworkPolicyRpcResponse model = new DeleteNetworkPolicyRpcResponse(); + + return model; + } + + public static class DeleteNetworkPolicyRpcResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteNetworkPolicyRpcResponse value, JsonGenerator gen, 
SerializerProvider provider) + throws IOException { + DeleteNetworkPolicyRpcResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteNetworkPolicyRpcResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteNetworkPolicyRpcResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNetworkPolicyRpcResponsePb pb = + mapper.readValue(p, DeleteNetworkPolicyRpcResponsePb.class); + return DeleteNetworkPolicyRpcResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponsePb.java new file mode 100755 index 000000000..04c0cc0f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteNetworkPolicyRpcResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkPolicyRpcResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java index e6aeebdae..af4142ebf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a notification destination */ @Generated +@JsonSerialize( + using = + DeleteNotificationDestinationRequest.DeleteNotificationDestinationRequestSerializer.class) 
+@JsonDeserialize( + using = + DeleteNotificationDestinationRequest.DeleteNotificationDestinationRequestDeserializer.class) public class DeleteNotificationDestinationRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteNotificationDestinationRequest setId(String id) { this.id = id; @@ -39,4 +54,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteNotificationDestinationRequest.class).add("id", id).toString(); } + + DeleteNotificationDestinationRequestPb toPb() { + DeleteNotificationDestinationRequestPb pb = new DeleteNotificationDestinationRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteNotificationDestinationRequest fromPb(DeleteNotificationDestinationRequestPb pb) { + DeleteNotificationDestinationRequest model = new DeleteNotificationDestinationRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteNotificationDestinationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteNotificationDestinationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteNotificationDestinationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteNotificationDestinationRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteNotificationDestinationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteNotificationDestinationRequestPb pb = + mapper.readValue(p, DeleteNotificationDestinationRequestPb.class); + return DeleteNotificationDestinationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequestPb.java new file mode 100755 index 000000000..d0b9fef68 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a notification destination */ +@Generated +class DeleteNotificationDestinationRequestPb { + @JsonIgnore private String id; + + public DeleteNotificationDestinationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteNotificationDestinationRequestPb that = (DeleteNotificationDestinationRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteNotificationDestinationRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java 
index c16f60779..4065e304c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java @@ -3,13 +3,26 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete Personal Compute setting */ @Generated +@JsonSerialize( + using = DeletePersonalComputeSettingRequest.DeletePersonalComputeSettingRequestSerializer.class) +@JsonDeserialize( + using = + DeletePersonalComputeSettingRequest.DeletePersonalComputeSettingRequestDeserializer.class) public class DeletePersonalComputeSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +32,6 @@ public class DeletePersonalComputeSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public DeletePersonalComputeSettingRequest setEtag(String etag) { @@ -49,4 +60,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePersonalComputeSettingRequest.class).add("etag", etag).toString(); } + + DeletePersonalComputeSettingRequestPb toPb() { + DeletePersonalComputeSettingRequestPb pb = new DeletePersonalComputeSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeletePersonalComputeSettingRequest fromPb(DeletePersonalComputeSettingRequestPb pb) { + DeletePersonalComputeSettingRequest model = new DeletePersonalComputeSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeletePersonalComputeSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePersonalComputeSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePersonalComputeSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePersonalComputeSettingRequestDeserializer + extends JsonDeserializer { + @Override + public DeletePersonalComputeSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePersonalComputeSettingRequestPb pb = + mapper.readValue(p, DeletePersonalComputeSettingRequestPb.class); + return DeletePersonalComputeSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequestPb.java new file mode 100755 index 000000000..179f2db79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete Personal Compute setting */ +@Generated +class DeletePersonalComputeSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeletePersonalComputeSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePersonalComputeSettingRequestPb that = (DeletePersonalComputeSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeletePersonalComputeSettingRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java index 58be66bd7..3330e7e81 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java @@ -4,11 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeletePersonalComputeSettingResponse.DeletePersonalComputeSettingResponseSerializer.class) +@JsonDeserialize( + using = + DeletePersonalComputeSettingResponse.DeletePersonalComputeSettingResponseDeserializer.class) public class DeletePersonalComputeSettingResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +33,6 @@ public class DeletePersonalComputeSettingResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeletePersonalComputeSettingResponse setEtag(String etag) { @@ -47,4 +61,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeletePersonalComputeSettingResponse.class).add("etag", etag).toString(); } + + DeletePersonalComputeSettingResponsePb toPb() { + DeletePersonalComputeSettingResponsePb pb = new DeletePersonalComputeSettingResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeletePersonalComputeSettingResponse fromPb(DeletePersonalComputeSettingResponsePb pb) { + DeletePersonalComputeSettingResponse model = new DeletePersonalComputeSettingResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeletePersonalComputeSettingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePersonalComputeSettingResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePersonalComputeSettingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePersonalComputeSettingResponseDeserializer + extends JsonDeserializer { + @Override + public DeletePersonalComputeSettingResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePersonalComputeSettingResponsePb pb = + mapper.readValue(p, DeletePersonalComputeSettingResponsePb.class); + return DeletePersonalComputeSettingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponsePb.java new file mode 100755 index 000000000..1b280bf64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeletePersonalComputeSettingResponsePb { + @JsonProperty("etag") + private String etag; + + public DeletePersonalComputeSettingResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePersonalComputeSettingResponsePb that = (DeletePersonalComputeSettingResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeletePersonalComputeSettingResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java index 3b74da91b..115011895 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java @@ -4,17 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** Delete a private endpoint rule */ @Generated +@JsonSerialize( + using = DeletePrivateEndpointRuleRequest.DeletePrivateEndpointRuleRequestSerializer.class) +@JsonDeserialize( + using = DeletePrivateEndpointRuleRequest.DeletePrivateEndpointRuleRequestDeserializer.class) public class DeletePrivateEndpointRuleRequest { /** Your Network Connectvity Configuration ID. */ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; /** Your private endpoint rule ID. */ - @JsonIgnore private String privateEndpointRuleId; + private String privateEndpointRuleId; public DeletePrivateEndpointRuleRequest setNetworkConnectivityConfigId( String networkConnectivityConfigId) { @@ -56,4 +69,44 @@ public String toString() { .add("privateEndpointRuleId", privateEndpointRuleId) .toString(); } + + DeletePrivateEndpointRuleRequestPb toPb() { + DeletePrivateEndpointRuleRequestPb pb = new DeletePrivateEndpointRuleRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setPrivateEndpointRuleId(privateEndpointRuleId); + + return pb; + } + + static DeletePrivateEndpointRuleRequest fromPb(DeletePrivateEndpointRuleRequestPb pb) { + DeletePrivateEndpointRuleRequest model = new DeletePrivateEndpointRuleRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setPrivateEndpointRuleId(pb.getPrivateEndpointRuleId()); + + return model; + } + + public static class DeletePrivateEndpointRuleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeletePrivateEndpointRuleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePrivateEndpointRuleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeletePrivateEndpointRuleRequestDeserializer + extends JsonDeserializer { + @Override + public DeletePrivateEndpointRuleRequest deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePrivateEndpointRuleRequestPb pb = + mapper.readValue(p, DeletePrivateEndpointRuleRequestPb.class); + return DeletePrivateEndpointRuleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequestPb.java new file mode 100755 index 000000000..358fca205 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a private endpoint rule */ +@Generated +class DeletePrivateEndpointRuleRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + @JsonIgnore private String privateEndpointRuleId; + + public DeletePrivateEndpointRuleRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public DeletePrivateEndpointRuleRequestPb setPrivateEndpointRuleId(String privateEndpointRuleId) { + this.privateEndpointRuleId = privateEndpointRuleId; + return this; + } + + public String getPrivateEndpointRuleId() { + return privateEndpointRuleId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
DeletePrivateEndpointRuleRequestPb that = (DeletePrivateEndpointRuleRequestPb) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId, privateEndpointRuleId); + } + + @Override + public String toString() { + return new ToStringer(DeletePrivateEndpointRuleRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("privateEndpointRuleId", privateEndpointRuleId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java index b8bc53d67..4ff43e0cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new 
ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponsePb.java new file mode 100755 index 000000000..a19571a3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java index 9f893f43f..58bc61a58 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the restrict workspace admins setting */ @Generated +@JsonSerialize( + using 
= + DeleteRestrictWorkspaceAdminsSettingRequest + .DeleteRestrictWorkspaceAdminsSettingRequestSerializer.class) +@JsonDeserialize( + using = + DeleteRestrictWorkspaceAdminsSettingRequest + .DeleteRestrictWorkspaceAdminsSettingRequestDeserializer.class) public class DeleteRestrictWorkspaceAdminsSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class DeleteRestrictWorkspaceAdminsSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteRestrictWorkspaceAdminsSettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteRestrictWorkspaceAdminsSettingRequestPb toPb() { + DeleteRestrictWorkspaceAdminsSettingRequestPb pb = + new DeleteRestrictWorkspaceAdminsSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteRestrictWorkspaceAdminsSettingRequest fromPb( + DeleteRestrictWorkspaceAdminsSettingRequestPb pb) { + DeleteRestrictWorkspaceAdminsSettingRequest model = + new DeleteRestrictWorkspaceAdminsSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteRestrictWorkspaceAdminsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteRestrictWorkspaceAdminsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteRestrictWorkspaceAdminsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRestrictWorkspaceAdminsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteRestrictWorkspaceAdminsSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, 
and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRestrictWorkspaceAdminsSettingRequestPb pb = + mapper.readValue(p, DeleteRestrictWorkspaceAdminsSettingRequestPb.class); + return DeleteRestrictWorkspaceAdminsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequestPb.java new file mode 100755 index 000000000..07f8ec1a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the restrict workspace admins setting */ +@Generated +class DeleteRestrictWorkspaceAdminsSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteRestrictWorkspaceAdminsSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRestrictWorkspaceAdminsSettingRequestPb that = + (DeleteRestrictWorkspaceAdminsSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteRestrictWorkspaceAdminsSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java index 8d5bf92a5..3da1a5721 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java @@ -4,11 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = + DeleteRestrictWorkspaceAdminsSettingResponse + .DeleteRestrictWorkspaceAdminsSettingResponseSerializer.class) +@JsonDeserialize( + using = + DeleteRestrictWorkspaceAdminsSettingResponse + .DeleteRestrictWorkspaceAdminsSettingResponseDeserializer.class) public class DeleteRestrictWorkspaceAdminsSettingResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +35,6 @@ public class DeleteRestrictWorkspaceAdminsSettingResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteRestrictWorkspaceAdminsSettingResponse setEtag(String etag) { @@ -50,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + DeleteRestrictWorkspaceAdminsSettingResponsePb toPb() { + DeleteRestrictWorkspaceAdminsSettingResponsePb pb = + new DeleteRestrictWorkspaceAdminsSettingResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteRestrictWorkspaceAdminsSettingResponse fromPb( + DeleteRestrictWorkspaceAdminsSettingResponsePb pb) { + DeleteRestrictWorkspaceAdminsSettingResponse model = + new DeleteRestrictWorkspaceAdminsSettingResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteRestrictWorkspaceAdminsSettingResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteRestrictWorkspaceAdminsSettingResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteRestrictWorkspaceAdminsSettingResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRestrictWorkspaceAdminsSettingResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteRestrictWorkspaceAdminsSettingResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRestrictWorkspaceAdminsSettingResponsePb pb = + mapper.readValue(p, DeleteRestrictWorkspaceAdminsSettingResponsePb.class); + return DeleteRestrictWorkspaceAdminsSettingResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponsePb.java new file mode 100755 index 000000000..509b76a41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. 
*/ +@Generated +class DeleteRestrictWorkspaceAdminsSettingResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteRestrictWorkspaceAdminsSettingResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRestrictWorkspaceAdminsSettingResponsePb that = + (DeleteRestrictWorkspaceAdminsSettingResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteRestrictWorkspaceAdminsSettingResponsePb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java index 3a5c3214a..7aa43ef33 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete the SQL Results Download setting */ @Generated +@JsonSerialize( + using = DeleteSqlResultsDownloadRequest.DeleteSqlResultsDownloadRequestSerializer.class) +@JsonDeserialize( + using = DeleteSqlResultsDownloadRequest.DeleteSqlResultsDownloadRequestDeserializer.class) public class DeleteSqlResultsDownloadRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class DeleteSqlResultsDownloadRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public DeleteSqlResultsDownloadRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteSqlResultsDownloadRequest.class).add("etag", etag).toString(); } + + DeleteSqlResultsDownloadRequestPb toPb() { + DeleteSqlResultsDownloadRequestPb pb = new DeleteSqlResultsDownloadRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteSqlResultsDownloadRequest fromPb(DeleteSqlResultsDownloadRequestPb pb) { + DeleteSqlResultsDownloadRequest model = new DeleteSqlResultsDownloadRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteSqlResultsDownloadRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSqlResultsDownloadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSqlResultsDownloadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSqlResultsDownloadRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteSqlResultsDownloadRequest deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSqlResultsDownloadRequestPb pb = + mapper.readValue(p, DeleteSqlResultsDownloadRequestPb.class); + return DeleteSqlResultsDownloadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequestPb.java new file mode 100755 index 000000000..2d5d0707c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the SQL Results Download setting */ +@Generated +class DeleteSqlResultsDownloadRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteSqlResultsDownloadRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSqlResultsDownloadRequestPb that = (DeleteSqlResultsDownloadRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteSqlResultsDownloadRequestPb.class).add("etag", etag).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponse.java index bc2957210..1e96302bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponse.java @@ -4,11 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The etag is returned. */ @Generated +@JsonSerialize( + using = DeleteSqlResultsDownloadResponse.DeleteSqlResultsDownloadResponseSerializer.class) +@JsonDeserialize( + using = DeleteSqlResultsDownloadResponse.DeleteSqlResultsDownloadResponseDeserializer.class) public class DeleteSqlResultsDownloadResponse { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -18,7 +31,6 @@ public class DeleteSqlResultsDownloadResponse { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonProperty("etag") private String etag; public DeleteSqlResultsDownloadResponse setEtag(String etag) { @@ -47,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteSqlResultsDownloadResponse.class).add("etag", etag).toString(); } + + DeleteSqlResultsDownloadResponsePb toPb() { + DeleteSqlResultsDownloadResponsePb pb = new DeleteSqlResultsDownloadResponsePb(); + pb.setEtag(etag); + + return pb; + } + + static DeleteSqlResultsDownloadResponse fromPb(DeleteSqlResultsDownloadResponsePb pb) { + DeleteSqlResultsDownloadResponse model = new DeleteSqlResultsDownloadResponse(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class DeleteSqlResultsDownloadResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteSqlResultsDownloadResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSqlResultsDownloadResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSqlResultsDownloadResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteSqlResultsDownloadResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSqlResultsDownloadResponsePb pb = + mapper.readValue(p, DeleteSqlResultsDownloadResponsePb.class); + return DeleteSqlResultsDownloadResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponsePb.java new file mode 100755 index 000000000..f678c8946 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +class DeleteSqlResultsDownloadResponsePb { + @JsonProperty("etag") + private String etag; + + public DeleteSqlResultsDownloadResponsePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSqlResultsDownloadResponsePb that = (DeleteSqlResultsDownloadResponsePb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteSqlResultsDownloadResponsePb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java index 98dcf2ab2..d5647e00e 100755 
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a token */ @Generated +@JsonSerialize(using = DeleteTokenManagementRequest.DeleteTokenManagementRequestSerializer.class) +@JsonDeserialize( + using = DeleteTokenManagementRequest.DeleteTokenManagementRequestDeserializer.class) public class DeleteTokenManagementRequest { /** The ID of the token to revoke. 
*/ - @JsonIgnore private String tokenId; + private String tokenId; public DeleteTokenManagementRequest setTokenId(String tokenId) { this.tokenId = tokenId; @@ -39,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteTokenManagementRequest.class).add("tokenId", tokenId).toString(); } + + DeleteTokenManagementRequestPb toPb() { + DeleteTokenManagementRequestPb pb = new DeleteTokenManagementRequestPb(); + pb.setTokenId(tokenId); + + return pb; + } + + static DeleteTokenManagementRequest fromPb(DeleteTokenManagementRequestPb pb) { + DeleteTokenManagementRequest model = new DeleteTokenManagementRequest(); + model.setTokenId(pb.getTokenId()); + + return model; + } + + public static class DeleteTokenManagementRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteTokenManagementRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteTokenManagementRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteTokenManagementRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteTokenManagementRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteTokenManagementRequestPb pb = mapper.readValue(p, DeleteTokenManagementRequestPb.class); + return DeleteTokenManagementRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequestPb.java new file mode 100755 index 000000000..5df8bf20b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a token */ +@Generated +class DeleteTokenManagementRequestPb { + @JsonIgnore private String tokenId; + + public DeleteTokenManagementRequestPb setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTokenManagementRequestPb that = (DeleteTokenManagementRequestPb) o; + return Objects.equals(tokenId, that.tokenId); + } + + @Override + public int hashCode() { + return Objects.hash(tokenId); + } + + @Override + public String toString() { + return new ToStringer(DeleteTokenManagementRequestPb.class).add("tokenId", tokenId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java index c9ba4ab60..bb3856176 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DisableLegacyAccess.DisableLegacyAccessSerializer.class) +@JsonDeserialize(using = DisableLegacyAccess.DisableLegacyAccessDeserializer.class) public class DisableLegacyAccess { /** */ - @JsonProperty("disable_legacy_access") private BooleanMessage disableLegacyAccess; /** @@ -21,7 +31,6 @@ public class DisableLegacyAccess { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class DisableLegacyAccess { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public DisableLegacyAccess setDisableLegacyAccess(BooleanMessage disableLegacyAccess) { @@ -83,4 +91,43 @@ public String toString() { .add("settingName", settingName) .toString(); } + + DisableLegacyAccessPb toPb() { + DisableLegacyAccessPb pb = new DisableLegacyAccessPb(); + pb.setDisableLegacyAccess(disableLegacyAccess); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static DisableLegacyAccess fromPb(DisableLegacyAccessPb pb) { + DisableLegacyAccess model = new DisableLegacyAccess(); + model.setDisableLegacyAccess(pb.getDisableLegacyAccess()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class DisableLegacyAccessSerializer extends JsonSerializer { + @Override + public void serialize(DisableLegacyAccess value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DisableLegacyAccessPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DisableLegacyAccessDeserializer + extends JsonDeserializer { + @Override + public DisableLegacyAccess deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DisableLegacyAccessPb pb = mapper.readValue(p, DisableLegacyAccessPb.class); + return DisableLegacyAccess.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java index 341d1925f..696aff38e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java @@ -21,7 +21,7 @@ public DeleteDisableLegacyAccessResponse delete(DeleteDisableLegacyAccessRequest String path = "/api/2.0/settings/types/disable_legacy_access/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDisableLegacyAccessResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public DisableLegacyAccess get(GetDisableLegacyAccessRequest request) { String path = "/api/2.0/settings/types/disable_legacy_access/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DisableLegacyAccess.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public DisableLegacyAccess update(UpdateDisableLegacyAccessRequest request) { String path = "/api/2.0/settings/types/disable_legacy_access/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DisableLegacyAccess.class); 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessPb.java new file mode 100755 index 000000000..41b85721f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DisableLegacyAccessPb { + @JsonProperty("disable_legacy_access") + private BooleanMessage disableLegacyAccess; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public DisableLegacyAccessPb setDisableLegacyAccess(BooleanMessage disableLegacyAccess) { + this.disableLegacyAccess = disableLegacyAccess; + return this; + } + + public BooleanMessage getDisableLegacyAccess() { + return disableLegacyAccess; + } + + public DisableLegacyAccessPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DisableLegacyAccessPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DisableLegacyAccessPb that = (DisableLegacyAccessPb) o; + return Objects.equals(disableLegacyAccess, that.disableLegacyAccess) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(disableLegacyAccess, etag, settingName); + } + + 
@Override + public String toString() { + return new ToStringer(DisableLegacyAccessPb.class) + .add("disableLegacyAccess", disableLegacyAccess) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java index 71ee5867c..0d4306d9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DisableLegacyDbfs.DisableLegacyDbfsSerializer.class) +@JsonDeserialize(using = DisableLegacyDbfs.DisableLegacyDbfsDeserializer.class) public class DisableLegacyDbfs { /** */ - @JsonProperty("disable_legacy_dbfs") private BooleanMessage disableLegacyDbfs; /** @@ -21,7 +31,6 @@ public class DisableLegacyDbfs { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class DisableLegacyDbfs { * respected instead. 
Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public DisableLegacyDbfs setDisableLegacyDbfs(BooleanMessage disableLegacyDbfs) { @@ -83,4 +91,42 @@ public String toString() { .add("settingName", settingName) .toString(); } + + DisableLegacyDbfsPb toPb() { + DisableLegacyDbfsPb pb = new DisableLegacyDbfsPb(); + pb.setDisableLegacyDbfs(disableLegacyDbfs); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static DisableLegacyDbfs fromPb(DisableLegacyDbfsPb pb) { + DisableLegacyDbfs model = new DisableLegacyDbfs(); + model.setDisableLegacyDbfs(pb.getDisableLegacyDbfs()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class DisableLegacyDbfsSerializer extends JsonSerializer { + @Override + public void serialize(DisableLegacyDbfs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DisableLegacyDbfsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DisableLegacyDbfsDeserializer extends JsonDeserializer { + @Override + public DisableLegacyDbfs deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DisableLegacyDbfsPb pb = mapper.readValue(p, DisableLegacyDbfsPb.class); + return DisableLegacyDbfs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java index f314a56f3..c48854757 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java @@ -21,7 +21,7 @@ public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest req String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDisableLegacyDbfsResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public DisableLegacyDbfs get(GetDisableLegacyDbfsRequest request) { String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DisableLegacyDbfs.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest request) { String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DisableLegacyDbfs.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsPb.java new file mode 100755 index 000000000..b83acb8bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DisableLegacyDbfsPb { + @JsonProperty("disable_legacy_dbfs") + private BooleanMessage disableLegacyDbfs; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public DisableLegacyDbfsPb setDisableLegacyDbfs(BooleanMessage disableLegacyDbfs) { + this.disableLegacyDbfs = disableLegacyDbfs; + return this; + } + + public BooleanMessage getDisableLegacyDbfs() { + return disableLegacyDbfs; + } + + public DisableLegacyDbfsPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DisableLegacyDbfsPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DisableLegacyDbfsPb that = (DisableLegacyDbfsPb) o; + return Objects.equals(disableLegacyDbfs, that.disableLegacyDbfs) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(disableLegacyDbfs, etag, settingName); + } + + @Override + public String toString() { + return new 
ToStringer(DisableLegacyDbfsPb.class) + .add("disableLegacyDbfs", disableLegacyDbfs) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java index fed2ee95c..d5092feb8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DisableLegacyFeatures.DisableLegacyFeaturesSerializer.class) +@JsonDeserialize(using = DisableLegacyFeatures.DisableLegacyFeaturesDeserializer.class) public class DisableLegacyFeatures { /** */ - @JsonProperty("disable_legacy_features") private BooleanMessage disableLegacyFeatures; /** @@ -21,7 +31,6 @@ public class DisableLegacyFeatures { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class DisableLegacyFeatures { * respected instead. 
Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public DisableLegacyFeatures setDisableLegacyFeatures(BooleanMessage disableLegacyFeatures) { @@ -83,4 +91,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + DisableLegacyFeaturesPb toPb() { + DisableLegacyFeaturesPb pb = new DisableLegacyFeaturesPb(); + pb.setDisableLegacyFeatures(disableLegacyFeatures); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static DisableLegacyFeatures fromPb(DisableLegacyFeaturesPb pb) { + DisableLegacyFeatures model = new DisableLegacyFeatures(); + model.setDisableLegacyFeatures(pb.getDisableLegacyFeatures()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class DisableLegacyFeaturesSerializer + extends JsonSerializer { + @Override + public void serialize( + DisableLegacyFeatures value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DisableLegacyFeaturesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DisableLegacyFeaturesDeserializer + extends JsonDeserializer { + @Override + public DisableLegacyFeatures deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DisableLegacyFeaturesPb pb = mapper.readValue(p, DisableLegacyFeaturesPb.class); + return DisableLegacyFeatures.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java index d97523df1..1e40779d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java @@ -24,7 +24,7 @@ public DeleteDisableLegacyFeaturesResponse delete(DeleteDisableLegacyFeaturesReq apiClient.configuredAccountID()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDisableLegacyFeaturesResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public DisableLegacyFeatures get(GetDisableLegacyFeaturesRequest request) { apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DisableLegacyFeatures.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public DisableLegacyFeatures update(UpdateDisableLegacyFeaturesRequest request) apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, DisableLegacyFeatures.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesPb.java new file mode 100755 index 000000000..5bc06d11c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DisableLegacyFeaturesPb { + @JsonProperty("disable_legacy_features") + private BooleanMessage disableLegacyFeatures; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public DisableLegacyFeaturesPb setDisableLegacyFeatures(BooleanMessage disableLegacyFeatures) { + this.disableLegacyFeatures = disableLegacyFeatures; + return this; + } + + public BooleanMessage getDisableLegacyFeatures() { + return disableLegacyFeatures; + } + + public DisableLegacyFeaturesPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DisableLegacyFeaturesPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DisableLegacyFeaturesPb that = (DisableLegacyFeaturesPb) o; + return Objects.equals(disableLegacyFeatures, that.disableLegacyFeatures) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(disableLegacyFeatures, 
etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(DisableLegacyFeaturesPb.class) + .add("disableLegacyFeatures", disableLegacyFeatures) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java index a5eab3861..c0ea8c424 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,9 +22,10 @@ * the format expected by the dataplane, see networkconfig.textproto). */ @Generated +@JsonSerialize(using = EgressNetworkPolicy.EgressNetworkPolicySerializer.class) +@JsonDeserialize(using = EgressNetworkPolicy.EgressNetworkPolicyDeserializer.class) public class EgressNetworkPolicy { /** The access policy enforced for egress traffic to the internet. 
*/ - @JsonProperty("internet_access") private EgressNetworkPolicyInternetAccessPolicy internetAccess; public EgressNetworkPolicy setInternetAccess( @@ -47,4 +57,39 @@ public String toString() { .add("internetAccess", internetAccess) .toString(); } + + EgressNetworkPolicyPb toPb() { + EgressNetworkPolicyPb pb = new EgressNetworkPolicyPb(); + pb.setInternetAccess(internetAccess); + + return pb; + } + + static EgressNetworkPolicy fromPb(EgressNetworkPolicyPb pb) { + EgressNetworkPolicy model = new EgressNetworkPolicy(); + model.setInternetAccess(pb.getInternetAccess()); + + return model; + } + + public static class EgressNetworkPolicySerializer extends JsonSerializer { + @Override + public void serialize(EgressNetworkPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EgressNetworkPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyPb pb = mapper.readValue(p, EgressNetworkPolicyPb.class); + return EgressNetworkPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java index 37029d2c5..d8980ff19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java @@ -4,24 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + EgressNetworkPolicyInternetAccessPolicy.EgressNetworkPolicyInternetAccessPolicySerializer + .class) +@JsonDeserialize( + using = + EgressNetworkPolicyInternetAccessPolicy.EgressNetworkPolicyInternetAccessPolicyDeserializer + .class) public class EgressNetworkPolicyInternetAccessPolicy { /** */ - @JsonProperty("allowed_internet_destinations") private Collection allowedInternetDestinations; /** */ - @JsonProperty("allowed_storage_destinations") private Collection 
allowedStorageDestinations; /** Optional. If not specified, assume the policy is enforced for all workloads. */ - @JsonProperty("log_only_mode") private EgressNetworkPolicyInternetAccessPolicyLogOnlyMode logOnlyMode; /** @@ -31,7 +45,6 @@ public class EgressNetworkPolicyInternetAccessPolicy { * external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via * private link. */ - @JsonProperty("restriction_mode") private EgressNetworkPolicyInternetAccessPolicyRestrictionMode restrictionMode; public EgressNetworkPolicyInternetAccessPolicy setAllowedInternetDestinations( @@ -104,4 +117,51 @@ public String toString() { .add("restrictionMode", restrictionMode) .toString(); } + + EgressNetworkPolicyInternetAccessPolicyPb toPb() { + EgressNetworkPolicyInternetAccessPolicyPb pb = new EgressNetworkPolicyInternetAccessPolicyPb(); + pb.setAllowedInternetDestinations(allowedInternetDestinations); + pb.setAllowedStorageDestinations(allowedStorageDestinations); + pb.setLogOnlyMode(logOnlyMode); + pb.setRestrictionMode(restrictionMode); + + return pb; + } + + static EgressNetworkPolicyInternetAccessPolicy fromPb( + EgressNetworkPolicyInternetAccessPolicyPb pb) { + EgressNetworkPolicyInternetAccessPolicy model = new EgressNetworkPolicyInternetAccessPolicy(); + model.setAllowedInternetDestinations(pb.getAllowedInternetDestinations()); + model.setAllowedStorageDestinations(pb.getAllowedStorageDestinations()); + model.setLogOnlyMode(pb.getLogOnlyMode()); + model.setRestrictionMode(pb.getRestrictionMode()); + + return model; + } + + public static class EgressNetworkPolicyInternetAccessPolicySerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyInternetAccessPolicy value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyInternetAccessPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
EgressNetworkPolicyInternetAccessPolicyDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyInternetAccessPolicy deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyInternetAccessPolicyPb pb = + mapper.readValue(p, EgressNetworkPolicyInternetAccessPolicyPb.class); + return EgressNetworkPolicyInternetAccessPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java index 6c97950e2..7e90c8646 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,9 +22,16 @@ * support host names and IP addresses. 
*/ @Generated +@JsonSerialize( + using = + EgressNetworkPolicyInternetAccessPolicyInternetDestination + .EgressNetworkPolicyInternetAccessPolicyInternetDestinationSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyInternetAccessPolicyInternetDestination + .EgressNetworkPolicyInternetAccessPolicyInternetDestinationDeserializer.class) public class EgressNetworkPolicyInternetAccessPolicyInternetDestination { /** */ - @JsonProperty("destination") private String destination; /** @@ -24,13 +40,11 @@ public class EgressNetworkPolicyInternetAccessPolicyInternetDestination { * be set to TCP by default and hidden from the user. In the future, users may be able to select * HTTP filtering (i.e. SNI based filtering, filtering by FQDN). */ - @JsonProperty("protocol") private EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol protocol; /** */ - @JsonProperty("type") private EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType typeValue; @@ -92,4 +106,51 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb toPb() { + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb pb = + new EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb(); + pb.setDestination(destination); + pb.setProtocol(protocol); + pb.setType(typeValue); + + return pb; + } + + static EgressNetworkPolicyInternetAccessPolicyInternetDestination fromPb( + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb pb) { + EgressNetworkPolicyInternetAccessPolicyInternetDestination model = + new EgressNetworkPolicyInternetAccessPolicyInternetDestination(); + model.setDestination(pb.getDestination()); + model.setProtocol(pb.getProtocol()); + model.setType(pb.getType()); + + return model; + } + + public static class EgressNetworkPolicyInternetAccessPolicyInternetDestinationSerializer + extends JsonSerializer { + @Override + public 
void serialize( + EgressNetworkPolicyInternetAccessPolicyInternetDestination value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyInternetAccessPolicyInternetDestinationDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyInternetAccessPolicyInternetDestination deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb pb = + mapper.readValue(p, EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb.class); + return EgressNetworkPolicyInternetAccessPolicyInternetDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb.java new file mode 100755 index 000000000..b5b2365b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Users can specify accessible internet destinations when outbound access is restricted. 
We only + * support domain name (FQDN) destinations for the time being, though going forwards we want to + * support host names and IP addresses. + */ +@Generated +class EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb { + @JsonProperty("destination") + private String destination; + + @JsonProperty("protocol") + private + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + protocol; + + @JsonProperty("type") + private EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType + typeValue; + + public EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb setDestination( + String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + public EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb setProtocol( + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + protocol) { + this.protocol = protocol; + return this; + } + + public + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + getProtocol() { + return protocol; + } + + public EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb setType( + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType typeValue) { + this.typeValue = typeValue; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType + getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb that = + (EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb) o; + return Objects.equals(destination, that.destination) + && Objects.equals(protocol, that.protocol) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + 
public int hashCode() { + return Objects.hash(destination, protocol, typeValue); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyInternetAccessPolicyInternetDestinationPb.class) + .add("destination", destination) + .add("protocol", protocol) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.java index 744cfe706..d58e0cabe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.java @@ -4,18 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode + .EgressNetworkPolicyInternetAccessPolicyLogOnlyModeSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode + .EgressNetworkPolicyInternetAccessPolicyLogOnlyModeDeserializer.class) public class 
EgressNetworkPolicyInternetAccessPolicyLogOnlyMode { /** */ - @JsonProperty("log_only_mode_type") private EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType logOnlyModeType; /** */ - @JsonProperty("workloads") private Collection workloads; public EgressNetworkPolicyInternetAccessPolicyLogOnlyMode setLogOnlyModeType( @@ -60,4 +75,49 @@ public String toString() { .add("workloads", workloads) .toString(); } + + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb toPb() { + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb pb = + new EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb(); + pb.setLogOnlyModeType(logOnlyModeType); + pb.setWorkloads(workloads); + + return pb; + } + + static EgressNetworkPolicyInternetAccessPolicyLogOnlyMode fromPb( + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb pb) { + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode model = + new EgressNetworkPolicyInternetAccessPolicyLogOnlyMode(); + model.setLogOnlyModeType(pb.getLogOnlyModeType()); + model.setWorkloads(pb.getWorkloads()); + + return model; + } + + public static class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeSerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyInternetAccessPolicyLogOnlyMode deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb pb = + mapper.readValue(p, EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb.class); + return EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb.java new file mode 100755 index 000000000..46a2219c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb { + @JsonProperty("log_only_mode_type") + private EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType logOnlyModeType; + + @JsonProperty("workloads") + private Collection workloads; + + public EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb setLogOnlyModeType( + EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType logOnlyModeType) { + this.logOnlyModeType = logOnlyModeType; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType getLogOnlyModeType() { + return logOnlyModeType; + } + + public EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb setWorkloads( + Collection workloads) { + this.workloads = workloads; + return this; + } + + public Collection getWorkloads() { + return workloads; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb that = + (EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb) o; + return Objects.equals(logOnlyModeType, that.logOnlyModeType) + && Objects.equals(workloads, that.workloads); + } + + @Override + public int hashCode() { + return Objects.hash(logOnlyModeType, workloads); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyInternetAccessPolicyLogOnlyModePb.class) + .add("logOnlyModeType", logOnlyModeType) + .add("workloads", workloads) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyPb.java new file mode 100755 index 000000000..cb075cb70 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyPb.java @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EgressNetworkPolicyInternetAccessPolicyPb { + @JsonProperty("allowed_internet_destinations") + private Collection + allowedInternetDestinations; + + @JsonProperty("allowed_storage_destinations") + private Collection + allowedStorageDestinations; + + @JsonProperty("log_only_mode") + private EgressNetworkPolicyInternetAccessPolicyLogOnlyMode logOnlyMode; + + @JsonProperty("restriction_mode") + private EgressNetworkPolicyInternetAccessPolicyRestrictionMode restrictionMode; + + public EgressNetworkPolicyInternetAccessPolicyPb setAllowedInternetDestinations( + Collection + allowedInternetDestinations) { + this.allowedInternetDestinations = allowedInternetDestinations; + return this; + } + + public Collection + getAllowedInternetDestinations() { + return allowedInternetDestinations; + } + + public EgressNetworkPolicyInternetAccessPolicyPb setAllowedStorageDestinations( + Collection + allowedStorageDestinations) { + this.allowedStorageDestinations = allowedStorageDestinations; + return this; + } + + public Collection + getAllowedStorageDestinations() { + return allowedStorageDestinations; + } + + public EgressNetworkPolicyInternetAccessPolicyPb setLogOnlyMode( + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode logOnlyMode) { + this.logOnlyMode = logOnlyMode; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicyLogOnlyMode getLogOnlyMode() { + return logOnlyMode; + } + + public EgressNetworkPolicyInternetAccessPolicyPb setRestrictionMode( + EgressNetworkPolicyInternetAccessPolicyRestrictionMode restrictionMode) { + this.restrictionMode = restrictionMode; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicyRestrictionMode getRestrictionMode() { + return 
restrictionMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyInternetAccessPolicyPb that = (EgressNetworkPolicyInternetAccessPolicyPb) o; + return Objects.equals(allowedInternetDestinations, that.allowedInternetDestinations) + && Objects.equals(allowedStorageDestinations, that.allowedStorageDestinations) + && Objects.equals(logOnlyMode, that.logOnlyMode) + && Objects.equals(restrictionMode, that.restrictionMode); + } + + @Override + public int hashCode() { + return Objects.hash( + allowedInternetDestinations, allowedStorageDestinations, logOnlyMode, restrictionMode); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyInternetAccessPolicyPb.class) + .add("allowedInternetDestinations", allowedInternetDestinations) + .add("allowedStorageDestinations", allowedStorageDestinations) + .add("logOnlyMode", logOnlyMode) + .add("restrictionMode", restrictionMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java index f1b8a4a59..10ec3b082 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java @@ -4,43 +4,52 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Users can specify accessible storage destinations. */ @Generated +@JsonSerialize( + using = + EgressNetworkPolicyInternetAccessPolicyStorageDestination + .EgressNetworkPolicyInternetAccessPolicyStorageDestinationSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyInternetAccessPolicyStorageDestination + .EgressNetworkPolicyInternetAccessPolicyStorageDestinationDeserializer.class) public class EgressNetworkPolicyInternetAccessPolicyStorageDestination { /** */ - @JsonProperty("allowed_paths") private Collection allowedPaths; /** */ - @JsonProperty("azure_container") private String azureContainer; /** */ - @JsonProperty("azure_dns_zone") private String azureDnsZone; /** */ - @JsonProperty("azure_storage_account") private String azureStorageAccount; /** */ - @JsonProperty("azure_storage_service") private String azureStorageService; /** */ - @JsonProperty("bucket_name") private String bucketName; /** */ - @JsonProperty("region") private String region; /** */ - @JsonProperty("type") private EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType typeValue; public EgressNetworkPolicyInternetAccessPolicyStorageDestination setAllowedPaths( @@ -164,4 +173,61 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb toPb() { + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb pb = + new EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb(); + pb.setAllowedPaths(allowedPaths); + 
pb.setAzureContainer(azureContainer); + pb.setAzureDnsZone(azureDnsZone); + pb.setAzureStorageAccount(azureStorageAccount); + pb.setAzureStorageService(azureStorageService); + pb.setBucketName(bucketName); + pb.setRegion(region); + pb.setType(typeValue); + + return pb; + } + + static EgressNetworkPolicyInternetAccessPolicyStorageDestination fromPb( + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb pb) { + EgressNetworkPolicyInternetAccessPolicyStorageDestination model = + new EgressNetworkPolicyInternetAccessPolicyStorageDestination(); + model.setAllowedPaths(pb.getAllowedPaths()); + model.setAzureContainer(pb.getAzureContainer()); + model.setAzureDnsZone(pb.getAzureDnsZone()); + model.setAzureStorageAccount(pb.getAzureStorageAccount()); + model.setAzureStorageService(pb.getAzureStorageService()); + model.setBucketName(pb.getBucketName()); + model.setRegion(pb.getRegion()); + model.setType(pb.getType()); + + return model; + } + + public static class EgressNetworkPolicyInternetAccessPolicyStorageDestinationSerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyInternetAccessPolicyStorageDestination value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyInternetAccessPolicyStorageDestinationDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyInternetAccessPolicyStorageDestination deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb pb = + mapper.readValue(p, EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb.class); + return EgressNetworkPolicyInternetAccessPolicyStorageDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb.java new file mode 100755 index 000000000..ef739669f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb.java @@ -0,0 +1,159 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Users can specify accessible storage destinations. 
*/ +@Generated +class EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb { + @JsonProperty("allowed_paths") + private Collection allowedPaths; + + @JsonProperty("azure_container") + private String azureContainer; + + @JsonProperty("azure_dns_zone") + private String azureDnsZone; + + @JsonProperty("azure_storage_account") + private String azureStorageAccount; + + @JsonProperty("azure_storage_service") + private String azureStorageService; + + @JsonProperty("bucket_name") + private String bucketName; + + @JsonProperty("region") + private String region; + + @JsonProperty("type") + private EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType typeValue; + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setAllowedPaths( + Collection allowedPaths) { + this.allowedPaths = allowedPaths; + return this; + } + + public Collection getAllowedPaths() { + return allowedPaths; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setAzureContainer( + String azureContainer) { + this.azureContainer = azureContainer; + return this; + } + + public String getAzureContainer() { + return azureContainer; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setAzureDnsZone( + String azureDnsZone) { + this.azureDnsZone = azureDnsZone; + return this; + } + + public String getAzureDnsZone() { + return azureDnsZone; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setAzureStorageAccount( + String azureStorageAccount) { + this.azureStorageAccount = azureStorageAccount; + return this; + } + + public String getAzureStorageAccount() { + return azureStorageAccount; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setAzureStorageService( + String azureStorageService) { + this.azureStorageService = azureStorageService; + return this; + } + + public String getAzureStorageService() { + return azureStorageService; + } + + public 
EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setBucketName( + String bucketName) { + this.bucketName = bucketName; + return this; + } + + public String getBucketName() { + return bucketName; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb setType( + EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType typeValue) { + this.typeValue = typeValue; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb that = + (EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb) o; + return Objects.equals(allowedPaths, that.allowedPaths) + && Objects.equals(azureContainer, that.azureContainer) + && Objects.equals(azureDnsZone, that.azureDnsZone) + && Objects.equals(azureStorageAccount, that.azureStorageAccount) + && Objects.equals(azureStorageService, that.azureStorageService) + && Objects.equals(bucketName, that.bucketName) + && Objects.equals(region, that.region) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash( + allowedPaths, + azureContainer, + azureDnsZone, + azureStorageAccount, + azureStorageService, + bucketName, + region, + typeValue); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyInternetAccessPolicyStorageDestinationPb.class) + .add("allowedPaths", allowedPaths) + .add("azureContainer", azureContainer) + .add("azureDnsZone", azureDnsZone) + .add("azureStorageAccount", azureStorageAccount) + 
.add("azureStorageService", azureStorageService) + .add("bucketName", bucketName) + .add("region", region) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java index 886b66041..75b002e4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java @@ -4,17 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + EgressNetworkPolicyNetworkAccessPolicy.EgressNetworkPolicyNetworkAccessPolicySerializer + .class) +@JsonDeserialize( + using = + EgressNetworkPolicyNetworkAccessPolicy.EgressNetworkPolicyNetworkAccessPolicyDeserializer + .class) public class EgressNetworkPolicyNetworkAccessPolicy { /** * List of internet destinations that serverless workloads are allowed to access when in * RESTRICTED_ACCESS mode. 
*/ - @JsonProperty("allowed_internet_destinations") private Collection allowedInternetDestinations; @@ -22,16 +38,13 @@ public class EgressNetworkPolicyNetworkAccessPolicy { * List of storage destinations that serverless workloads are allowed to access when in * RESTRICTED_ACCESS mode. */ - @JsonProperty("allowed_storage_destinations") private Collection allowedStorageDestinations; /** Optional. When policy_enforcement is not provided, we default to ENFORCE_MODE_ALL_SERVICES */ - @JsonProperty("policy_enforcement") private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement; /** The restriction mode that controls how serverless workloads can access the internet. */ - @JsonProperty("restriction_mode") private EgressNetworkPolicyNetworkAccessPolicyRestrictionMode restrictionMode; public EgressNetworkPolicyNetworkAccessPolicy setAllowedInternetDestinations( @@ -107,4 +120,51 @@ public String toString() { .add("restrictionMode", restrictionMode) .toString(); } + + EgressNetworkPolicyNetworkAccessPolicyPb toPb() { + EgressNetworkPolicyNetworkAccessPolicyPb pb = new EgressNetworkPolicyNetworkAccessPolicyPb(); + pb.setAllowedInternetDestinations(allowedInternetDestinations); + pb.setAllowedStorageDestinations(allowedStorageDestinations); + pb.setPolicyEnforcement(policyEnforcement); + pb.setRestrictionMode(restrictionMode); + + return pb; + } + + static EgressNetworkPolicyNetworkAccessPolicy fromPb( + EgressNetworkPolicyNetworkAccessPolicyPb pb) { + EgressNetworkPolicyNetworkAccessPolicy model = new EgressNetworkPolicyNetworkAccessPolicy(); + model.setAllowedInternetDestinations(pb.getAllowedInternetDestinations()); + model.setAllowedStorageDestinations(pb.getAllowedStorageDestinations()); + model.setPolicyEnforcement(pb.getPolicyEnforcement()); + model.setRestrictionMode(pb.getRestrictionMode()); + + return model; + } + + public static class EgressNetworkPolicyNetworkAccessPolicySerializer + extends JsonSerializer { + @Override + public void 
serialize( + EgressNetworkPolicyNetworkAccessPolicy value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyNetworkAccessPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyNetworkAccessPolicyDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyNetworkAccessPolicy deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyNetworkAccessPolicyPb pb = + mapper.readValue(p, EgressNetworkPolicyNetworkAccessPolicyPb.class); + return EgressNetworkPolicyNetworkAccessPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java index 4fbbea293..cb8e0a56a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -13,16 +22,22 @@ * support to host names and IP addresses. */ @Generated +@JsonSerialize( + using = + EgressNetworkPolicyNetworkAccessPolicyInternetDestination + .EgressNetworkPolicyNetworkAccessPolicyInternetDestinationSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyNetworkAccessPolicyInternetDestination + .EgressNetworkPolicyNetworkAccessPolicyInternetDestinationDeserializer.class) public class EgressNetworkPolicyNetworkAccessPolicyInternetDestination { /** * The internet destination to which access will be allowed. Format dependent on the destination * type. */ - @JsonProperty("destination") private String destination; /** The type of internet destination. Currently only DNS_NAME is supported. */ - @JsonProperty("internet_destination_type") private EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType internetDestinationType; @@ -70,4 +85,49 @@ public String toString() { .add("internetDestinationType", internetDestinationType) .toString(); } + + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb toPb() { + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb pb = + new EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb(); + pb.setDestination(destination); + pb.setInternetDestinationType(internetDestinationType); + + return pb; + } + + static EgressNetworkPolicyNetworkAccessPolicyInternetDestination fromPb( + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb pb) { + EgressNetworkPolicyNetworkAccessPolicyInternetDestination model = + new EgressNetworkPolicyNetworkAccessPolicyInternetDestination(); + model.setDestination(pb.getDestination()); + model.setInternetDestinationType(pb.getInternetDestinationType()); + + return model; + } + + public static class 
EgressNetworkPolicyNetworkAccessPolicyInternetDestinationSerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyNetworkAccessPolicyInternetDestination value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyNetworkAccessPolicyInternetDestinationDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyNetworkAccessPolicyInternetDestination deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb pb = + mapper.readValue(p, EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb.class); + return EgressNetworkPolicyNetworkAccessPolicyInternetDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb.java new file mode 100755 index 000000000..8064bc61c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb.java @@ -0,0 +1,68 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Users can specify accessible internet destinations when outbound access is restricted. 
We only + * support DNS_NAME (FQDN format) destinations for the time being. Going forward we may extend + * support to host names and IP addresses. + */ +@Generated +class EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb { + @JsonProperty("destination") + private String destination; + + @JsonProperty("internet_destination_type") + private EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + internetDestinationType; + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb setDestination( + String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb setInternetDestinationType( + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + internetDestinationType) { + this.internetDestinationType = internetDestinationType; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + getInternetDestinationType() { + return internetDestinationType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb that = + (EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb) o; + return Objects.equals(destination, that.destination) + && Objects.equals(internetDestinationType, that.internetDestinationType); + } + + @Override + public int hashCode() { + return Objects.hash(destination, internetDestinationType); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyInternetDestinationPb.class) + .add("destination", destination) + .add("internetDestinationType", internetDestinationType) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPb.java new file mode 100755 index 000000000..4a22b8fb7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EgressNetworkPolicyNetworkAccessPolicyPb { + @JsonProperty("allowed_internet_destinations") + private Collection + allowedInternetDestinations; + + @JsonProperty("allowed_storage_destinations") + private Collection + allowedStorageDestinations; + + @JsonProperty("policy_enforcement") + private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement; + + @JsonProperty("restriction_mode") + private EgressNetworkPolicyNetworkAccessPolicyRestrictionMode restrictionMode; + + public EgressNetworkPolicyNetworkAccessPolicyPb setAllowedInternetDestinations( + Collection + allowedInternetDestinations) { + this.allowedInternetDestinations = allowedInternetDestinations; + return this; + } + + public Collection + getAllowedInternetDestinations() { + return allowedInternetDestinations; + } + + public EgressNetworkPolicyNetworkAccessPolicyPb setAllowedStorageDestinations( + Collection + allowedStorageDestinations) { + this.allowedStorageDestinations = allowedStorageDestinations; + return this; + } + + public Collection + getAllowedStorageDestinations() { + return allowedStorageDestinations; + } + + public EgressNetworkPolicyNetworkAccessPolicyPb setPolicyEnforcement( + 
EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement) { + this.policyEnforcement = policyEnforcement; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement getPolicyEnforcement() { + return policyEnforcement; + } + + public EgressNetworkPolicyNetworkAccessPolicyPb setRestrictionMode( + EgressNetworkPolicyNetworkAccessPolicyRestrictionMode restrictionMode) { + this.restrictionMode = restrictionMode; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyRestrictionMode getRestrictionMode() { + return restrictionMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyPb that = (EgressNetworkPolicyNetworkAccessPolicyPb) o; + return Objects.equals(allowedInternetDestinations, that.allowedInternetDestinations) + && Objects.equals(allowedStorageDestinations, that.allowedStorageDestinations) + && Objects.equals(policyEnforcement, that.policyEnforcement) + && Objects.equals(restrictionMode, that.restrictionMode); + } + + @Override + public int hashCode() { + return Objects.hash( + allowedInternetDestinations, + allowedStorageDestinations, + policyEnforcement, + restrictionMode); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyPb.class) + .add("allowedInternetDestinations", allowedInternetDestinations) + .add("allowedStorageDestinations", allowedStorageDestinations) + .add("policyEnforcement", policyEnforcement) + .add("restrictionMode", restrictionMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java index feacf01a5..e87429e6b 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java @@ -4,17 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement + .EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement + .EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDeserializer.class) public class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement { /** * When empty, it means dry run for all products. When non-empty, it means dry run for specific * products and for the other products, they will run in enforced mode. */ - @JsonProperty("dry_run_mode_product_filter") private Collection dryRunModeProductFilter; @@ -22,7 +38,6 @@ public class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement { * The mode of policy enforcement. ENFORCED blocks traffic that violates policy, while DRY_RUN * only logs violations without blocking. When not specified, defaults to ENFORCED. 
*/ - @JsonProperty("enforcement_mode") private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode enforcementMode; public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement setDryRunModeProductFilter( @@ -70,4 +85,49 @@ public String toString() { .add("enforcementMode", enforcementMode) .toString(); } + + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb toPb() { + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb pb = + new EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb(); + pb.setDryRunModeProductFilter(dryRunModeProductFilter); + pb.setEnforcementMode(enforcementMode); + + return pb; + } + + static EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement fromPb( + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb pb) { + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement model = + new EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement(); + model.setDryRunModeProductFilter(pb.getDryRunModeProductFilter()); + model.setEnforcementMode(pb.getEnforcementMode()); + + return model; + } + + public static class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementSerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb pb = + mapper.readValue(p, EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb.class); + return EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb.java new file mode 100755 index 000000000..059ffbd50 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb { + @JsonProperty("dry_run_mode_product_filter") + private Collection + dryRunModeProductFilter; + + @JsonProperty("enforcement_mode") + private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode enforcementMode; + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb setDryRunModeProductFilter( + Collection + dryRunModeProductFilter) { + this.dryRunModeProductFilter = dryRunModeProductFilter; + return this; + } + + public Collection + getDryRunModeProductFilter() { + return dryRunModeProductFilter; + } + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb setEnforcementMode( + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode enforcementMode) { + this.enforcementMode = enforcementMode; + return this; + } + + 
public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode + getEnforcementMode() { + return enforcementMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb that = + (EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb) o; + return Objects.equals(dryRunModeProductFilter, that.dryRunModeProductFilter) + && Objects.equals(enforcementMode, that.enforcementMode); + } + + @Override + public int hashCode() { + return Objects.hash(dryRunModeProductFilter, enforcementMode); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementPb.class) + .add("dryRunModeProductFilter", dryRunModeProductFilter) + .add("enforcementMode", enforcementMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java index 8ea733247..f8164e160 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java @@ -4,30 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Users can specify accessible storage destinations. */ @Generated +@JsonSerialize( + using = + EgressNetworkPolicyNetworkAccessPolicyStorageDestination + .EgressNetworkPolicyNetworkAccessPolicyStorageDestinationSerializer.class) +@JsonDeserialize( + using = + EgressNetworkPolicyNetworkAccessPolicyStorageDestination + .EgressNetworkPolicyNetworkAccessPolicyStorageDestinationDeserializer.class) public class EgressNetworkPolicyNetworkAccessPolicyStorageDestination { /** The Azure storage account name. */ - @JsonProperty("azure_storage_account") private String azureStorageAccount; /** The Azure storage service type (blob, dfs, etc.). */ - @JsonProperty("azure_storage_service") private String azureStorageService; /** */ - @JsonProperty("bucket_name") private String bucketName; /** The region of the S3 bucket. */ - @JsonProperty("region") private String region; /** The type of storage destination. 
*/ - @JsonProperty("storage_destination_type") private EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType storageDestinationType; @@ -110,4 +122,55 @@ public String toString() { .add("storageDestinationType", storageDestinationType) .toString(); } + + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb toPb() { + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb pb = + new EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb(); + pb.setAzureStorageAccount(azureStorageAccount); + pb.setAzureStorageService(azureStorageService); + pb.setBucketName(bucketName); + pb.setRegion(region); + pb.setStorageDestinationType(storageDestinationType); + + return pb; + } + + static EgressNetworkPolicyNetworkAccessPolicyStorageDestination fromPb( + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb pb) { + EgressNetworkPolicyNetworkAccessPolicyStorageDestination model = + new EgressNetworkPolicyNetworkAccessPolicyStorageDestination(); + model.setAzureStorageAccount(pb.getAzureStorageAccount()); + model.setAzureStorageService(pb.getAzureStorageService()); + model.setBucketName(pb.getBucketName()); + model.setRegion(pb.getRegion()); + model.setStorageDestinationType(pb.getStorageDestinationType()); + + return model; + } + + public static class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationSerializer + extends JsonSerializer { + @Override + public void serialize( + EgressNetworkPolicyNetworkAccessPolicyStorageDestination value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationDeserializer + extends JsonDeserializer { + @Override + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec 
is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb pb = + mapper.readValue(p, EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb.class); + return EgressNetworkPolicyNetworkAccessPolicyStorageDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb.java new file mode 100755 index 000000000..a7712cf01 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb.java @@ -0,0 +1,109 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Users can specify accessible storage destinations. 
*/ +@Generated +class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb { + @JsonProperty("azure_storage_account") + private String azureStorageAccount; + + @JsonProperty("azure_storage_service") + private String azureStorageService; + + @JsonProperty("bucket_name") + private String bucketName; + + @JsonProperty("region") + private String region; + + @JsonProperty("storage_destination_type") + private EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + storageDestinationType; + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb setAzureStorageAccount( + String azureStorageAccount) { + this.azureStorageAccount = azureStorageAccount; + return this; + } + + public String getAzureStorageAccount() { + return azureStorageAccount; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb setAzureStorageService( + String azureStorageService) { + this.azureStorageService = azureStorageService; + return this; + } + + public String getAzureStorageService() { + return azureStorageService; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb setBucketName( + String bucketName) { + this.bucketName = bucketName; + return this; + } + + public String getBucketName() { + return bucketName; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb setStorageDestinationType( + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + storageDestinationType) { + this.storageDestinationType = storageDestinationType; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + getStorageDestinationType() { + return storageDestinationType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; 
+ if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb that = + (EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb) o; + return Objects.equals(azureStorageAccount, that.azureStorageAccount) + && Objects.equals(azureStorageService, that.azureStorageService) + && Objects.equals(bucketName, that.bucketName) + && Objects.equals(region, that.region) + && Objects.equals(storageDestinationType, that.storageDestinationType); + } + + @Override + public int hashCode() { + return Objects.hash( + azureStorageAccount, azureStorageService, bucketName, region, storageDestinationType); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyStorageDestinationPb.class) + .add("azureStorageAccount", azureStorageAccount) + .add("azureStorageService", azureStorageService) + .add("bucketName", bucketName) + .add("region", region) + .add("storageDestinationType", storageDestinationType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyPb.java new file mode 100755 index 000000000..fb5723299 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyPb.java @@ -0,0 +1,49 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The network policies applying for egress traffic. This message is used by the UI/REST API. 
We + * translate this message to the format expected by the dataplane in Lakehouse Network Manager (for + * the format expected by the dataplane, see networkconfig.textproto). + */ +@Generated +class EgressNetworkPolicyPb { + @JsonProperty("internet_access") + private EgressNetworkPolicyInternetAccessPolicy internetAccess; + + public EgressNetworkPolicyPb setInternetAccess( + EgressNetworkPolicyInternetAccessPolicy internetAccess) { + this.internetAccess = internetAccess; + return this; + } + + public EgressNetworkPolicyInternetAccessPolicy getInternetAccess() { + return internetAccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyPb that = (EgressNetworkPolicyPb) o; + return Objects.equals(internetAccess, that.internetAccess); + } + + @Override + public int hashCode() { + return Objects.hash(internetAccess); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyPb.class) + .add("internetAccess", internetAccess) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java index 56cfaf882..29bd0e42c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EmailConfig.EmailConfigSerializer.class) +@JsonDeserialize(using = EmailConfig.EmailConfigDeserializer.class) public class EmailConfig { /** Email addresses to notify. */ - @JsonProperty("addresses") private Collection addresses; public EmailConfig setAddresses(Collection addresses) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(EmailConfig.class).add("addresses", addresses).toString(); } + + EmailConfigPb toPb() { + EmailConfigPb pb = new EmailConfigPb(); + pb.setAddresses(addresses); + + return pb; + } + + static EmailConfig fromPb(EmailConfigPb pb) { + EmailConfig model = new EmailConfig(); + model.setAddresses(pb.getAddresses()); + + return model; + } + + public static class EmailConfigSerializer extends JsonSerializer { + @Override + public void serialize(EmailConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmailConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmailConfigDeserializer extends JsonDeserializer { + @Override + public EmailConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmailConfigPb pb = mapper.readValue(p, EmailConfigPb.class); + return EmailConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfigPb.java new file mode 100755 index 000000000..357104dcd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfigPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EmailConfigPb { + @JsonProperty("addresses") + private Collection addresses; + + public EmailConfigPb setAddresses(Collection addresses) { + this.addresses = addresses; + return this; + } + + public Collection getAddresses() { + return addresses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EmailConfigPb that = (EmailConfigPb) o; + return Objects.equals(addresses, that.addresses); + } + + @Override + public int hashCode() { + return Objects.hash(addresses); + } + + @Override + public String toString() { + return new ToStringer(EmailConfigPb.class).add("addresses", addresses).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java index 43f12c991..20f1e6ac0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java @@ -4,9 +4,21 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Empty.EmptySerializer.class) +@JsonDeserialize(using = Empty.EmptyDeserializer.class) public class Empty { @Override @@ -25,4 +37,35 @@ public int hashCode() { public String toString() { return new ToStringer(Empty.class).toString(); } + + EmptyPb toPb() { + EmptyPb pb = new EmptyPb(); + + return pb; + } + + static Empty fromPb(EmptyPb pb) { + Empty model = new Empty(); + + return model; + } + + public static class EmptySerializer extends JsonSerializer { + @Override + public void serialize(Empty value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmptyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmptyDeserializer extends JsonDeserializer { + @Override + public Empty deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmptyPb pb = mapper.readValue(p, EmptyPb.class); + return Empty.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmptyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmptyPb.java new file mode 100755 index 000000000..7c6a37f58 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmptyPb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EmptyPb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EmptyPb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java index 615acaa97..fe4d96517 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EnableExportNotebook.EnableExportNotebookSerializer.class) +@JsonDeserialize(using = EnableExportNotebook.EnableExportNotebookDeserializer.class) public class EnableExportNotebook { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -19,7 +29,6 @@ public class EnableExportNotebook { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public EnableExportNotebook setBooleanVal(BooleanMessage booleanVal) { @@ -61,4 +70,42 @@ public String toString() { .add("settingName", settingName) .toString(); } + + EnableExportNotebookPb toPb() { + EnableExportNotebookPb pb = new EnableExportNotebookPb(); + pb.setBooleanVal(booleanVal); + pb.setSettingName(settingName); + + return pb; + } + + static EnableExportNotebook fromPb(EnableExportNotebookPb pb) { + EnableExportNotebook model = new EnableExportNotebook(); + model.setBooleanVal(pb.getBooleanVal()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class EnableExportNotebookSerializer extends JsonSerializer { + @Override + public void serialize( + EnableExportNotebook value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnableExportNotebookPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnableExportNotebookDeserializer + extends JsonDeserializer { + @Override + public EnableExportNotebook deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnableExportNotebookPb pb = mapper.readValue(p, EnableExportNotebookPb.class); + return EnableExportNotebook.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java index 31eee1129..2200ccb77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java @@ -33,7 +33,7 @@ public EnableExportNotebook patchEnableExportNotebook(UpdateEnableExportNotebook String path = "/api/2.0/settings/types/enable-export-notebook/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnableExportNotebook.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookPb.java new file mode 100755 index 000000000..0515557c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnableExportNotebookPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("setting_name") + private String settingName; + + public EnableExportNotebookPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableExportNotebookPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableExportNotebookPb that = (EnableExportNotebookPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableExportNotebookPb.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java index 000b182be..7e6c48015 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java @@ -24,7 +24,7 @@ public DeleteAccountIpAccessEnableResponse delete(DeleteAccountIpAccessEnableReq apiClient.configuredAccountID()); try { Request req = new 
Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteAccountIpAccessEnableResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public AccountIpAccessEnable get(GetAccountIpAccessEnableRequest request) { apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AccountIpAccessEnable.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public AccountIpAccessEnable update(UpdateAccountIpAccessEnableRequest request) apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountIpAccessEnable.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java index db002609e..587cc1c87 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EnableNotebookTableClipboard.EnableNotebookTableClipboardSerializer.class) +@JsonDeserialize( + using = EnableNotebookTableClipboard.EnableNotebookTableClipboardDeserializer.class) public class EnableNotebookTableClipboard { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -19,7 +30,6 @@ public class EnableNotebookTableClipboard { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. */ - @JsonProperty("setting_name") private String settingName; public EnableNotebookTableClipboard setBooleanVal(BooleanMessage booleanVal) { @@ -61,4 +71,43 @@ public String toString() { .add("settingName", settingName) .toString(); } + + EnableNotebookTableClipboardPb toPb() { + EnableNotebookTableClipboardPb pb = new EnableNotebookTableClipboardPb(); + pb.setBooleanVal(booleanVal); + pb.setSettingName(settingName); + + return pb; + } + + static EnableNotebookTableClipboard fromPb(EnableNotebookTableClipboardPb pb) { + EnableNotebookTableClipboard model = new EnableNotebookTableClipboard(); + model.setBooleanVal(pb.getBooleanVal()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class EnableNotebookTableClipboardSerializer + extends JsonSerializer { + @Override + public void serialize( + EnableNotebookTableClipboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnableNotebookTableClipboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnableNotebookTableClipboardDeserializer + extends JsonDeserializer { + @Override + public 
EnableNotebookTableClipboard deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnableNotebookTableClipboardPb pb = mapper.readValue(p, EnableNotebookTableClipboardPb.class); + return EnableNotebookTableClipboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java index bcff25338..31d4d99ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java @@ -34,7 +34,7 @@ public EnableNotebookTableClipboard patchEnableNotebookTableClipboard( String path = "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnableNotebookTableClipboard.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardPb.java new file mode 100755 index 000000000..bdd3eda77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnableNotebookTableClipboardPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("setting_name") + private String settingName; + + public EnableNotebookTableClipboardPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableNotebookTableClipboardPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableNotebookTableClipboardPb that = (EnableNotebookTableClipboardPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableNotebookTableClipboardPb.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java index 990d5b733..5bbb4e544 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EnableResultsDownloading.EnableResultsDownloadingSerializer.class) +@JsonDeserialize(using = EnableResultsDownloading.EnableResultsDownloadingDeserializer.class) public class EnableResultsDownloading { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -19,7 +29,6 @@ public class EnableResultsDownloading { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public EnableResultsDownloading setBooleanVal(BooleanMessage booleanVal) { @@ -61,4 +70,43 @@ public String toString() { .add("settingName", settingName) .toString(); } + + EnableResultsDownloadingPb toPb() { + EnableResultsDownloadingPb pb = new EnableResultsDownloadingPb(); + pb.setBooleanVal(booleanVal); + pb.setSettingName(settingName); + + return pb; + } + + static EnableResultsDownloading fromPb(EnableResultsDownloadingPb pb) { + EnableResultsDownloading model = new EnableResultsDownloading(); + model.setBooleanVal(pb.getBooleanVal()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class EnableResultsDownloadingSerializer + extends JsonSerializer { + @Override + public void serialize( + EnableResultsDownloading value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnableResultsDownloadingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnableResultsDownloadingDeserializer + extends JsonDeserializer { + @Override + public EnableResultsDownloading deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnableResultsDownloadingPb pb = mapper.readValue(p, EnableResultsDownloadingPb.class); + return EnableResultsDownloading.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java index 8632b82dc..29a734259 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java @@ -34,7 +34,7 @@ public EnableResultsDownloading patchEnableResultsDownloading( String path = "/api/2.0/settings/types/enable-results-downloading/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnableResultsDownloading.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingPb.java new file mode 100755 index 000000000..7d74be78e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnableResultsDownloadingPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("setting_name") + private String settingName; + + public EnableResultsDownloadingPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableResultsDownloadingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableResultsDownloadingPb that = (EnableResultsDownloadingPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableResultsDownloadingPb.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java index 316b5d6c2..33fcf5595 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** SHIELD feature: ESM */ @Generated +@JsonSerialize(using = EnhancedSecurityMonitoring.EnhancedSecurityMonitoringSerializer.class) +@JsonDeserialize(using = EnhancedSecurityMonitoring.EnhancedSecurityMonitoringDeserializer.class) public class EnhancedSecurityMonitoring { /** */ - @JsonProperty("is_enabled") private Boolean isEnabled; public EnhancedSecurityMonitoring setIsEnabled(Boolean isEnabled) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(EnhancedSecurityMonitoring.class).add("isEnabled", isEnabled).toString(); } + + EnhancedSecurityMonitoringPb toPb() { + EnhancedSecurityMonitoringPb pb = new EnhancedSecurityMonitoringPb(); + pb.setIsEnabled(isEnabled); + + return pb; + } + + static EnhancedSecurityMonitoring fromPb(EnhancedSecurityMonitoringPb pb) { + EnhancedSecurityMonitoring model = new EnhancedSecurityMonitoring(); + model.setIsEnabled(pb.getIsEnabled()); + + return model; + } + + public static class EnhancedSecurityMonitoringSerializer + extends JsonSerializer { + @Override + public void serialize( + EnhancedSecurityMonitoring value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnhancedSecurityMonitoringPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnhancedSecurityMonitoringDeserializer + extends JsonDeserializer { + 
@Override + public EnhancedSecurityMonitoring deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnhancedSecurityMonitoringPb pb = mapper.readValue(p, EnhancedSecurityMonitoringPb.class); + return EnhancedSecurityMonitoring.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java index c1d90c461..344abab8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java @@ -22,7 +22,7 @@ public EnhancedSecurityMonitoringSetting get( String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, EnhancedSecurityMonitoringSetting.class); } catch (IOException e) { @@ -36,7 +36,7 @@ public EnhancedSecurityMonitoringSetting update( String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EnhancedSecurityMonitoringSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringPb.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringPb.java new file mode 100755 index 000000000..9756ddd5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** SHIELD feature: ESM */ +@Generated +class EnhancedSecurityMonitoringPb { + @JsonProperty("is_enabled") + private Boolean isEnabled; + + public EnhancedSecurityMonitoringPb setIsEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled; + return this; + } + + public Boolean getIsEnabled() { + return isEnabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnhancedSecurityMonitoringPb that = (EnhancedSecurityMonitoringPb) o; + return Objects.equals(isEnabled, that.isEnabled); + } + + @Override + public int hashCode() { + return Objects.hash(isEnabled); + } + + @Override + public String toString() { + return new ToStringer(EnhancedSecurityMonitoringPb.class) + .add("isEnabled", isEnabled) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java index c0b0a4544..c7ccea83c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = EnhancedSecurityMonitoringSetting.EnhancedSecurityMonitoringSettingSerializer.class) +@JsonDeserialize( + using = EnhancedSecurityMonitoringSetting.EnhancedSecurityMonitoringSettingDeserializer.class) public class EnhancedSecurityMonitoringSetting { /** SHIELD feature: ESM */ - @JsonProperty("enhanced_security_monitoring_workspace") private EnhancedSecurityMonitoring enhancedSecurityMonitoringWorkspace; /** @@ -21,7 +33,6 @@ public class EnhancedSecurityMonitoringSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +41,6 @@ public class EnhancedSecurityMonitoringSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public EnhancedSecurityMonitoringSetting setEnhancedSecurityMonitoringWorkspace( @@ -85,4 +95,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + EnhancedSecurityMonitoringSettingPb toPb() { + EnhancedSecurityMonitoringSettingPb pb = new EnhancedSecurityMonitoringSettingPb(); + pb.setEnhancedSecurityMonitoringWorkspace(enhancedSecurityMonitoringWorkspace); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static EnhancedSecurityMonitoringSetting fromPb(EnhancedSecurityMonitoringSettingPb pb) { + EnhancedSecurityMonitoringSetting model = new EnhancedSecurityMonitoringSetting(); + model.setEnhancedSecurityMonitoringWorkspace(pb.getEnhancedSecurityMonitoringWorkspace()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class EnhancedSecurityMonitoringSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + EnhancedSecurityMonitoringSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnhancedSecurityMonitoringSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnhancedSecurityMonitoringSettingDeserializer + extends JsonDeserializer { + @Override + public EnhancedSecurityMonitoringSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnhancedSecurityMonitoringSettingPb pb = + mapper.readValue(p, EnhancedSecurityMonitoringSettingPb.class); + return EnhancedSecurityMonitoringSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSettingPb.java new file mode 100755 index 000000000..866970025 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSettingPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EnhancedSecurityMonitoringSettingPb { + @JsonProperty("enhanced_security_monitoring_workspace") + private EnhancedSecurityMonitoring enhancedSecurityMonitoringWorkspace; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public EnhancedSecurityMonitoringSettingPb setEnhancedSecurityMonitoringWorkspace( + EnhancedSecurityMonitoring enhancedSecurityMonitoringWorkspace) { + this.enhancedSecurityMonitoringWorkspace = enhancedSecurityMonitoringWorkspace; + return this; + } + + public EnhancedSecurityMonitoring getEnhancedSecurityMonitoringWorkspace() { + return enhancedSecurityMonitoringWorkspace; + } + + public EnhancedSecurityMonitoringSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public EnhancedSecurityMonitoringSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return 
settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnhancedSecurityMonitoringSettingPb that = (EnhancedSecurityMonitoringSettingPb) o; + return Objects.equals( + enhancedSecurityMonitoringWorkspace, that.enhancedSecurityMonitoringWorkspace) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(enhancedSecurityMonitoringWorkspace, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnhancedSecurityMonitoringSettingPb.class) + .add("enhancedSecurityMonitoringWorkspace", enhancedSecurityMonitoringWorkspace) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccount.java index fe702e861..ca9405bfe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccount.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccount.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** Account level policy for ESM */ @Generated +@JsonSerialize(using = EsmEnablementAccount.EsmEnablementAccountSerializer.class) +@JsonDeserialize(using = EsmEnablementAccount.EsmEnablementAccountDeserializer.class) public class EsmEnablementAccount { /** */ - @JsonProperty("is_enforced") private Boolean isEnforced; public EsmEnablementAccount setIsEnforced(Boolean isEnforced) { @@ -40,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(EsmEnablementAccount.class).add("isEnforced", isEnforced).toString(); } + + EsmEnablementAccountPb toPb() { + EsmEnablementAccountPb pb = new EsmEnablementAccountPb(); + pb.setIsEnforced(isEnforced); + + return pb; + } + + static EsmEnablementAccount fromPb(EsmEnablementAccountPb pb) { + EsmEnablementAccount model = new EsmEnablementAccount(); + model.setIsEnforced(pb.getIsEnforced()); + + return model; + } + + public static class EsmEnablementAccountSerializer extends JsonSerializer { + @Override + public void serialize( + EsmEnablementAccount value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EsmEnablementAccountPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EsmEnablementAccountDeserializer + extends JsonDeserializer { + @Override + public EsmEnablementAccount deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EsmEnablementAccountPb pb = mapper.readValue(p, EsmEnablementAccountPb.class); + return EsmEnablementAccount.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java index 16a15ef85..c7443dc7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java @@ -24,7 +24,7 @@ public EsmEnablementAccountSetting get(GetEsmEnablementAccountSettingRequest req apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, EsmEnablementAccountSetting.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public EsmEnablementAccountSetting update(UpdateEsmEnablementAccountSettingReque apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EsmEnablementAccountSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountPb.java new file mode 100755 index 000000000..9e6efde5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Account level policy for ESM */ +@Generated +class EsmEnablementAccountPb { + @JsonProperty("is_enforced") + private Boolean isEnforced; + + public EsmEnablementAccountPb setIsEnforced(Boolean isEnforced) { + this.isEnforced = isEnforced; + return this; + } + + public Boolean getIsEnforced() { + return isEnforced; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EsmEnablementAccountPb that = (EsmEnablementAccountPb) o; + return Objects.equals(isEnforced, that.isEnforced); + } + + @Override + public int hashCode() { + return Objects.hash(isEnforced); + } + + @Override + public String toString() { + return new ToStringer(EsmEnablementAccountPb.class).add("isEnforced", isEnforced).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java index 9dddd8ab0..5e5babde1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EsmEnablementAccountSetting.EsmEnablementAccountSettingSerializer.class) +@JsonDeserialize(using = EsmEnablementAccountSetting.EsmEnablementAccountSettingDeserializer.class) public class EsmEnablementAccountSetting { /** Account level policy for ESM */ - @JsonProperty("esm_enablement_account") private EsmEnablementAccount esmEnablementAccount; /** @@ -21,7 +31,6 @@ public class EsmEnablementAccountSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class EsmEnablementAccountSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public EsmEnablementAccountSetting setEsmEnablementAccount( @@ -84,4 +92,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + EsmEnablementAccountSettingPb toPb() { + EsmEnablementAccountSettingPb pb = new EsmEnablementAccountSettingPb(); + pb.setEsmEnablementAccount(esmEnablementAccount); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static EsmEnablementAccountSetting fromPb(EsmEnablementAccountSettingPb pb) { + EsmEnablementAccountSetting model = new EsmEnablementAccountSetting(); + model.setEsmEnablementAccount(pb.getEsmEnablementAccount()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class EsmEnablementAccountSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + EsmEnablementAccountSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EsmEnablementAccountSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EsmEnablementAccountSettingDeserializer + extends JsonDeserializer { + @Override + public EsmEnablementAccountSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EsmEnablementAccountSettingPb pb = mapper.readValue(p, EsmEnablementAccountSettingPb.class); + return EsmEnablementAccountSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSettingPb.java new file mode 100755 index 000000000..172a1963d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSettingPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EsmEnablementAccountSettingPb { + @JsonProperty("esm_enablement_account") + private EsmEnablementAccount esmEnablementAccount; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public EsmEnablementAccountSettingPb setEsmEnablementAccount( + EsmEnablementAccount esmEnablementAccount) { + this.esmEnablementAccount = esmEnablementAccount; + return this; + } + + public EsmEnablementAccount getEsmEnablementAccount() { + return esmEnablementAccount; + } + + public EsmEnablementAccountSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public EsmEnablementAccountSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EsmEnablementAccountSettingPb that = 
(EsmEnablementAccountSettingPb) o; + return Objects.equals(esmEnablementAccount, that.esmEnablementAccount) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(esmEnablementAccount, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(EsmEnablementAccountSettingPb.class) + .add("esmEnablementAccount", esmEnablementAccount) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeToken.java index c5a36337b..c333ef756 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeToken.java @@ -4,31 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The exchange token is the result of the token exchange with the IdP */ @Generated +@JsonSerialize(using = ExchangeToken.ExchangeTokenSerializer.class) +@JsonDeserialize(using = ExchangeToken.ExchangeTokenDeserializer.class) public class ExchangeToken { /** The requested 
token. */ - @JsonProperty("credential") private String credential; /** The end-of-life timestamp of the token. The value is in milliseconds since the Unix epoch. */ - @JsonProperty("credentialEolTime") private Long credentialEolTime; /** User ID of the user that owns this token. */ - @JsonProperty("ownerId") private Long ownerId; /** The scopes of access granted in the token. */ - @JsonProperty("scopes") private Collection scopes; /** The type of this exchange token */ - @JsonProperty("tokenType") private TokenType tokenType; public ExchangeToken setCredential(String credential) { @@ -103,4 +109,45 @@ public String toString() { .add("tokenType", tokenType) .toString(); } + + ExchangeTokenPb toPb() { + ExchangeTokenPb pb = new ExchangeTokenPb(); + pb.setCredential(credential); + pb.setCredentialEolTime(credentialEolTime); + pb.setOwnerId(ownerId); + pb.setScopes(scopes); + pb.setTokenType(tokenType); + + return pb; + } + + static ExchangeToken fromPb(ExchangeTokenPb pb) { + ExchangeToken model = new ExchangeToken(); + model.setCredential(pb.getCredential()); + model.setCredentialEolTime(pb.getCredentialEolTime()); + model.setOwnerId(pb.getOwnerId()); + model.setScopes(pb.getScopes()); + model.setTokenType(pb.getTokenType()); + + return model; + } + + public static class ExchangeTokenSerializer extends JsonSerializer { + @Override + public void serialize(ExchangeToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangeTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeTokenDeserializer extends JsonDeserializer { + @Override + public ExchangeToken deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangeTokenPb pb = mapper.readValue(p, ExchangeTokenPb.class); + return ExchangeToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenPb.java new file mode 100755 index 000000000..c11f3c4a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The exchange token is the result of the token exchange with the IdP */ +@Generated +class ExchangeTokenPb { + @JsonProperty("credential") + private String credential; + + @JsonProperty("credentialEolTime") + private Long credentialEolTime; + + @JsonProperty("ownerId") + private Long ownerId; + + @JsonProperty("scopes") + private Collection scopes; + + @JsonProperty("tokenType") + private TokenType tokenType; + + public ExchangeTokenPb setCredential(String credential) { + this.credential = credential; + return this; + } + + public String getCredential() { + return credential; + } + + public ExchangeTokenPb setCredentialEolTime(Long credentialEolTime) { + this.credentialEolTime = credentialEolTime; + return this; + } + + public Long getCredentialEolTime() { + return credentialEolTime; + } + + public ExchangeTokenPb setOwnerId(Long ownerId) { + this.ownerId = ownerId; + return this; + } + + public Long getOwnerId() { + return ownerId; + } + + public ExchangeTokenPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + public 
ExchangeTokenPb setTokenType(TokenType tokenType) { + this.tokenType = tokenType; + return this; + } + + public TokenType getTokenType() { + return tokenType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeTokenPb that = (ExchangeTokenPb) o; + return Objects.equals(credential, that.credential) + && Objects.equals(credentialEolTime, that.credentialEolTime) + && Objects.equals(ownerId, that.ownerId) + && Objects.equals(scopes, that.scopes) + && Objects.equals(tokenType, that.tokenType); + } + + @Override + public int hashCode() { + return Objects.hash(credential, credentialEolTime, ownerId, scopes, tokenType); + } + + @Override + public String toString() { + return new ToStringer(ExchangeTokenPb.class) + .add("credential", credential) + .add("credentialEolTime", credentialEolTime) + .add("ownerId", ownerId) + .add("scopes", scopes) + .add("tokenType", tokenType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java index ab7ceb62a..40b1ecb2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java @@ -4,23 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Exchange a token with the IdP */ @Generated +@JsonSerialize(using = ExchangeTokenRequest.ExchangeTokenRequestSerializer.class) +@JsonDeserialize(using = ExchangeTokenRequest.ExchangeTokenRequestDeserializer.class) public class ExchangeTokenRequest { /** The partition of Credentials store */ - @JsonProperty("partitionId") private PartitionId partitionId; /** Array of scopes for the token request. */ - @JsonProperty("scopes") private Collection scopes; /** A list of token types being requested */ - @JsonProperty("tokenType") private Collection tokenType; public ExchangeTokenRequest setPartitionId(PartitionId partitionId) { @@ -73,4 +81,44 @@ public String toString() { .add("tokenType", tokenType) .toString(); } + + ExchangeTokenRequestPb toPb() { + ExchangeTokenRequestPb pb = new ExchangeTokenRequestPb(); + pb.setPartitionId(partitionId); + pb.setScopes(scopes); + pb.setTokenType(tokenType); + + return pb; + } + + static ExchangeTokenRequest fromPb(ExchangeTokenRequestPb pb) { + ExchangeTokenRequest model = new ExchangeTokenRequest(); + model.setPartitionId(pb.getPartitionId()); + model.setScopes(pb.getScopes()); + model.setTokenType(pb.getTokenType()); + + return model; + } + + public static class ExchangeTokenRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ExchangeTokenRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangeTokenRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeTokenRequestDeserializer + extends JsonDeserializer { + @Override + public ExchangeTokenRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangeTokenRequestPb pb = mapper.readValue(p, ExchangeTokenRequestPb.class); + return ExchangeTokenRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequestPb.java new file mode 100755 index 000000000..74fd130e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequestPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Exchange a token with the IdP */ +@Generated +class ExchangeTokenRequestPb { + @JsonProperty("partitionId") + private PartitionId partitionId; + + @JsonProperty("scopes") + private Collection scopes; + + @JsonProperty("tokenType") + private Collection tokenType; + + public ExchangeTokenRequestPb setPartitionId(PartitionId partitionId) { + this.partitionId = partitionId; + return this; + } + + public PartitionId getPartitionId() { + return partitionId; + } + + public ExchangeTokenRequestPb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + public ExchangeTokenRequestPb setTokenType(Collection tokenType) { + this.tokenType = tokenType; + return this; + } + + public Collection getTokenType() { + return tokenType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeTokenRequestPb that = (ExchangeTokenRequestPb) o; + return Objects.equals(partitionId, that.partitionId) 
+ && Objects.equals(scopes, that.scopes) + && Objects.equals(tokenType, that.tokenType); + } + + @Override + public int hashCode() { + return Objects.hash(partitionId, scopes, tokenType); + } + + @Override + public String toString() { + return new ToStringer(ExchangeTokenRequestPb.class) + .add("partitionId", partitionId) + .add("scopes", scopes) + .add("tokenType", tokenType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java index 9f5fd4c42..f0ff9840a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Exhanged tokens were successfully returned. 
*/ @Generated +@JsonSerialize(using = ExchangeTokenResponse.ExchangeTokenResponseSerializer.class) +@JsonDeserialize(using = ExchangeTokenResponse.ExchangeTokenResponseDeserializer.class) public class ExchangeTokenResponse { /** */ - @JsonProperty("values") private Collection values; public ExchangeTokenResponse setValues(Collection values) { @@ -41,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(ExchangeTokenResponse.class).add("values", values).toString(); } + + ExchangeTokenResponsePb toPb() { + ExchangeTokenResponsePb pb = new ExchangeTokenResponsePb(); + pb.setValues(values); + + return pb; + } + + static ExchangeTokenResponse fromPb(ExchangeTokenResponsePb pb) { + ExchangeTokenResponse model = new ExchangeTokenResponse(); + model.setValues(pb.getValues()); + + return model; + } + + public static class ExchangeTokenResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ExchangeTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExchangeTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExchangeTokenResponseDeserializer + extends JsonDeserializer { + @Override + public ExchangeTokenResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExchangeTokenResponsePb pb = mapper.readValue(p, ExchangeTokenResponsePb.class); + return ExchangeTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponsePb.java new file mode 100755 index 000000000..338208b15 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Exhanged tokens were successfully returned. */ +@Generated +class ExchangeTokenResponsePb { + @JsonProperty("values") + private Collection values; + + public ExchangeTokenResponsePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeTokenResponsePb that = (ExchangeTokenResponsePb) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return new ToStringer(ExchangeTokenResponsePb.class).add("values", values).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java index 45e0c9c3a..18f5e6c7a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** An IP access list was successfully returned. */ @Generated +@JsonSerialize(using = FetchIpAccessListResponse.FetchIpAccessListResponseSerializer.class) +@JsonDeserialize(using = FetchIpAccessListResponse.FetchIpAccessListResponseDeserializer.class) public class FetchIpAccessListResponse { /** Definition of an IP Access list */ - @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; public FetchIpAccessListResponse setIpAccessList(IpAccessListInfo ipAccessList) { @@ -42,4 +52,41 @@ public String toString() { .add("ipAccessList", ipAccessList) .toString(); } + + FetchIpAccessListResponsePb toPb() { + FetchIpAccessListResponsePb pb = new FetchIpAccessListResponsePb(); + pb.setIpAccessList(ipAccessList); + + return pb; + } + + static FetchIpAccessListResponse fromPb(FetchIpAccessListResponsePb pb) { + FetchIpAccessListResponse model = new FetchIpAccessListResponse(); + model.setIpAccessList(pb.getIpAccessList()); + + return model; + } + + public static class FetchIpAccessListResponseSerializer + extends JsonSerializer { + 
@Override + public void serialize( + FetchIpAccessListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FetchIpAccessListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FetchIpAccessListResponseDeserializer + extends JsonDeserializer { + @Override + public FetchIpAccessListResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FetchIpAccessListResponsePb pb = mapper.readValue(p, FetchIpAccessListResponsePb.class); + return FetchIpAccessListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponsePb.java new file mode 100755 index 000000000..015fb04c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** An IP access list was successfully returned. 
*/ +@Generated +class FetchIpAccessListResponsePb { + @JsonProperty("ip_access_list") + private IpAccessListInfo ipAccessList; + + public FetchIpAccessListResponsePb setIpAccessList(IpAccessListInfo ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessListInfo getIpAccessList() { + return ipAccessList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FetchIpAccessListResponsePb that = (FetchIpAccessListResponsePb) o; + return Objects.equals(ipAccessList, that.ipAccessList); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessList); + } + + @Override + public String toString() { + return new ToStringer(FetchIpAccessListResponsePb.class) + .add("ipAccessList", ipAccessList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java index 4bbc481ad..eda49357b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GenericWebhookConfig.GenericWebhookConfigSerializer.class) +@JsonDeserialize(using = GenericWebhookConfig.GenericWebhookConfigDeserializer.class) public class GenericWebhookConfig { /** [Input-Only][Optional] Password for webhook. */ - @JsonProperty("password") private String password; /** [Output-Only] Whether password is set. */ - @JsonProperty("password_set") private Boolean passwordSet; /** [Input-Only] URL for webhook. */ - @JsonProperty("url") private String url; /** [Output-Only] Whether URL is set. */ - @JsonProperty("url_set") private Boolean urlSet; /** [Input-Only][Optional] Username for webhook. */ - @JsonProperty("username") private String username; /** [Output-Only] Whether username is set. */ - @JsonProperty("username_set") private Boolean usernameSet; public GenericWebhookConfig setPassword(String password) { @@ -116,4 +121,50 @@ public String toString() { .add("usernameSet", usernameSet) .toString(); } + + GenericWebhookConfigPb toPb() { + GenericWebhookConfigPb pb = new GenericWebhookConfigPb(); + pb.setPassword(password); + pb.setPasswordSet(passwordSet); + pb.setUrl(url); + pb.setUrlSet(urlSet); + pb.setUsername(username); + pb.setUsernameSet(usernameSet); + + return pb; + } + + static GenericWebhookConfig fromPb(GenericWebhookConfigPb pb) { + GenericWebhookConfig model = new GenericWebhookConfig(); + model.setPassword(pb.getPassword()); + model.setPasswordSet(pb.getPasswordSet()); + model.setUrl(pb.getUrl()); + model.setUrlSet(pb.getUrlSet()); + model.setUsername(pb.getUsername()); + model.setUsernameSet(pb.getUsernameSet()); + + return model; + } + + public static class GenericWebhookConfigSerializer extends JsonSerializer { + @Override + public void serialize( + GenericWebhookConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GenericWebhookConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class GenericWebhookConfigDeserializer + extends JsonDeserializer { + @Override + public GenericWebhookConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GenericWebhookConfigPb pb = mapper.readValue(p, GenericWebhookConfigPb.class); + return GenericWebhookConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfigPb.java new file mode 100755 index 000000000..df6912336 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfigPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GenericWebhookConfigPb { + @JsonProperty("password") + private String password; + + @JsonProperty("password_set") + private Boolean passwordSet; + + @JsonProperty("url") + private String url; + + @JsonProperty("url_set") + private Boolean urlSet; + + @JsonProperty("username") + private String username; + + @JsonProperty("username_set") + private Boolean usernameSet; + + public GenericWebhookConfigPb setPassword(String password) { + this.password = password; + return this; + } + + public String getPassword() { + return password; + } + + public GenericWebhookConfigPb setPasswordSet(Boolean passwordSet) { + this.passwordSet = passwordSet; + return this; + } + + public Boolean getPasswordSet() { + return passwordSet; + } + + public GenericWebhookConfigPb setUrl(String url) { + this.url = url; + return 
this; + } + + public String getUrl() { + return url; + } + + public GenericWebhookConfigPb setUrlSet(Boolean urlSet) { + this.urlSet = urlSet; + return this; + } + + public Boolean getUrlSet() { + return urlSet; + } + + public GenericWebhookConfigPb setUsername(String username) { + this.username = username; + return this; + } + + public String getUsername() { + return username; + } + + public GenericWebhookConfigPb setUsernameSet(Boolean usernameSet) { + this.usernameSet = usernameSet; + return this; + } + + public Boolean getUsernameSet() { + return usernameSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenericWebhookConfigPb that = (GenericWebhookConfigPb) o; + return Objects.equals(password, that.password) + && Objects.equals(passwordSet, that.passwordSet) + && Objects.equals(url, that.url) + && Objects.equals(urlSet, that.urlSet) + && Objects.equals(username, that.username) + && Objects.equals(usernameSet, that.usernameSet); + } + + @Override + public int hashCode() { + return Objects.hash(password, passwordSet, url, urlSet, username, usernameSet); + } + + @Override + public String toString() { + return new ToStringer(GenericWebhookConfigPb.class) + .add("password", password) + .add("passwordSet", passwordSet) + .add("url", url) + .add("urlSet", urlSet) + .add("username", username) + .add("usernameSet", usernameSet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java index 348af6711..41415c158 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java @@ -3,13 +3,25 @@ package 
com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the account IP access toggle setting */ @Generated +@JsonSerialize( + using = GetAccountIpAccessEnableRequest.GetAccountIpAccessEnableRequestSerializer.class) +@JsonDeserialize( + using = GetAccountIpAccessEnableRequest.GetAccountIpAccessEnableRequestDeserializer.class) public class GetAccountIpAccessEnableRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class GetAccountIpAccessEnableRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetAccountIpAccessEnableRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetAccountIpAccessEnableRequest.class).add("etag", etag).toString(); } + + GetAccountIpAccessEnableRequestPb toPb() { + GetAccountIpAccessEnableRequestPb pb = new GetAccountIpAccessEnableRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetAccountIpAccessEnableRequest fromPb(GetAccountIpAccessEnableRequestPb pb) { + GetAccountIpAccessEnableRequest model = new GetAccountIpAccessEnableRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetAccountIpAccessEnableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountIpAccessEnableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountIpAccessEnableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountIpAccessEnableRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountIpAccessEnableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountIpAccessEnableRequestPb pb = + mapper.readValue(p, GetAccountIpAccessEnableRequestPb.class); + return GetAccountIpAccessEnableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequestPb.java new file mode 100755 index 000000000..cc8b394e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the account IP access toggle setting */ +@Generated +class GetAccountIpAccessEnableRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAccountIpAccessEnableRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountIpAccessEnableRequestPb that = (GetAccountIpAccessEnableRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetAccountIpAccessEnableRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java index 4d2da465d..ee043b5fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get IP access list */ @Generated +@JsonSerialize(using = GetAccountIpAccessListRequest.GetAccountIpAccessListRequestSerializer.class) +@JsonDeserialize( + using = GetAccountIpAccessListRequest.GetAccountIpAccessListRequestDeserializer.class) public class GetAccountIpAccessListRequest { /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; public GetAccountIpAccessListRequest setIpAccessListId(String ipAccessListId) { this.ipAccessListId = ipAccessListId; @@ -41,4 +53,42 @@ public String toString() { .add("ipAccessListId", ipAccessListId) .toString(); } + + GetAccountIpAccessListRequestPb toPb() { + GetAccountIpAccessListRequestPb pb = new GetAccountIpAccessListRequestPb(); + pb.setIpAccessListId(ipAccessListId); + + return pb; + } + + static GetAccountIpAccessListRequest fromPb(GetAccountIpAccessListRequestPb pb) 
{ + GetAccountIpAccessListRequest model = new GetAccountIpAccessListRequest(); + model.setIpAccessListId(pb.getIpAccessListId()); + + return model; + } + + public static class GetAccountIpAccessListRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAccountIpAccessListRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAccountIpAccessListRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAccountIpAccessListRequestDeserializer + extends JsonDeserializer { + @Override + public GetAccountIpAccessListRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAccountIpAccessListRequestPb pb = + mapper.readValue(p, GetAccountIpAccessListRequestPb.class); + return GetAccountIpAccessListRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequestPb.java new file mode 100755 index 000000000..8eb20ff2d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get IP access list */ +@Generated +class GetAccountIpAccessListRequestPb { + @JsonIgnore private String ipAccessListId; + + public GetAccountIpAccessListRequestPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAccountIpAccessListRequestPb that = (GetAccountIpAccessListRequestPb) o; + return Objects.equals(ipAccessListId, that.ipAccessListId); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessListId); + } + + @Override + public String toString() { + return new ToStringer(GetAccountIpAccessListRequestPb.class) + .add("ipAccessListId", ipAccessListId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java index 48a8c3910..0315e7d00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Retrieve the AI/BI dashboard embedding access policy */ @Generated +@JsonSerialize( + using = + GetAibiDashboardEmbeddingAccessPolicySettingRequest + .GetAibiDashboardEmbeddingAccessPolicySettingRequestSerializer.class) +@JsonDeserialize( + using = + GetAibiDashboardEmbeddingAccessPolicySettingRequest + .GetAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer.class) public class GetAibiDashboardEmbeddingAccessPolicySettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class GetAibiDashboardEmbeddingAccessPolicySettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb toPb() { + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + new GetAibiDashboardEmbeddingAccessPolicySettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetAibiDashboardEmbeddingAccessPolicySettingRequest fromPb( + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb pb) { + GetAibiDashboardEmbeddingAccessPolicySettingRequest model = + new GetAibiDashboardEmbeddingAccessPolicySettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetAibiDashboardEmbeddingAccessPolicySettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAibiDashboardEmbeddingAccessPolicySettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetAibiDashboardEmbeddingAccessPolicySettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + mapper.readValue(p, GetAibiDashboardEmbeddingAccessPolicySettingRequestPb.class); + return GetAibiDashboardEmbeddingAccessPolicySettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequestPb.java new file mode 100755 index 000000000..2d9513b8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the AI/BI dashboard embedding access policy */ +@Generated +class GetAibiDashboardEmbeddingAccessPolicySettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAibiDashboardEmbeddingAccessPolicySettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAibiDashboardEmbeddingAccessPolicySettingRequestPb that = + (GetAibiDashboardEmbeddingAccessPolicySettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new 
ToStringer(GetAibiDashboardEmbeddingAccessPolicySettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java index a9db24cfa..bc4005425 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Retrieve the list of domains approved to host embedded AI/BI dashboards */ @Generated +@JsonSerialize( + using = + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest + .GetAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer.class) +@JsonDeserialize( + using = + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest + .GetAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer.class) public class 
GetAibiDashboardEmbeddingApprovedDomainsSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class GetAibiDashboardEmbeddingApprovedDomainsSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb toPb() { + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + new GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetAibiDashboardEmbeddingApprovedDomainsSettingRequest fromPb( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb) { + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest model = + new GetAibiDashboardEmbeddingApprovedDomainsSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetAibiDashboardEmbeddingApprovedDomainsSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + mapper.readValue(p, GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class); + return GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java new file mode 100755 index 000000000..0ca211c02 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the list of domains approved to host embedded AI/BI dashboards */ +@Generated +class GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb that = + (GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new 
ToStringer(GetAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java index 2230b6b35..745b5a369 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the automatic cluster update setting */ @Generated +@JsonSerialize( + using = + GetAutomaticClusterUpdateSettingRequest.GetAutomaticClusterUpdateSettingRequestSerializer + .class) +@JsonDeserialize( + using = + GetAutomaticClusterUpdateSettingRequest.GetAutomaticClusterUpdateSettingRequestDeserializer + .class) public class GetAutomaticClusterUpdateSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +35,6 @@ public class GetAutomaticClusterUpdateSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetAutomaticClusterUpdateSettingRequest setEtag(String etag) { @@ -51,4 +65,45 @@ public String toString() { .add("etag", etag) .toString(); } + + GetAutomaticClusterUpdateSettingRequestPb toPb() { + GetAutomaticClusterUpdateSettingRequestPb pb = new GetAutomaticClusterUpdateSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetAutomaticClusterUpdateSettingRequest fromPb( + GetAutomaticClusterUpdateSettingRequestPb pb) { + GetAutomaticClusterUpdateSettingRequest model = new GetAutomaticClusterUpdateSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetAutomaticClusterUpdateSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAutomaticClusterUpdateSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetAutomaticClusterUpdateSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAutomaticClusterUpdateSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetAutomaticClusterUpdateSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAutomaticClusterUpdateSettingRequestPb pb = + mapper.readValue(p, GetAutomaticClusterUpdateSettingRequestPb.class); + return GetAutomaticClusterUpdateSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequestPb.java new file mode 100755 index 000000000..925a7c054 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the automatic cluster update setting */ +@Generated +class GetAutomaticClusterUpdateSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAutomaticClusterUpdateSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAutomaticClusterUpdateSettingRequestPb that = (GetAutomaticClusterUpdateSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetAutomaticClusterUpdateSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java index d079f96ee..703977cec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the compliance security profile setting */ @Generated +@JsonSerialize( + using = + GetComplianceSecurityProfileSettingRequest + .GetComplianceSecurityProfileSettingRequestSerializer.class) +@JsonDeserialize( + using = + GetComplianceSecurityProfileSettingRequest + .GetComplianceSecurityProfileSettingRequestDeserializer.class) public class GetComplianceSecurityProfileSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +35,6 @@ public class GetComplianceSecurityProfileSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetComplianceSecurityProfileSettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + GetComplianceSecurityProfileSettingRequestPb toPb() { + GetComplianceSecurityProfileSettingRequestPb pb = + new GetComplianceSecurityProfileSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetComplianceSecurityProfileSettingRequest fromPb( + GetComplianceSecurityProfileSettingRequestPb pb) { + GetComplianceSecurityProfileSettingRequest model = + new GetComplianceSecurityProfileSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetComplianceSecurityProfileSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetComplianceSecurityProfileSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetComplianceSecurityProfileSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetComplianceSecurityProfileSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetComplianceSecurityProfileSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetComplianceSecurityProfileSettingRequestPb pb = + mapper.readValue(p, GetComplianceSecurityProfileSettingRequestPb.class); + return GetComplianceSecurityProfileSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequestPb.java new file mode 100755 index 000000000..a749b9c9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the compliance security profile setting */ +@Generated +class GetComplianceSecurityProfileSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetComplianceSecurityProfileSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetComplianceSecurityProfileSettingRequestPb that = + (GetComplianceSecurityProfileSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetComplianceSecurityProfileSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java index 6d23ed820..ea9e108a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java @@ -3,13 +3,28 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the compliance security profile setting for new workspaces */ @Generated +@JsonSerialize( + using = + GetCspEnablementAccountSettingRequest.GetCspEnablementAccountSettingRequestSerializer.class) +@JsonDeserialize( + using = + GetCspEnablementAccountSettingRequest.GetCspEnablementAccountSettingRequestDeserializer + .class) public class GetCspEnablementAccountSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +34,6 @@ public class GetCspEnablementAccountSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetCspEnablementAccountSettingRequest setEtag(String etag) { @@ -49,4 +62,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetCspEnablementAccountSettingRequest.class).add("etag", etag).toString(); } + + GetCspEnablementAccountSettingRequestPb toPb() { + GetCspEnablementAccountSettingRequestPb pb = new GetCspEnablementAccountSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetCspEnablementAccountSettingRequest fromPb(GetCspEnablementAccountSettingRequestPb pb) { + GetCspEnablementAccountSettingRequest model = new GetCspEnablementAccountSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetCspEnablementAccountSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCspEnablementAccountSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCspEnablementAccountSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCspEnablementAccountSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetCspEnablementAccountSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCspEnablementAccountSettingRequestPb pb = + mapper.readValue(p, GetCspEnablementAccountSettingRequestPb.class); + return GetCspEnablementAccountSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequestPb.java new file mode 100755 index 000000000..bafe8bb3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the compliance security profile setting for new workspaces */ +@Generated +class GetCspEnablementAccountSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetCspEnablementAccountSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCspEnablementAccountSettingRequestPb that = (GetCspEnablementAccountSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetCspEnablementAccountSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java index 0c545ca9b..78117fbd3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java @@ -3,13 +3,28 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the Dashboard Email Subscriptions setting */ @Generated +@JsonSerialize( + using = + GetDashboardEmailSubscriptionsRequest.GetDashboardEmailSubscriptionsRequestSerializer.class) +@JsonDeserialize( + using = + GetDashboardEmailSubscriptionsRequest.GetDashboardEmailSubscriptionsRequestDeserializer + .class) public class GetDashboardEmailSubscriptionsRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +34,6 @@ public class GetDashboardEmailSubscriptionsRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetDashboardEmailSubscriptionsRequest setEtag(String etag) { @@ -49,4 +62,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetDashboardEmailSubscriptionsRequest.class).add("etag", etag).toString(); } + + GetDashboardEmailSubscriptionsRequestPb toPb() { + GetDashboardEmailSubscriptionsRequestPb pb = new GetDashboardEmailSubscriptionsRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetDashboardEmailSubscriptionsRequest fromPb(GetDashboardEmailSubscriptionsRequestPb pb) { + GetDashboardEmailSubscriptionsRequest model = new GetDashboardEmailSubscriptionsRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetDashboardEmailSubscriptionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDashboardEmailSubscriptionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDashboardEmailSubscriptionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDashboardEmailSubscriptionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetDashboardEmailSubscriptionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDashboardEmailSubscriptionsRequestPb pb = + mapper.readValue(p, GetDashboardEmailSubscriptionsRequestPb.class); + return GetDashboardEmailSubscriptionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequestPb.java new file mode 100755 index 000000000..82690954a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the Dashboard Email Subscriptions setting */ +@Generated +class GetDashboardEmailSubscriptionsRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDashboardEmailSubscriptionsRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDashboardEmailSubscriptionsRequestPb that = (GetDashboardEmailSubscriptionsRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDashboardEmailSubscriptionsRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java index 7d52bdc9b..5c8117e72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the default namespace setting */ @Generated +@JsonSerialize( + using = GetDefaultNamespaceSettingRequest.GetDefaultNamespaceSettingRequestSerializer.class) +@JsonDeserialize( + using = GetDefaultNamespaceSettingRequest.GetDefaultNamespaceSettingRequestDeserializer.class) public class GetDefaultNamespaceSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class GetDefaultNamespaceSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetDefaultNamespaceSettingRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetDefaultNamespaceSettingRequest.class).add("etag", etag).toString(); } + + GetDefaultNamespaceSettingRequestPb toPb() { + GetDefaultNamespaceSettingRequestPb pb = new GetDefaultNamespaceSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetDefaultNamespaceSettingRequest fromPb(GetDefaultNamespaceSettingRequestPb pb) { + GetDefaultNamespaceSettingRequest model = new GetDefaultNamespaceSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetDefaultNamespaceSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDefaultNamespaceSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDefaultNamespaceSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDefaultNamespaceSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetDefaultNamespaceSettingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDefaultNamespaceSettingRequestPb pb = + mapper.readValue(p, GetDefaultNamespaceSettingRequestPb.class); + return GetDefaultNamespaceSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequestPb.java new file mode 100755 index 000000000..3e20088b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the default namespace setting */ +@Generated +class GetDefaultNamespaceSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDefaultNamespaceSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDefaultNamespaceSettingRequestPb that = (GetDefaultNamespaceSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDefaultNamespaceSettingRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java index 75c0e9208..cd5c339df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java @@ -3,13 +3,24 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Retrieve Legacy Access Disablement Status */ @Generated +@JsonSerialize(using = GetDisableLegacyAccessRequest.GetDisableLegacyAccessRequestSerializer.class) +@JsonDeserialize( + using = GetDisableLegacyAccessRequest.GetDisableLegacyAccessRequestDeserializer.class) public class GetDisableLegacyAccessRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +30,6 @@ public class GetDisableLegacyAccessRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetDisableLegacyAccessRequest setEtag(String etag) { @@ -49,4 +58,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetDisableLegacyAccessRequest.class).add("etag", etag).toString(); } + + GetDisableLegacyAccessRequestPb toPb() { + GetDisableLegacyAccessRequestPb pb = new GetDisableLegacyAccessRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetDisableLegacyAccessRequest fromPb(GetDisableLegacyAccessRequestPb pb) { + GetDisableLegacyAccessRequest model = new GetDisableLegacyAccessRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetDisableLegacyAccessRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDisableLegacyAccessRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDisableLegacyAccessRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDisableLegacyAccessRequestDeserializer + extends JsonDeserializer { + @Override + public GetDisableLegacyAccessRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDisableLegacyAccessRequestPb pb = + mapper.readValue(p, GetDisableLegacyAccessRequestPb.class); + return GetDisableLegacyAccessRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequestPb.java new file mode 100755 index 000000000..80fa1e59d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve Legacy Access Disablement Status */ +@Generated +class GetDisableLegacyAccessRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDisableLegacyAccessRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDisableLegacyAccessRequestPb that = (GetDisableLegacyAccessRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDisableLegacyAccessRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java index d3f3545f9..3b1a41519 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the disable legacy DBFS setting */ @Generated +@JsonSerialize(using = GetDisableLegacyDbfsRequest.GetDisableLegacyDbfsRequestSerializer.class) +@JsonDeserialize(using = GetDisableLegacyDbfsRequest.GetDisableLegacyDbfsRequestDeserializer.class) public class GetDisableLegacyDbfsRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +29,6 @@ public class GetDisableLegacyDbfsRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetDisableLegacyDbfsRequest setEtag(String etag) { @@ -49,4 +57,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetDisableLegacyDbfsRequest.class).add("etag", etag).toString(); } + + GetDisableLegacyDbfsRequestPb toPb() { + GetDisableLegacyDbfsRequestPb pb = new GetDisableLegacyDbfsRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetDisableLegacyDbfsRequest fromPb(GetDisableLegacyDbfsRequestPb pb) { + GetDisableLegacyDbfsRequest model = new GetDisableLegacyDbfsRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetDisableLegacyDbfsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDisableLegacyDbfsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDisableLegacyDbfsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDisableLegacyDbfsRequestDeserializer + extends JsonDeserializer { + @Override + public GetDisableLegacyDbfsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDisableLegacyDbfsRequestPb pb = mapper.readValue(p, GetDisableLegacyDbfsRequestPb.class); + return GetDisableLegacyDbfsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequestPb.java new file mode 100755 index 000000000..e28e06ea1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the disable legacy DBFS setting */ +@Generated +class GetDisableLegacyDbfsRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDisableLegacyDbfsRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDisableLegacyDbfsRequestPb that = (GetDisableLegacyDbfsRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDisableLegacyDbfsRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java index edf3a313d..d3d128e01 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the disable legacy features setting */ @Generated +@JsonSerialize( + using = GetDisableLegacyFeaturesRequest.GetDisableLegacyFeaturesRequestSerializer.class) +@JsonDeserialize( + using = GetDisableLegacyFeaturesRequest.GetDisableLegacyFeaturesRequestDeserializer.class) public class GetDisableLegacyFeaturesRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class GetDisableLegacyFeaturesRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetDisableLegacyFeaturesRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetDisableLegacyFeaturesRequest.class).add("etag", etag).toString(); } + + GetDisableLegacyFeaturesRequestPb toPb() { + GetDisableLegacyFeaturesRequestPb pb = new GetDisableLegacyFeaturesRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetDisableLegacyFeaturesRequest fromPb(GetDisableLegacyFeaturesRequestPb pb) { + GetDisableLegacyFeaturesRequest model = new GetDisableLegacyFeaturesRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetDisableLegacyFeaturesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDisableLegacyFeaturesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDisableLegacyFeaturesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDisableLegacyFeaturesRequestDeserializer + extends JsonDeserializer { + @Override + public GetDisableLegacyFeaturesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDisableLegacyFeaturesRequestPb pb = + mapper.readValue(p, GetDisableLegacyFeaturesRequestPb.class); + return GetDisableLegacyFeaturesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequestPb.java new file mode 100755 index 000000000..55120474c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the disable legacy features setting */ +@Generated +class GetDisableLegacyFeaturesRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDisableLegacyFeaturesRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDisableLegacyFeaturesRequestPb that = (GetDisableLegacyFeaturesRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDisableLegacyFeaturesRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java index 64a6b938c..a85d2360c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the enhanced security monitoring setting */ @Generated +@JsonSerialize( + using = + GetEnhancedSecurityMonitoringSettingRequest + .GetEnhancedSecurityMonitoringSettingRequestSerializer.class) +@JsonDeserialize( + using = + GetEnhancedSecurityMonitoringSettingRequest + .GetEnhancedSecurityMonitoringSettingRequestDeserializer.class) public class GetEnhancedSecurityMonitoringSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class GetEnhancedSecurityMonitoringSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetEnhancedSecurityMonitoringSettingRequest setEtag(String etag) { @@ -52,4 +66,47 @@ public String toString() { .add("etag", etag) .toString(); } + + GetEnhancedSecurityMonitoringSettingRequestPb toPb() { + GetEnhancedSecurityMonitoringSettingRequestPb pb = + new GetEnhancedSecurityMonitoringSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetEnhancedSecurityMonitoringSettingRequest fromPb( + GetEnhancedSecurityMonitoringSettingRequestPb pb) { + GetEnhancedSecurityMonitoringSettingRequest model = + new GetEnhancedSecurityMonitoringSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetEnhancedSecurityMonitoringSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetEnhancedSecurityMonitoringSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetEnhancedSecurityMonitoringSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEnhancedSecurityMonitoringSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetEnhancedSecurityMonitoringSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEnhancedSecurityMonitoringSettingRequestPb pb = + mapper.readValue(p, GetEnhancedSecurityMonitoringSettingRequestPb.class); + return GetEnhancedSecurityMonitoringSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequestPb.java new file mode 100755 index 000000000..01a652c37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enhanced security monitoring setting */ +@Generated +class GetEnhancedSecurityMonitoringSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetEnhancedSecurityMonitoringSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEnhancedSecurityMonitoringSettingRequestPb that = + (GetEnhancedSecurityMonitoringSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetEnhancedSecurityMonitoringSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java index 2c6b8e50e..4af15c99f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java @@ -3,13 +3,28 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the enhanced security monitoring setting for new workspaces */ @Generated +@JsonSerialize( + using = + GetEsmEnablementAccountSettingRequest.GetEsmEnablementAccountSettingRequestSerializer.class) +@JsonDeserialize( + using = + GetEsmEnablementAccountSettingRequest.GetEsmEnablementAccountSettingRequestDeserializer + .class) public class GetEsmEnablementAccountSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +34,6 @@ public class GetEsmEnablementAccountSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetEsmEnablementAccountSettingRequest setEtag(String etag) { @@ -49,4 +62,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetEsmEnablementAccountSettingRequest.class).add("etag", etag).toString(); } + + GetEsmEnablementAccountSettingRequestPb toPb() { + GetEsmEnablementAccountSettingRequestPb pb = new GetEsmEnablementAccountSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetEsmEnablementAccountSettingRequest fromPb(GetEsmEnablementAccountSettingRequestPb pb) { + GetEsmEnablementAccountSettingRequest model = new GetEsmEnablementAccountSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetEsmEnablementAccountSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetEsmEnablementAccountSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEsmEnablementAccountSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEsmEnablementAccountSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetEsmEnablementAccountSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEsmEnablementAccountSettingRequestPb pb = + mapper.readValue(p, GetEsmEnablementAccountSettingRequestPb.class); + return GetEsmEnablementAccountSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequestPb.java new file mode 100755 index 000000000..e99113d34 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enhanced security monitoring setting for new workspaces */ +@Generated +class GetEsmEnablementAccountSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetEsmEnablementAccountSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEsmEnablementAccountSettingRequestPb that = (GetEsmEnablementAccountSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetEsmEnablementAccountSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java index 092cb17ac..78c9b7771 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get access list */ @Generated +@JsonSerialize(using = GetIpAccessListRequest.GetIpAccessListRequestSerializer.class) +@JsonDeserialize(using = GetIpAccessListRequest.GetIpAccessListRequestDeserializer.class) public class GetIpAccessListRequest { /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; public GetIpAccessListRequest setIpAccessListId(String ipAccessListId) { this.ipAccessListId = ipAccessListId; @@ -41,4 +52,41 @@ public String toString() { .add("ipAccessListId", ipAccessListId) .toString(); } + + GetIpAccessListRequestPb toPb() { + GetIpAccessListRequestPb pb = new GetIpAccessListRequestPb(); + pb.setIpAccessListId(ipAccessListId); + + return pb; + } + + static GetIpAccessListRequest fromPb(GetIpAccessListRequestPb pb) { + 
GetIpAccessListRequest model = new GetIpAccessListRequest(); + model.setIpAccessListId(pb.getIpAccessListId()); + + return model; + } + + public static class GetIpAccessListRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetIpAccessListRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetIpAccessListRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetIpAccessListRequestDeserializer + extends JsonDeserializer { + @Override + public GetIpAccessListRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetIpAccessListRequestPb pb = mapper.readValue(p, GetIpAccessListRequestPb.class); + return GetIpAccessListRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequestPb.java new file mode 100755 index 000000000..53f74cd03 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get access list */ +@Generated +class GetIpAccessListRequestPb { + @JsonIgnore private String ipAccessListId; + + public GetIpAccessListRequestPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIpAccessListRequestPb that = (GetIpAccessListRequestPb) o; + return Objects.equals(ipAccessListId, that.ipAccessListId); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessListId); + } + + @Override + public String toString() { + return new ToStringer(GetIpAccessListRequestPb.class) + .add("ipAccessListId", ipAccessListId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java index 88afa428f..22002211b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetIpAccessListResponse.GetIpAccessListResponseSerializer.class) +@JsonDeserialize(using = GetIpAccessListResponse.GetIpAccessListResponseDeserializer.class) public class GetIpAccessListResponse { /** Definition of an IP Access list */ - @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; public GetIpAccessListResponse setIpAccessList(IpAccessListInfo ipAccessList) { @@ -41,4 +51,41 @@ public String toString() { .add("ipAccessList", ipAccessList) .toString(); } + + GetIpAccessListResponsePb toPb() { + GetIpAccessListResponsePb pb = new GetIpAccessListResponsePb(); + pb.setIpAccessList(ipAccessList); + + return pb; + } + + static GetIpAccessListResponse fromPb(GetIpAccessListResponsePb pb) { + GetIpAccessListResponse model = new GetIpAccessListResponse(); + model.setIpAccessList(pb.getIpAccessList()); + + return model; + } + + public static class GetIpAccessListResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetIpAccessListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetIpAccessListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetIpAccessListResponseDeserializer + extends JsonDeserializer { + @Override + public GetIpAccessListResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetIpAccessListResponsePb pb = mapper.readValue(p, GetIpAccessListResponsePb.class); + return GetIpAccessListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponsePb.java new file mode 100755 index 000000000..8f870f956 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetIpAccessListResponsePb { + @JsonProperty("ip_access_list") + private IpAccessListInfo ipAccessList; + + public GetIpAccessListResponsePb setIpAccessList(IpAccessListInfo ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessListInfo getIpAccessList() { + return ipAccessList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIpAccessListResponsePb that = (GetIpAccessListResponsePb) o; + return Objects.equals(ipAccessList, that.ipAccessList); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessList); + } + + @Override + public String toString() { + return new ToStringer(GetIpAccessListResponsePb.class) + .add("ipAccessList", ipAccessList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java index c82ac1638..b094780a4 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** IP access lists were successfully returned. 
*/ @Generated +@JsonSerialize(using = GetIpAccessListsResponse.GetIpAccessListsResponseSerializer.class) +@JsonDeserialize(using = GetIpAccessListsResponse.GetIpAccessListsResponseDeserializer.class) public class GetIpAccessListsResponse { /** */ - @JsonProperty("ip_access_lists") private Collection ipAccessLists; public GetIpAccessListsResponse setIpAccessLists(Collection ipAccessLists) { @@ -43,4 +53,41 @@ public String toString() { .add("ipAccessLists", ipAccessLists) .toString(); } + + GetIpAccessListsResponsePb toPb() { + GetIpAccessListsResponsePb pb = new GetIpAccessListsResponsePb(); + pb.setIpAccessLists(ipAccessLists); + + return pb; + } + + static GetIpAccessListsResponse fromPb(GetIpAccessListsResponsePb pb) { + GetIpAccessListsResponse model = new GetIpAccessListsResponse(); + model.setIpAccessLists(pb.getIpAccessLists()); + + return model; + } + + public static class GetIpAccessListsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetIpAccessListsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetIpAccessListsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetIpAccessListsResponseDeserializer + extends JsonDeserializer { + @Override + public GetIpAccessListsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetIpAccessListsResponsePb pb = mapper.readValue(p, GetIpAccessListsResponsePb.class); + return GetIpAccessListsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponsePb.java new file mode 100755 index 000000000..db7b960a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** IP access lists were successfully returned. */ +@Generated +class GetIpAccessListsResponsePb { + @JsonProperty("ip_access_lists") + private Collection ipAccessLists; + + public GetIpAccessListsResponsePb setIpAccessLists(Collection ipAccessLists) { + this.ipAccessLists = ipAccessLists; + return this; + } + + public Collection getIpAccessLists() { + return ipAccessLists; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIpAccessListsResponsePb that = (GetIpAccessListsResponsePb) o; + return Objects.equals(ipAccessLists, that.ipAccessLists); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessLists); + } + + @Override + public String toString() { + return new ToStringer(GetIpAccessListsResponsePb.class) + .add("ipAccessLists", ipAccessLists) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java index d5fd149f4..4bc69c389 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the enable partner powered AI features account setting */ @Generated +@JsonSerialize( + using = + GetLlmProxyPartnerPoweredAccountRequest.GetLlmProxyPartnerPoweredAccountRequestSerializer + .class) +@JsonDeserialize( + using = + GetLlmProxyPartnerPoweredAccountRequest.GetLlmProxyPartnerPoweredAccountRequestDeserializer + .class) public class GetLlmProxyPartnerPoweredAccountRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class GetLlmProxyPartnerPoweredAccountRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetLlmProxyPartnerPoweredAccountRequest setEtag(String etag) { @@ -51,4 +65,45 @@ public String toString() { .add("etag", etag) .toString(); } + + GetLlmProxyPartnerPoweredAccountRequestPb toPb() { + GetLlmProxyPartnerPoweredAccountRequestPb pb = new GetLlmProxyPartnerPoweredAccountRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetLlmProxyPartnerPoweredAccountRequest fromPb( + GetLlmProxyPartnerPoweredAccountRequestPb pb) { + GetLlmProxyPartnerPoweredAccountRequest model = new GetLlmProxyPartnerPoweredAccountRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetLlmProxyPartnerPoweredAccountRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLlmProxyPartnerPoweredAccountRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetLlmProxyPartnerPoweredAccountRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLlmProxyPartnerPoweredAccountRequestDeserializer + extends JsonDeserializer { + @Override + public GetLlmProxyPartnerPoweredAccountRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLlmProxyPartnerPoweredAccountRequestPb pb = + mapper.readValue(p, GetLlmProxyPartnerPoweredAccountRequestPb.class); + return GetLlmProxyPartnerPoweredAccountRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequestPb.java new file mode 100755 index 000000000..9a1e05c78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enable partner powered AI features account setting */ +@Generated +class GetLlmProxyPartnerPoweredAccountRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredAccountRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredAccountRequestPb that = (GetLlmProxyPartnerPoweredAccountRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredAccountRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java index 63e690981..5e77cd3e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the enforcement status of partner powered AI features account setting */ @Generated +@JsonSerialize( + using = + GetLlmProxyPartnerPoweredEnforceRequest.GetLlmProxyPartnerPoweredEnforceRequestSerializer + .class) +@JsonDeserialize( + using = + GetLlmProxyPartnerPoweredEnforceRequest.GetLlmProxyPartnerPoweredEnforceRequestDeserializer + .class) public class GetLlmProxyPartnerPoweredEnforceRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +35,6 @@ public class GetLlmProxyPartnerPoweredEnforceRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetLlmProxyPartnerPoweredEnforceRequest setEtag(String etag) { @@ -51,4 +65,45 @@ public String toString() { .add("etag", etag) .toString(); } + + GetLlmProxyPartnerPoweredEnforceRequestPb toPb() { + GetLlmProxyPartnerPoweredEnforceRequestPb pb = new GetLlmProxyPartnerPoweredEnforceRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetLlmProxyPartnerPoweredEnforceRequest fromPb( + GetLlmProxyPartnerPoweredEnforceRequestPb pb) { + GetLlmProxyPartnerPoweredEnforceRequest model = new GetLlmProxyPartnerPoweredEnforceRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetLlmProxyPartnerPoweredEnforceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLlmProxyPartnerPoweredEnforceRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetLlmProxyPartnerPoweredEnforceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLlmProxyPartnerPoweredEnforceRequestDeserializer + extends JsonDeserializer { + @Override + public GetLlmProxyPartnerPoweredEnforceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLlmProxyPartnerPoweredEnforceRequestPb pb = + mapper.readValue(p, GetLlmProxyPartnerPoweredEnforceRequestPb.class); + return GetLlmProxyPartnerPoweredEnforceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequestPb.java new file mode 100755 index 000000000..29e652f24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enforcement status of partner powered AI features account setting */ +@Generated +class GetLlmProxyPartnerPoweredEnforceRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredEnforceRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredEnforceRequestPb that = (GetLlmProxyPartnerPoweredEnforceRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredEnforceRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java index b149178f4..279f5ef1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the enable partner powered AI features workspace setting */ @Generated +@JsonSerialize( + using = + GetLlmProxyPartnerPoweredWorkspaceRequest + .GetLlmProxyPartnerPoweredWorkspaceRequestSerializer.class) +@JsonDeserialize( + using = + GetLlmProxyPartnerPoweredWorkspaceRequest + .GetLlmProxyPartnerPoweredWorkspaceRequestDeserializer.class) public class GetLlmProxyPartnerPoweredWorkspaceRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -19,8 +35,6 @@ public class GetLlmProxyPartnerPoweredWorkspaceRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) { @@ -51,4 +65,47 @@ public String toString() { .add("etag", etag) .toString(); } + + GetLlmProxyPartnerPoweredWorkspaceRequestPb toPb() { + GetLlmProxyPartnerPoweredWorkspaceRequestPb pb = + new GetLlmProxyPartnerPoweredWorkspaceRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetLlmProxyPartnerPoweredWorkspaceRequest fromPb( + GetLlmProxyPartnerPoweredWorkspaceRequestPb pb) { + GetLlmProxyPartnerPoweredWorkspaceRequest model = + new GetLlmProxyPartnerPoweredWorkspaceRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetLlmProxyPartnerPoweredWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetLlmProxyPartnerPoweredWorkspaceRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetLlmProxyPartnerPoweredWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetLlmProxyPartnerPoweredWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public GetLlmProxyPartnerPoweredWorkspaceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetLlmProxyPartnerPoweredWorkspaceRequestPb pb = + mapper.readValue(p, GetLlmProxyPartnerPoweredWorkspaceRequestPb.class); + return GetLlmProxyPartnerPoweredWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequestPb.java new file mode 100755 index 000000000..0d546a6e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enable partner powered AI features workspace setting */ +@Generated +class GetLlmProxyPartnerPoweredWorkspaceRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredWorkspaceRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredWorkspaceRequestPb that = + (GetLlmProxyPartnerPoweredWorkspaceRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredWorkspaceRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java index af3c9a1c5..356ff816c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a network connectivity configuration */ @Generated +@JsonSerialize( + using = + GetNetworkConnectivityConfigurationRequest + .GetNetworkConnectivityConfigurationRequestSerializer.class) +@JsonDeserialize( + using = + GetNetworkConnectivityConfigurationRequest + .GetNetworkConnectivityConfigurationRequestDeserializer.class) public class GetNetworkConnectivityConfigurationRequest { /** Your Network Connectivity Configuration ID. 
*/ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; public GetNetworkConnectivityConfigurationRequest setNetworkConnectivityConfigId( String networkConnectivityConfigId) { @@ -43,4 +60,47 @@ public String toString() { .add("networkConnectivityConfigId", networkConnectivityConfigId) .toString(); } + + GetNetworkConnectivityConfigurationRequestPb toPb() { + GetNetworkConnectivityConfigurationRequestPb pb = + new GetNetworkConnectivityConfigurationRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + + return pb; + } + + static GetNetworkConnectivityConfigurationRequest fromPb( + GetNetworkConnectivityConfigurationRequestPb pb) { + GetNetworkConnectivityConfigurationRequest model = + new GetNetworkConnectivityConfigurationRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + + return model; + } + + public static class GetNetworkConnectivityConfigurationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetNetworkConnectivityConfigurationRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetNetworkConnectivityConfigurationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetNetworkConnectivityConfigurationRequestDeserializer + extends JsonDeserializer { + @Override + public GetNetworkConnectivityConfigurationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetNetworkConnectivityConfigurationRequestPb pb = + mapper.readValue(p, GetNetworkConnectivityConfigurationRequestPb.class); + return GetNetworkConnectivityConfigurationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequestPb.java new file mode 100755 index 000000000..b6d2aba20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a network connectivity configuration */ +@Generated +class GetNetworkConnectivityConfigurationRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + public GetNetworkConnectivityConfigurationRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetNetworkConnectivityConfigurationRequestPb that = + (GetNetworkConnectivityConfigurationRequestPb) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId); + } + + @Override + public String toString() { + return new 
ToStringer(GetNetworkConnectivityConfigurationRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java index 754bd62c0..8fb47cb9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a network policy */ @Generated +@JsonSerialize(using = GetNetworkPolicyRequest.GetNetworkPolicyRequestSerializer.class) +@JsonDeserialize(using = GetNetworkPolicyRequest.GetNetworkPolicyRequestDeserializer.class) public class GetNetworkPolicyRequest { /** The unique identifier of the network policy to retrieve. 
*/ - @JsonIgnore private String networkPolicyId; + private String networkPolicyId; public GetNetworkPolicyRequest setNetworkPolicyId(String networkPolicyId) { this.networkPolicyId = networkPolicyId; @@ -41,4 +52,41 @@ public String toString() { .add("networkPolicyId", networkPolicyId) .toString(); } + + GetNetworkPolicyRequestPb toPb() { + GetNetworkPolicyRequestPb pb = new GetNetworkPolicyRequestPb(); + pb.setNetworkPolicyId(networkPolicyId); + + return pb; + } + + static GetNetworkPolicyRequest fromPb(GetNetworkPolicyRequestPb pb) { + GetNetworkPolicyRequest model = new GetNetworkPolicyRequest(); + model.setNetworkPolicyId(pb.getNetworkPolicyId()); + + return model; + } + + public static class GetNetworkPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetNetworkPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetNetworkPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetNetworkPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetNetworkPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetNetworkPolicyRequestPb pb = mapper.readValue(p, GetNetworkPolicyRequestPb.class); + return GetNetworkPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequestPb.java new file mode 100755 index 000000000..47532a759 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a network policy */ +@Generated +class GetNetworkPolicyRequestPb { + @JsonIgnore private String networkPolicyId; + + public GetNetworkPolicyRequestPb setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetNetworkPolicyRequestPb that = (GetNetworkPolicyRequestPb) o; + return Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(GetNetworkPolicyRequestPb.class) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java index da8dd0f1f..2ace9e17b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a notification destination */ @Generated +@JsonSerialize( + using = GetNotificationDestinationRequest.GetNotificationDestinationRequestSerializer.class) +@JsonDeserialize( + using = GetNotificationDestinationRequest.GetNotificationDestinationRequestDeserializer.class) public class GetNotificationDestinationRequest { /** */ - @JsonIgnore private String id; + private String id; public GetNotificationDestinationRequest setId(String id) { this.id = id; @@ -39,4 +52,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetNotificationDestinationRequest.class).add("id", id).toString(); } + + GetNotificationDestinationRequestPb toPb() { + GetNotificationDestinationRequestPb pb = new GetNotificationDestinationRequestPb(); + pb.setId(id); + + return pb; + } + + static GetNotificationDestinationRequest fromPb(GetNotificationDestinationRequestPb pb) { + GetNotificationDestinationRequest model = new GetNotificationDestinationRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetNotificationDestinationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetNotificationDestinationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetNotificationDestinationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetNotificationDestinationRequestDeserializer + extends JsonDeserializer { + @Override + public GetNotificationDestinationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetNotificationDestinationRequestPb pb = + mapper.readValue(p, GetNotificationDestinationRequestPb.class); + return GetNotificationDestinationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequestPb.java new file mode 100755 index 000000000..71b293b57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a notification destination */ +@Generated +class GetNotificationDestinationRequestPb { + @JsonIgnore private String id; + + public GetNotificationDestinationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetNotificationDestinationRequestPb that = (GetNotificationDestinationRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetNotificationDestinationRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java 
index a3deaab78..f6e91453d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java @@ -3,13 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get Personal Compute setting */ @Generated +@JsonSerialize( + using = GetPersonalComputeSettingRequest.GetPersonalComputeSettingRequestSerializer.class) +@JsonDeserialize( + using = GetPersonalComputeSettingRequest.GetPersonalComputeSettingRequestDeserializer.class) public class GetPersonalComputeSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +31,6 @@ public class GetPersonalComputeSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetPersonalComputeSettingRequest setEtag(String etag) { @@ -49,4 +59,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetPersonalComputeSettingRequest.class).add("etag", etag).toString(); } + + GetPersonalComputeSettingRequestPb toPb() { + GetPersonalComputeSettingRequestPb pb = new GetPersonalComputeSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetPersonalComputeSettingRequest fromPb(GetPersonalComputeSettingRequestPb pb) { + GetPersonalComputeSettingRequest model = new GetPersonalComputeSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetPersonalComputeSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPersonalComputeSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPersonalComputeSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPersonalComputeSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetPersonalComputeSettingRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPersonalComputeSettingRequestPb pb = + mapper.readValue(p, GetPersonalComputeSettingRequestPb.class); + return GetPersonalComputeSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequestPb.java new file mode 100755 index 000000000..2a6e1cfc2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get Personal Compute setting */ +@Generated +class GetPersonalComputeSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetPersonalComputeSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPersonalComputeSettingRequestPb that = (GetPersonalComputeSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetPersonalComputeSettingRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java index e34e82d41..33df245c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java @@ -4,17 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Gets a private endpoint rule */ @Generated +@JsonSerialize(using = GetPrivateEndpointRuleRequest.GetPrivateEndpointRuleRequestSerializer.class) +@JsonDeserialize( + using = GetPrivateEndpointRuleRequest.GetPrivateEndpointRuleRequestDeserializer.class) public class GetPrivateEndpointRuleRequest { /** Your Network Connectvity Configuration ID. */ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; /** Your private endpoint rule ID. 
*/ - @JsonIgnore private String privateEndpointRuleId; + private String privateEndpointRuleId; public GetPrivateEndpointRuleRequest setNetworkConnectivityConfigId( String networkConnectivityConfigId) { @@ -56,4 +68,44 @@ public String toString() { .add("privateEndpointRuleId", privateEndpointRuleId) .toString(); } + + GetPrivateEndpointRuleRequestPb toPb() { + GetPrivateEndpointRuleRequestPb pb = new GetPrivateEndpointRuleRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setPrivateEndpointRuleId(privateEndpointRuleId); + + return pb; + } + + static GetPrivateEndpointRuleRequest fromPb(GetPrivateEndpointRuleRequestPb pb) { + GetPrivateEndpointRuleRequest model = new GetPrivateEndpointRuleRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setPrivateEndpointRuleId(pb.getPrivateEndpointRuleId()); + + return model; + } + + public static class GetPrivateEndpointRuleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetPrivateEndpointRuleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetPrivateEndpointRuleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetPrivateEndpointRuleRequestDeserializer + extends JsonDeserializer { + @Override + public GetPrivateEndpointRuleRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetPrivateEndpointRuleRequestPb pb = + mapper.readValue(p, GetPrivateEndpointRuleRequestPb.class); + return GetPrivateEndpointRuleRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequestPb.java new file mode 100755 index 000000000..be139558e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Gets a private endpoint rule */ +@Generated +class GetPrivateEndpointRuleRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + @JsonIgnore private String privateEndpointRuleId; + + public GetPrivateEndpointRuleRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public GetPrivateEndpointRuleRequestPb setPrivateEndpointRuleId(String privateEndpointRuleId) { + this.privateEndpointRuleId = privateEndpointRuleId; + return this; + } + + public String getPrivateEndpointRuleId() { + return privateEndpointRuleId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPrivateEndpointRuleRequestPb that = (GetPrivateEndpointRuleRequestPb) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) 
+ && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId, privateEndpointRuleId); + } + + @Override + public String toString() { + return new ToStringer(GetPrivateEndpointRuleRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("privateEndpointRuleId", privateEndpointRuleId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequest.java index 56bd86a10..3c87ce11e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequest.java @@ -3,13 +3,29 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the restrict workspace admins setting */ @Generated +@JsonSerialize( + using = + GetRestrictWorkspaceAdminsSettingRequest.GetRestrictWorkspaceAdminsSettingRequestSerializer + .class) 
+@JsonDeserialize( + using = + GetRestrictWorkspaceAdminsSettingRequest + .GetRestrictWorkspaceAdminsSettingRequestDeserializer.class) public class GetRestrictWorkspaceAdminsSettingRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +35,6 @@ public class GetRestrictWorkspaceAdminsSettingRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. */ - @JsonIgnore - @QueryParam("etag") private String etag; public GetRestrictWorkspaceAdminsSettingRequest setEtag(String etag) { @@ -51,4 +65,46 @@ public String toString() { .add("etag", etag) .toString(); } + + GetRestrictWorkspaceAdminsSettingRequestPb toPb() { + GetRestrictWorkspaceAdminsSettingRequestPb pb = + new GetRestrictWorkspaceAdminsSettingRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetRestrictWorkspaceAdminsSettingRequest fromPb( + GetRestrictWorkspaceAdminsSettingRequestPb pb) { + GetRestrictWorkspaceAdminsSettingRequest model = new GetRestrictWorkspaceAdminsSettingRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetRestrictWorkspaceAdminsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRestrictWorkspaceAdminsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetRestrictWorkspaceAdminsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRestrictWorkspaceAdminsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public GetRestrictWorkspaceAdminsSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRestrictWorkspaceAdminsSettingRequestPb pb = + mapper.readValue(p, GetRestrictWorkspaceAdminsSettingRequestPb.class); + return GetRestrictWorkspaceAdminsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequestPb.java new file mode 100755 index 000000000..c9983c302 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetRestrictWorkspaceAdminsSettingRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the restrict workspace admins setting */ +@Generated +class GetRestrictWorkspaceAdminsSettingRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetRestrictWorkspaceAdminsSettingRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRestrictWorkspaceAdminsSettingRequestPb that = + (GetRestrictWorkspaceAdminsSettingRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetRestrictWorkspaceAdminsSettingRequestPb.class) + .add("etag", etag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequest.java index c9cb75cc7..08e49760d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequest.java @@ -3,13 +3,24 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get the SQL Results Download setting */ @Generated +@JsonSerialize(using = GetSqlResultsDownloadRequest.GetSqlResultsDownloadRequestSerializer.class) +@JsonDeserialize( + using = GetSqlResultsDownloadRequest.GetSqlResultsDownloadRequestDeserializer.class) public class GetSqlResultsDownloadRequest { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -19,8 +30,6 @@ public class GetSqlResultsDownloadRequest { * an etag from a GET request, and pass it with the DELETE request to identify the rule set * version you are deleting. 
*/ - @JsonIgnore - @QueryParam("etag") private String etag; public GetSqlResultsDownloadRequest setEtag(String etag) { @@ -49,4 +58,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetSqlResultsDownloadRequest.class).add("etag", etag).toString(); } + + GetSqlResultsDownloadRequestPb toPb() { + GetSqlResultsDownloadRequestPb pb = new GetSqlResultsDownloadRequestPb(); + pb.setEtag(etag); + + return pb; + } + + static GetSqlResultsDownloadRequest fromPb(GetSqlResultsDownloadRequestPb pb) { + GetSqlResultsDownloadRequest model = new GetSqlResultsDownloadRequest(); + model.setEtag(pb.getEtag()); + + return model; + } + + public static class GetSqlResultsDownloadRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetSqlResultsDownloadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSqlResultsDownloadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSqlResultsDownloadRequestDeserializer + extends JsonDeserializer { + @Override + public GetSqlResultsDownloadRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSqlResultsDownloadRequestPb pb = mapper.readValue(p, GetSqlResultsDownloadRequestPb.class); + return GetSqlResultsDownloadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequestPb.java new file mode 100755 index 000000000..b96166d18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetSqlResultsDownloadRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the SQL Results Download setting */ +@Generated +class GetSqlResultsDownloadRequestPb { + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetSqlResultsDownloadRequestPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSqlResultsDownloadRequestPb that = (GetSqlResultsDownloadRequestPb) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetSqlResultsDownloadRequestPb.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequest.java index a086ce44d..60e82ec8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Check configuration status */ @Generated +@JsonSerialize(using = GetStatusRequest.GetStatusRequestSerializer.class) +@JsonDeserialize(using = GetStatusRequest.GetStatusRequestDeserializer.class) public class GetStatusRequest { /** */ - @JsonIgnore - @QueryParam("keys") private String keys; public GetStatusRequest setKeys(String keys) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetStatusRequest.class).add("keys", keys).toString(); } + + GetStatusRequestPb toPb() { + GetStatusRequestPb pb = new GetStatusRequestPb(); + pb.setKeys(keys); + + return pb; + } + + static GetStatusRequest fromPb(GetStatusRequestPb pb) { + GetStatusRequest model = new GetStatusRequest(); + model.setKeys(pb.getKeys()); + + return model; + } + + public static class GetStatusRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetStatusRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStatusRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStatusRequestDeserializer extends JsonDeserializer { + @Override + public GetStatusRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStatusRequestPb pb = mapper.readValue(p, GetStatusRequestPb.class); + return GetStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequestPb.java new file mode 100755 index 000000000..c67ab26d1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Check configuration status */ +@Generated +class GetStatusRequestPb { + @JsonIgnore + @QueryParam("keys") + private String keys; + + public GetStatusRequestPb setKeys(String keys) { + this.keys = keys; + return this; + } + + public String getKeys() { + return keys; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStatusRequestPb that = (GetStatusRequestPb) o; + return Objects.equals(keys, that.keys); + } + + @Override + public int hashCode() { + return Objects.hash(keys); + } + + @Override + public String toString() { + return new ToStringer(GetStatusRequestPb.class).add("keys", keys).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java index 7c1292d9d..b3e382d54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get token info */ @Generated +@JsonSerialize(using = GetTokenManagementRequest.GetTokenManagementRequestSerializer.class) +@JsonDeserialize(using = GetTokenManagementRequest.GetTokenManagementRequestDeserializer.class) public class GetTokenManagementRequest { /** The ID of the token to get. 
*/ - @JsonIgnore private String tokenId; + private String tokenId; public GetTokenManagementRequest setTokenId(String tokenId) { this.tokenId = tokenId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetTokenManagementRequest.class).add("tokenId", tokenId).toString(); } + + GetTokenManagementRequestPb toPb() { + GetTokenManagementRequestPb pb = new GetTokenManagementRequestPb(); + pb.setTokenId(tokenId); + + return pb; + } + + static GetTokenManagementRequest fromPb(GetTokenManagementRequestPb pb) { + GetTokenManagementRequest model = new GetTokenManagementRequest(); + model.setTokenId(pb.getTokenId()); + + return model; + } + + public static class GetTokenManagementRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetTokenManagementRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetTokenManagementRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetTokenManagementRequestDeserializer + extends JsonDeserializer { + @Override + public GetTokenManagementRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetTokenManagementRequestPb pb = mapper.readValue(p, GetTokenManagementRequestPb.class); + return GetTokenManagementRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequestPb.java new file mode 100755 index 000000000..0d8e89cb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get token info */ +@Generated +class GetTokenManagementRequestPb { + @JsonIgnore private String tokenId; + + public GetTokenManagementRequestPb setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTokenManagementRequestPb that = (GetTokenManagementRequestPb) o; + return Objects.equals(tokenId, that.tokenId); + } + + @Override + public int hashCode() { + return Objects.hash(tokenId); + } + + @Override + public String toString() { + return new ToStringer(GetTokenManagementRequestPb.class).add("tokenId", tokenId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java index db59cc45a..e503859dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetTokenPermissionLevelsResponse.GetTokenPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = GetTokenPermissionLevelsResponse.GetTokenPermissionLevelsResponseDeserializer.class) public class GetTokenPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetTokenPermissionLevelsResponse setPermissionLevels( @@ -43,4 +55,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetTokenPermissionLevelsResponsePb toPb() { + GetTokenPermissionLevelsResponsePb pb = new GetTokenPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetTokenPermissionLevelsResponse fromPb(GetTokenPermissionLevelsResponsePb pb) { + GetTokenPermissionLevelsResponse model = new GetTokenPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetTokenPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetTokenPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetTokenPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetTokenPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetTokenPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetTokenPermissionLevelsResponsePb pb = + mapper.readValue(p, GetTokenPermissionLevelsResponsePb.class); + return GetTokenPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponsePb.java new file mode 100755 index 000000000..8408289fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetTokenPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetTokenPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTokenPermissionLevelsResponsePb that = (GetTokenPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetTokenPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java index 2d4a5d80e..b3aad8cf5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Token with specified Token ID was successfully returned. 
*/ @Generated +@JsonSerialize(using = GetTokenResponse.GetTokenResponseSerializer.class) +@JsonDeserialize(using = GetTokenResponse.GetTokenResponseDeserializer.class) public class GetTokenResponse { /** */ - @JsonProperty("token_info") private TokenInfo tokenInfo; public GetTokenResponse setTokenInfo(TokenInfo tokenInfo) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetTokenResponse.class).add("tokenInfo", tokenInfo).toString(); } + + GetTokenResponsePb toPb() { + GetTokenResponsePb pb = new GetTokenResponsePb(); + pb.setTokenInfo(tokenInfo); + + return pb; + } + + static GetTokenResponse fromPb(GetTokenResponsePb pb) { + GetTokenResponse model = new GetTokenResponse(); + model.setTokenInfo(pb.getTokenInfo()); + + return model; + } + + public static class GetTokenResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetTokenResponseDeserializer extends JsonDeserializer { + @Override + public GetTokenResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetTokenResponsePb pb = mapper.readValue(p, GetTokenResponsePb.class); + return GetTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponsePb.java new file mode 100755 index 000000000..59127f6e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Token with specified Token ID was successfully returned. */ +@Generated +class GetTokenResponsePb { + @JsonProperty("token_info") + private TokenInfo tokenInfo; + + public GetTokenResponsePb setTokenInfo(TokenInfo tokenInfo) { + this.tokenInfo = tokenInfo; + return this; + } + + public TokenInfo getTokenInfo() { + return tokenInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTokenResponsePb that = (GetTokenResponsePb) o; + return Objects.equals(tokenInfo, that.tokenInfo); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfo); + } + + @Override + public String toString() { + return new ToStringer(GetTokenResponsePb.class).add("tokenInfo", tokenInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java index c3f99bf5d..cfc6e41f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get workspace network option */ @Generated +@JsonSerialize( + using = GetWorkspaceNetworkOptionRequest.GetWorkspaceNetworkOptionRequestSerializer.class) +@JsonDeserialize( + using = GetWorkspaceNetworkOptionRequest.GetWorkspaceNetworkOptionRequestDeserializer.class) public class GetWorkspaceNetworkOptionRequest { /** The workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; public GetWorkspaceNetworkOptionRequest setWorkspaceId(Long workspaceId) { this.workspaceId = workspaceId; @@ -41,4 +54,42 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + GetWorkspaceNetworkOptionRequestPb toPb() { + GetWorkspaceNetworkOptionRequestPb pb = new GetWorkspaceNetworkOptionRequestPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static GetWorkspaceNetworkOptionRequest fromPb(GetWorkspaceNetworkOptionRequestPb pb) { + GetWorkspaceNetworkOptionRequest model = new GetWorkspaceNetworkOptionRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class GetWorkspaceNetworkOptionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceNetworkOptionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceNetworkOptionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceNetworkOptionRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceNetworkOptionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceNetworkOptionRequestPb pb = + mapper.readValue(p, GetWorkspaceNetworkOptionRequestPb.class); + return GetWorkspaceNetworkOptionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequestPb.java new file mode 100755 index 000000000..9625459c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get workspace network option */ +@Generated +class GetWorkspaceNetworkOptionRequestPb { + @JsonIgnore private Long workspaceId; + + public GetWorkspaceNetworkOptionRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceNetworkOptionRequestPb that = (GetWorkspaceNetworkOptionRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceNetworkOptionRequestPb.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java index c2ac51992..2f97d0d25 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java @@ -4,39 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Definition of an IP Access list */ @Generated +@JsonSerialize(using = IpAccessListInfo.IpAccessListInfoSerializer.class) +@JsonDeserialize(using = IpAccessListInfo.IpAccessListInfoDeserializer.class) public class IpAccessListInfo { /** Total number of IP or CIDR values. */ - @JsonProperty("address_count") private Long addressCount; /** Creation timestamp in milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** User ID of the user who created this list. */ - @JsonProperty("created_by") private Long createdBy; /** Specifies whether this IP access list is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** */ - @JsonProperty("ip_addresses") private Collection ipAddresses; /** Label for the IP access list. This **cannot** be empty. */ - @JsonProperty("label") private String label; /** Universally unique identifier (UUID) of the IP access list. 
*/ - @JsonProperty("list_id") private String listId; /** @@ -46,15 +50,12 @@ public class IpAccessListInfo { * or range. IP addresses in the block list are excluded even if they are included in an allow * list. */ - @JsonProperty("list_type") private ListType listType; /** Update timestamp in milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** User ID of the user who updated this list. */ - @JsonProperty("updated_by") private Long updatedBy; public IpAccessListInfo setAddressCount(Long addressCount) { @@ -194,4 +195,56 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + IpAccessListInfoPb toPb() { + IpAccessListInfoPb pb = new IpAccessListInfoPb(); + pb.setAddressCount(addressCount); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setEnabled(enabled); + pb.setIpAddresses(ipAddresses); + pb.setLabel(label); + pb.setListId(listId); + pb.setListType(listType); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static IpAccessListInfo fromPb(IpAccessListInfoPb pb) { + IpAccessListInfo model = new IpAccessListInfo(); + model.setAddressCount(pb.getAddressCount()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setEnabled(pb.getEnabled()); + model.setIpAddresses(pb.getIpAddresses()); + model.setLabel(pb.getLabel()); + model.setListId(pb.getListId()); + model.setListType(pb.getListType()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class IpAccessListInfoSerializer extends JsonSerializer { + @Override + public void serialize(IpAccessListInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + IpAccessListInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class IpAccessListInfoDeserializer extends JsonDeserializer { + @Override + public IpAccessListInfo deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + IpAccessListInfoPb pb = mapper.readValue(p, IpAccessListInfoPb.class); + return IpAccessListInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfoPb.java new file mode 100755 index 000000000..70e829478 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfoPb.java @@ -0,0 +1,181 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Definition of an IP Access list */ +@Generated +class IpAccessListInfoPb { + @JsonProperty("address_count") + private Long addressCount; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private Long createdBy; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("ip_addresses") + private Collection ipAddresses; + + @JsonProperty("label") + private String label; + + @JsonProperty("list_id") + private String listId; + + @JsonProperty("list_type") + private ListType listType; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private Long updatedBy; + + public IpAccessListInfoPb setAddressCount(Long addressCount) { + this.addressCount = addressCount; + return this; + } + + public Long getAddressCount() { + return addressCount; + } + + public IpAccessListInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long 
getCreatedAt() { + return createdAt; + } + + public IpAccessListInfoPb setCreatedBy(Long createdBy) { + this.createdBy = createdBy; + return this; + } + + public Long getCreatedBy() { + return createdBy; + } + + public IpAccessListInfoPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public IpAccessListInfoPb setIpAddresses(Collection ipAddresses) { + this.ipAddresses = ipAddresses; + return this; + } + + public Collection getIpAddresses() { + return ipAddresses; + } + + public IpAccessListInfoPb setLabel(String label) { + this.label = label; + return this; + } + + public String getLabel() { + return label; + } + + public IpAccessListInfoPb setListId(String listId) { + this.listId = listId; + return this; + } + + public String getListId() { + return listId; + } + + public IpAccessListInfoPb setListType(ListType listType) { + this.listType = listType; + return this; + } + + public ListType getListType() { + return listType; + } + + public IpAccessListInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public IpAccessListInfoPb setUpdatedBy(Long updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public Long getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IpAccessListInfoPb that = (IpAccessListInfoPb) o; + return Objects.equals(addressCount, that.addressCount) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(enabled, that.enabled) + && Objects.equals(ipAddresses, that.ipAddresses) + && Objects.equals(label, that.label) + && Objects.equals(listId, that.listId) + && Objects.equals(listType, that.listType) + && Objects.equals(updatedAt, that.updatedAt) + && 
Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + addressCount, + createdAt, + createdBy, + enabled, + ipAddresses, + label, + listId, + listType, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(IpAccessListInfoPb.class) + .add("addressCount", addressCount) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("enabled", enabled) + .add("ipAddresses", ipAddresses) + .add("label", label) + .add("listId", listId) + .add("listType", listType) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java index b9cca1598..e5b60abca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java @@ -21,7 +21,7 @@ public CreateIpAccessListResponse create(CreateIpAccessList request) { String path = "/api/2.0/ip-access-lists"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateIpAccessListResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteIpAccessListRequest request) { String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public 
FetchIpAccessListResponse get(GetIpAccessListRequest request) { String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FetchIpAccessListResponse.class); } catch (IOException e) { @@ -72,7 +72,7 @@ public void replace(ReplaceIpAccessList request) { String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ReplaceResponse.class); } catch (IOException e) { @@ -85,7 +85,7 @@ public void update(UpdateIpAccessList request) { String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java index 955a0efcc..ea0a2ee9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** IP access lists were successfully returned. */ @Generated +@JsonSerialize(using = ListIpAccessListResponse.ListIpAccessListResponseSerializer.class) +@JsonDeserialize(using = ListIpAccessListResponse.ListIpAccessListResponseDeserializer.class) public class ListIpAccessListResponse { /** */ - @JsonProperty("ip_access_lists") private Collection ipAccessLists; public ListIpAccessListResponse setIpAccessLists(Collection ipAccessLists) { @@ -43,4 +53,41 @@ public String toString() { .add("ipAccessLists", ipAccessLists) .toString(); } + + ListIpAccessListResponsePb toPb() { + ListIpAccessListResponsePb pb = new ListIpAccessListResponsePb(); + pb.setIpAccessLists(ipAccessLists); + + return pb; + } + + static ListIpAccessListResponse fromPb(ListIpAccessListResponsePb pb) { + ListIpAccessListResponse model = new ListIpAccessListResponse(); + model.setIpAccessLists(pb.getIpAccessLists()); + + return model; + } + + public static class ListIpAccessListResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListIpAccessListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListIpAccessListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListIpAccessListResponseDeserializer + extends JsonDeserializer { + @Override + public ListIpAccessListResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException 
{ + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListIpAccessListResponsePb pb = mapper.readValue(p, ListIpAccessListResponsePb.class); + return ListIpAccessListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponsePb.java new file mode 100755 index 000000000..d095c3843 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** IP access lists were successfully returned. 
*/ +@Generated +class ListIpAccessListResponsePb { + @JsonProperty("ip_access_lists") + private Collection ipAccessLists; + + public ListIpAccessListResponsePb setIpAccessLists(Collection ipAccessLists) { + this.ipAccessLists = ipAccessLists; + return this; + } + + public Collection getIpAccessLists() { + return ipAccessLists; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListIpAccessListResponsePb that = (ListIpAccessListResponsePb) o; + return Objects.equals(ipAccessLists, that.ipAccessLists); + } + + @Override + public int hashCode() { + return Objects.hash(ipAccessLists); + } + + @Override + public String toString() { + return new ToStringer(ListIpAccessListResponsePb.class) + .add("ipAccessLists", ipAccessLists) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java index 39421aaa9..d1c432885 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java @@ -3,17 +3,31 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List network connectivity configurations */ @Generated +@JsonSerialize( + using = + ListNetworkConnectivityConfigurationsRequest + .ListNetworkConnectivityConfigurationsRequestSerializer.class) +@JsonDeserialize( + using = + ListNetworkConnectivityConfigurationsRequest + .ListNetworkConnectivityConfigurationsRequestDeserializer.class) public class ListNetworkConnectivityConfigurationsRequest { /** Pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListNetworkConnectivityConfigurationsRequest setPageToken(String pageToken) { @@ -45,4 +59,47 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListNetworkConnectivityConfigurationsRequestPb toPb() { + ListNetworkConnectivityConfigurationsRequestPb pb = + new ListNetworkConnectivityConfigurationsRequestPb(); + pb.setPageToken(pageToken); + + return pb; + } + + static ListNetworkConnectivityConfigurationsRequest fromPb( + ListNetworkConnectivityConfigurationsRequestPb pb) { + ListNetworkConnectivityConfigurationsRequest model = + new ListNetworkConnectivityConfigurationsRequest(); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListNetworkConnectivityConfigurationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNetworkConnectivityConfigurationsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListNetworkConnectivityConfigurationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNetworkConnectivityConfigurationsRequestDeserializer + extends JsonDeserializer { + @Override + public 
ListNetworkConnectivityConfigurationsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNetworkConnectivityConfigurationsRequestPb pb = + mapper.readValue(p, ListNetworkConnectivityConfigurationsRequestPb.class); + return ListNetworkConnectivityConfigurationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequestPb.java new file mode 100755 index 000000000..207d6937f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequestPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List network connectivity configurations */ +@Generated +class ListNetworkConnectivityConfigurationsRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListNetworkConnectivityConfigurationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkConnectivityConfigurationsRequestPb that = + (ListNetworkConnectivityConfigurationsRequestPb) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkConnectivityConfigurationsRequestPb.class) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java index 1dc5b5042..afbe24287 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java @@ -4,22 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The network connectivity configuration list was successfully retrieved. */ @Generated +@JsonSerialize( + using = + ListNetworkConnectivityConfigurationsResponse + .ListNetworkConnectivityConfigurationsResponseSerializer.class) +@JsonDeserialize( + using = + ListNetworkConnectivityConfigurationsResponse + .ListNetworkConnectivityConfigurationsResponseDeserializer.class) public class ListNetworkConnectivityConfigurationsResponse { /** */ - @JsonProperty("items") private Collection items; /** * A token that can be used to get the next page of results. If null, there are no more results to * show. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListNetworkConnectivityConfigurationsResponse setItems( @@ -62,4 +77,49 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListNetworkConnectivityConfigurationsResponsePb toPb() { + ListNetworkConnectivityConfigurationsResponsePb pb = + new ListNetworkConnectivityConfigurationsResponsePb(); + pb.setItems(items); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListNetworkConnectivityConfigurationsResponse fromPb( + ListNetworkConnectivityConfigurationsResponsePb pb) { + ListNetworkConnectivityConfigurationsResponse model = + new ListNetworkConnectivityConfigurationsResponse(); + model.setItems(pb.getItems()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListNetworkConnectivityConfigurationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNetworkConnectivityConfigurationsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + ListNetworkConnectivityConfigurationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNetworkConnectivityConfigurationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListNetworkConnectivityConfigurationsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNetworkConnectivityConfigurationsResponsePb pb = + mapper.readValue(p, ListNetworkConnectivityConfigurationsResponsePb.class); + return ListNetworkConnectivityConfigurationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponsePb.java new file mode 100755 index 000000000..76c5d7771 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponsePb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The network connectivity configuration list was successfully retrieved. 
*/ +@Generated +class ListNetworkConnectivityConfigurationsResponsePb { + @JsonProperty("items") + private Collection items; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListNetworkConnectivityConfigurationsResponsePb setItems( + Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListNetworkConnectivityConfigurationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkConnectivityConfigurationsResponsePb that = + (ListNetworkConnectivityConfigurationsResponsePb) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkConnectivityConfigurationsResponsePb.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java index 963c735cc..f702d7cee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List network policies */ @Generated +@JsonSerialize(using = ListNetworkPoliciesRequest.ListNetworkPoliciesRequestSerializer.class) +@JsonDeserialize(using = ListNetworkPoliciesRequest.ListNetworkPoliciesRequestDeserializer.class) public class ListNetworkPoliciesRequest { /** Pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListNetworkPoliciesRequest setPageToken(String pageToken) { @@ -42,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListNetworkPoliciesRequest.class).add("pageToken", pageToken).toString(); } + + ListNetworkPoliciesRequestPb toPb() { + ListNetworkPoliciesRequestPb pb = new ListNetworkPoliciesRequestPb(); + pb.setPageToken(pageToken); + + return pb; + } + + static ListNetworkPoliciesRequest fromPb(ListNetworkPoliciesRequestPb pb) { + ListNetworkPoliciesRequest model = new ListNetworkPoliciesRequest(); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListNetworkPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNetworkPoliciesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNetworkPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNetworkPoliciesRequestDeserializer + extends 
JsonDeserializer { + @Override + public ListNetworkPoliciesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNetworkPoliciesRequestPb pb = mapper.readValue(p, ListNetworkPoliciesRequestPb.class); + return ListNetworkPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequestPb.java new file mode 100755 index 000000000..520d897fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequestPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List network policies */ +@Generated +class ListNetworkPoliciesRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListNetworkPoliciesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkPoliciesRequestPb that = (ListNetworkPoliciesRequestPb) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkPoliciesRequestPb.class) + .add("pageToken", 
pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java index 5574a6dc4..aded7c12c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListNetworkPoliciesResponse.ListNetworkPoliciesResponseSerializer.class) +@JsonDeserialize(using = ListNetworkPoliciesResponse.ListNetworkPoliciesResponseDeserializer.class) public class ListNetworkPoliciesResponse { /** List of network policies. */ - @JsonProperty("items") private Collection items; /** * A token that can be used to get the next page of results. If null, there are no more results to * show. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListNetworkPoliciesResponse setItems(Collection items) { @@ -59,4 +68,43 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListNetworkPoliciesResponsePb toPb() { + ListNetworkPoliciesResponsePb pb = new ListNetworkPoliciesResponsePb(); + pb.setItems(items); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListNetworkPoliciesResponse fromPb(ListNetworkPoliciesResponsePb pb) { + ListNetworkPoliciesResponse model = new ListNetworkPoliciesResponse(); + model.setItems(pb.getItems()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListNetworkPoliciesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNetworkPoliciesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNetworkPoliciesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNetworkPoliciesResponseDeserializer + extends JsonDeserializer { + @Override + public ListNetworkPoliciesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNetworkPoliciesResponsePb pb = mapper.readValue(p, ListNetworkPoliciesResponsePb.class); + return ListNetworkPoliciesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponsePb.java new file mode 100755 index 000000000..fd7937cdf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListNetworkPoliciesResponsePb { + @JsonProperty("items") + private Collection items; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListNetworkPoliciesResponsePb setItems(Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListNetworkPoliciesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkPoliciesResponsePb that = (ListNetworkPoliciesResponsePb) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new 
ToStringer(ListNetworkPoliciesResponsePb.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequest.java index 0e06df8a9..5f2cc7abd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequest.java @@ -3,22 +3,31 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List notification destinations */ @Generated +@JsonSerialize( + using = ListNotificationDestinationsRequest.ListNotificationDestinationsRequestSerializer.class) +@JsonDeserialize( + using = + ListNotificationDestinationsRequest.ListNotificationDestinationsRequestDeserializer.class) public class ListNotificationDestinationsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListNotificationDestinationsRequest 
setPageSize(Long pageSize) { @@ -59,4 +68,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListNotificationDestinationsRequestPb toPb() { + ListNotificationDestinationsRequestPb pb = new ListNotificationDestinationsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListNotificationDestinationsRequest fromPb(ListNotificationDestinationsRequestPb pb) { + ListNotificationDestinationsRequest model = new ListNotificationDestinationsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListNotificationDestinationsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNotificationDestinationsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNotificationDestinationsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNotificationDestinationsRequestDeserializer + extends JsonDeserializer { + @Override + public ListNotificationDestinationsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNotificationDestinationsRequestPb pb = + mapper.readValue(p, ListNotificationDestinationsRequestPb.class); + return ListNotificationDestinationsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequestPb.java new file mode 100755 index 000000000..962a84f5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List notification destinations */ +@Generated +class ListNotificationDestinationsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListNotificationDestinationsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListNotificationDestinationsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNotificationDestinationsRequestPb that = (ListNotificationDestinationsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { 
+ return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNotificationDestinationsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java index fd6626b15..0a136d0b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java @@ -4,18 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + ListNotificationDestinationsResponse.ListNotificationDestinationsResponseSerializer.class) +@JsonDeserialize( + using = + ListNotificationDestinationsResponse.ListNotificationDestinationsResponseDeserializer.class) public class ListNotificationDestinationsResponse { /** Page token for next of results. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("results") private Collection results; public ListNotificationDestinationsResponse setNextPageToken(String nextPageToken) { @@ -58,4 +71,44 @@ public String toString() { .add("results", results) .toString(); } + + ListNotificationDestinationsResponsePb toPb() { + ListNotificationDestinationsResponsePb pb = new ListNotificationDestinationsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setResults(results); + + return pb; + } + + static ListNotificationDestinationsResponse fromPb(ListNotificationDestinationsResponsePb pb) { + ListNotificationDestinationsResponse model = new ListNotificationDestinationsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListNotificationDestinationsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNotificationDestinationsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNotificationDestinationsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNotificationDestinationsResponseDeserializer + extends JsonDeserializer { + @Override + public ListNotificationDestinationsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNotificationDestinationsResponsePb pb = + mapper.readValue(p, ListNotificationDestinationsResponsePb.class); + return ListNotificationDestinationsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponsePb.java new file mode 100755 index 000000000..c7ea76009 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListNotificationDestinationsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("results") + private Collection results; + + public ListNotificationDestinationsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListNotificationDestinationsResponsePb setResults( + Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNotificationDestinationsResponsePb that = (ListNotificationDestinationsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return 
Objects.hash(nextPageToken, results); + } + + @Override + public String toString() { + return new ToStringer(ListNotificationDestinationsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java index 652046f46..ebd19568b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java @@ -4,23 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = ListNotificationDestinationsResult.ListNotificationDestinationsResultSerializer.class) +@JsonDeserialize( + using = ListNotificationDestinationsResult.ListNotificationDestinationsResultDeserializer.class) public class ListNotificationDestinationsResult { /** * [Output-only] The type of the notification destination. The type can not be changed once set. 
*/ - @JsonProperty("destination_type") private DestinationType destinationType; /** The display name for the notification destination. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying notification destination. */ - @JsonProperty("id") private String id; public ListNotificationDestinationsResult setDestinationType(DestinationType destinationType) { @@ -73,4 +83,46 @@ public String toString() { .add("id", id) .toString(); } + + ListNotificationDestinationsResultPb toPb() { + ListNotificationDestinationsResultPb pb = new ListNotificationDestinationsResultPb(); + pb.setDestinationType(destinationType); + pb.setDisplayName(displayName); + pb.setId(id); + + return pb; + } + + static ListNotificationDestinationsResult fromPb(ListNotificationDestinationsResultPb pb) { + ListNotificationDestinationsResult model = new ListNotificationDestinationsResult(); + model.setDestinationType(pb.getDestinationType()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + + return model; + } + + public static class ListNotificationDestinationsResultSerializer + extends JsonSerializer { + @Override + public void serialize( + ListNotificationDestinationsResult value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListNotificationDestinationsResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListNotificationDestinationsResultDeserializer + extends JsonDeserializer { + @Override + public ListNotificationDestinationsResult deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListNotificationDestinationsResultPb pb = + mapper.readValue(p, ListNotificationDestinationsResultPb.class); + return ListNotificationDestinationsResult.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResultPb.java new file mode 100755 index 000000000..106323fc9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResultPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListNotificationDestinationsResultPb { + @JsonProperty("destination_type") + private DestinationType destinationType; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + public ListNotificationDestinationsResultPb setDestinationType(DestinationType destinationType) { + this.destinationType = destinationType; + return this; + } + + public DestinationType getDestinationType() { + return destinationType; + } + + public ListNotificationDestinationsResultPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ListNotificationDestinationsResultPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNotificationDestinationsResultPb that = 
(ListNotificationDestinationsResultPb) o; + return Objects.equals(destinationType, that.destinationType) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(destinationType, displayName, id); + } + + @Override + public String toString() { + return new ToStringer(ListNotificationDestinationsResultPb.class) + .add("destinationType", destinationType) + .add("displayName", displayName) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java index 2772738c9..55edfd6f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java @@ -3,20 +3,30 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List private endpoint rules */ @Generated +@JsonSerialize( + using = ListPrivateEndpointRulesRequest.ListPrivateEndpointRulesRequestSerializer.class) 
+@JsonDeserialize( + using = ListPrivateEndpointRulesRequest.ListPrivateEndpointRulesRequestDeserializer.class) public class ListPrivateEndpointRulesRequest { /** Your Network Connectvity Configuration ID. */ - @JsonIgnore private String networkConnectivityConfigId; + private String networkConnectivityConfigId; /** Pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListPrivateEndpointRulesRequest setNetworkConnectivityConfigId( @@ -59,4 +69,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListPrivateEndpointRulesRequestPb toPb() { + ListPrivateEndpointRulesRequestPb pb = new ListPrivateEndpointRulesRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setPageToken(pageToken); + + return pb; + } + + static ListPrivateEndpointRulesRequest fromPb(ListPrivateEndpointRulesRequestPb pb) { + ListPrivateEndpointRulesRequest model = new ListPrivateEndpointRulesRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListPrivateEndpointRulesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPrivateEndpointRulesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPrivateEndpointRulesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPrivateEndpointRulesRequestDeserializer + extends JsonDeserializer { + @Override + public ListPrivateEndpointRulesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPrivateEndpointRulesRequestPb pb = + mapper.readValue(p, ListPrivateEndpointRulesRequestPb.class); + return ListPrivateEndpointRulesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequestPb.java new file mode 100755 index 000000000..1425f505b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List private endpoint rules */ +@Generated +class ListPrivateEndpointRulesRequestPb { + @JsonIgnore private String networkConnectivityConfigId; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListPrivateEndpointRulesRequestPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public ListPrivateEndpointRulesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPrivateEndpointRulesRequestPb that = (ListPrivateEndpointRulesRequestPb) o; + return Objects.equals(networkConnectivityConfigId, 
that.networkConnectivityConfigId) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPrivateEndpointRulesRequestPb.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java new file mode 100755 index 000000000..dfb92c34a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +/** The private endpoint rule list was successfully retrieved. 
*/ +@Generated +@JsonSerialize( + using = ListPrivateEndpointRulesResponse.ListPrivateEndpointRulesResponseSerializer.class) +@JsonDeserialize( + using = ListPrivateEndpointRulesResponse.ListPrivateEndpointRulesResponseDeserializer.class) +public class ListPrivateEndpointRulesResponse { + /** */ + private Collection items; + + /** + * A token that can be used to get the next page of results. If null, there are no more results to + * show. + */ + private String nextPageToken; + + public ListPrivateEndpointRulesResponse setItems(Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListPrivateEndpointRulesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPrivateEndpointRulesResponse that = (ListPrivateEndpointRulesResponse) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPrivateEndpointRulesResponse.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } + + ListPrivateEndpointRulesResponsePb toPb() { + ListPrivateEndpointRulesResponsePb pb = new ListPrivateEndpointRulesResponsePb(); + pb.setItems(items); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListPrivateEndpointRulesResponse fromPb(ListPrivateEndpointRulesResponsePb pb) { + ListPrivateEndpointRulesResponse model = new ListPrivateEndpointRulesResponse(); + model.setItems(pb.getItems()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class 
ListPrivateEndpointRulesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPrivateEndpointRulesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPrivateEndpointRulesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPrivateEndpointRulesResponseDeserializer + extends JsonDeserializer { + @Override + public ListPrivateEndpointRulesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPrivateEndpointRulesResponsePb pb = + mapper.readValue(p, ListPrivateEndpointRulesResponsePb.class); + return ListPrivateEndpointRulesResponse.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponsePb.java similarity index 63% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponsePb.java index 03ccf6398..ebf6bda4c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponsePb.java @@ -10,29 +10,23 @@ /** The private endpoint rule list was successfully retrieved. */ @Generated -public class ListNccAzurePrivateEndpointRulesResponse { - /** */ +class ListPrivateEndpointRulesResponsePb { @JsonProperty("items") - private Collection items; + private Collection items; - /** - * A token that can be used to get the next page of results. 
If null, there are no more results to - * show. - */ @JsonProperty("next_page_token") private String nextPageToken; - public ListNccAzurePrivateEndpointRulesResponse setItems( - Collection items) { + public ListPrivateEndpointRulesResponsePb setItems(Collection items) { this.items = items; return this; } - public Collection getItems() { + public Collection getItems() { return items; } - public ListNccAzurePrivateEndpointRulesResponse setNextPageToken(String nextPageToken) { + public ListPrivateEndpointRulesResponsePb setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -45,7 +39,7 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListNccAzurePrivateEndpointRulesResponse that = (ListNccAzurePrivateEndpointRulesResponse) o; + ListPrivateEndpointRulesResponsePb that = (ListPrivateEndpointRulesResponsePb) o; return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); } @@ -56,7 +50,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ListNccAzurePrivateEndpointRulesResponse.class) + return new ToStringer(ListPrivateEndpointRulesResponsePb.class) .add("items", items) .add("nextPageToken", nextPageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java index 445c7789f..6ef7b1467 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListPublicTokensResponse.ListPublicTokensResponseSerializer.class) +@JsonDeserialize(using = ListPublicTokensResponse.ListPublicTokensResponseDeserializer.class) public class ListPublicTokensResponse { /** The information for each token. */ - @JsonProperty("token_infos") private Collection tokenInfos; public ListPublicTokensResponse setTokenInfos(Collection tokenInfos) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListPublicTokensResponse.class).add("tokenInfos", tokenInfos).toString(); } + + ListPublicTokensResponsePb toPb() { + ListPublicTokensResponsePb pb = new ListPublicTokensResponsePb(); + pb.setTokenInfos(tokenInfos); + + return pb; + } + + static ListPublicTokensResponse fromPb(ListPublicTokensResponsePb pb) { + ListPublicTokensResponse model = new ListPublicTokensResponse(); + model.setTokenInfos(pb.getTokenInfos()); + + return model; + } + + public static class ListPublicTokensResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListPublicTokensResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListPublicTokensResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListPublicTokensResponseDeserializer + extends JsonDeserializer { + @Override + public ListPublicTokensResponse 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListPublicTokensResponsePb pb = mapper.readValue(p, ListPublicTokensResponsePb.class); + return ListPublicTokensResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponsePb.java new file mode 100755 index 000000000..b1e9d8007 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListPublicTokensResponsePb { + @JsonProperty("token_infos") + private Collection tokenInfos; + + public ListPublicTokensResponsePb setTokenInfos(Collection tokenInfos) { + this.tokenInfos = tokenInfos; + return this; + } + + public Collection getTokenInfos() { + return tokenInfos; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPublicTokensResponsePb that = (ListPublicTokensResponsePb) o; + return Objects.equals(tokenInfos, that.tokenInfos); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfos); + } + + @Override + public String toString() { + return new ToStringer(ListPublicTokensResponsePb.class) + .add("tokenInfos", tokenInfos) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java index 30086533f..a22d41cc4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all tokens */ @Generated +@JsonSerialize(using = ListTokenManagementRequest.ListTokenManagementRequestSerializer.class) +@JsonDeserialize(using = ListTokenManagementRequest.ListTokenManagementRequestDeserializer.class) public class ListTokenManagementRequest { /** User ID of the user that created the token. */ - @JsonIgnore - @QueryParam("created_by_id") private Long createdById; /** Username of the user that created the token. 
*/ - @JsonIgnore - @QueryParam("created_by_username") private String createdByUsername; public ListTokenManagementRequest setCreatedById(Long createdById) { @@ -60,4 +66,43 @@ public String toString() { .add("createdByUsername", createdByUsername) .toString(); } + + ListTokenManagementRequestPb toPb() { + ListTokenManagementRequestPb pb = new ListTokenManagementRequestPb(); + pb.setCreatedById(createdById); + pb.setCreatedByUsername(createdByUsername); + + return pb; + } + + static ListTokenManagementRequest fromPb(ListTokenManagementRequestPb pb) { + ListTokenManagementRequest model = new ListTokenManagementRequest(); + model.setCreatedById(pb.getCreatedById()); + model.setCreatedByUsername(pb.getCreatedByUsername()); + + return model; + } + + public static class ListTokenManagementRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListTokenManagementRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTokenManagementRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTokenManagementRequestDeserializer + extends JsonDeserializer { + @Override + public ListTokenManagementRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTokenManagementRequestPb pb = mapper.readValue(p, ListTokenManagementRequestPb.class); + return ListTokenManagementRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequestPb.java new file mode 100755 index 000000000..179331a49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequestPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all tokens */ +@Generated +class ListTokenManagementRequestPb { + @JsonIgnore + @QueryParam("created_by_id") + private Long createdById; + + @JsonIgnore + @QueryParam("created_by_username") + private String createdByUsername; + + public ListTokenManagementRequestPb setCreatedById(Long createdById) { + this.createdById = createdById; + return this; + } + + public Long getCreatedById() { + return createdById; + } + + public ListTokenManagementRequestPb setCreatedByUsername(String createdByUsername) { + this.createdByUsername = createdByUsername; + return this; + } + + public String getCreatedByUsername() { + return createdByUsername; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTokenManagementRequestPb that = (ListTokenManagementRequestPb) o; + return Objects.equals(createdById, that.createdById) + && Objects.equals(createdByUsername, that.createdByUsername); + } + + @Override + public int hashCode() { + 
return Objects.hash(createdById, createdByUsername); + } + + @Override + public String toString() { + return new ToStringer(ListTokenManagementRequestPb.class) + .add("createdById", createdById) + .add("createdByUsername", createdByUsername) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java index 09fa4602d..6316268d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Tokens were successfully returned. 
*/ @Generated +@JsonSerialize(using = ListTokensResponse.ListTokensResponseSerializer.class) +@JsonDeserialize(using = ListTokensResponse.ListTokensResponseDeserializer.class) public class ListTokensResponse { /** Token metadata of each user-created token in the workspace */ - @JsonProperty("token_infos") private Collection tokenInfos; public ListTokensResponse setTokenInfos(Collection tokenInfos) { @@ -41,4 +51,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListTokensResponse.class).add("tokenInfos", tokenInfos).toString(); } + + ListTokensResponsePb toPb() { + ListTokensResponsePb pb = new ListTokensResponsePb(); + pb.setTokenInfos(tokenInfos); + + return pb; + } + + static ListTokensResponse fromPb(ListTokensResponsePb pb) { + ListTokensResponse model = new ListTokensResponse(); + model.setTokenInfos(pb.getTokenInfos()); + + return model; + } + + public static class ListTokensResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListTokensResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListTokensResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListTokensResponseDeserializer extends JsonDeserializer { + @Override + public ListTokensResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListTokensResponsePb pb = mapper.readValue(p, ListTokensResponsePb.class); + return ListTokensResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponsePb.java new file mode 100755 index 000000000..07aa7af00 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponsePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Tokens were successfully returned. */ +@Generated +class ListTokensResponsePb { + @JsonProperty("token_infos") + private Collection tokenInfos; + + public ListTokensResponsePb setTokenInfos(Collection tokenInfos) { + this.tokenInfos = tokenInfos; + return this; + } + + public Collection getTokenInfos() { + return tokenInfos; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTokensResponsePb that = (ListTokensResponsePb) o; + return Objects.equals(tokenInfos, that.tokenInfos); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfos); + } + + @Override + public String toString() { + return new ToStringer(ListTokensResponsePb.class).add("tokenInfos", tokenInfos).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java index adb284ade..9a1a95dcd 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LlmProxyPartnerPoweredAccount.LlmProxyPartnerPoweredAccountSerializer.class) +@JsonDeserialize( + using = LlmProxyPartnerPoweredAccount.LlmProxyPartnerPoweredAccountDeserializer.class) public class LlmProxyPartnerPoweredAccount { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -21,7 +32,6 @@ public class LlmProxyPartnerPoweredAccount { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +40,6 @@ public class LlmProxyPartnerPoweredAccount { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public LlmProxyPartnerPoweredAccount setBooleanVal(BooleanMessage booleanVal) { @@ -83,4 +92,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + LlmProxyPartnerPoweredAccountPb toPb() { + LlmProxyPartnerPoweredAccountPb pb = new LlmProxyPartnerPoweredAccountPb(); + pb.setBooleanVal(booleanVal); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static LlmProxyPartnerPoweredAccount fromPb(LlmProxyPartnerPoweredAccountPb pb) { + LlmProxyPartnerPoweredAccount model = new LlmProxyPartnerPoweredAccount(); + model.setBooleanVal(pb.getBooleanVal()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class LlmProxyPartnerPoweredAccountSerializer + extends JsonSerializer { + @Override + public void serialize( + LlmProxyPartnerPoweredAccount value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LlmProxyPartnerPoweredAccountPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LlmProxyPartnerPoweredAccountDeserializer + extends JsonDeserializer { + @Override + public LlmProxyPartnerPoweredAccount deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LlmProxyPartnerPoweredAccountPb pb = + mapper.readValue(p, LlmProxyPartnerPoweredAccountPb.class); + return LlmProxyPartnerPoweredAccount.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java index ab253c810..6ce5b9fed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java @@ -24,7 +24,7 @@ public LlmProxyPartnerPoweredAccount get(GetLlmProxyPartnerPoweredAccountRequest apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public LlmProxyPartnerPoweredAccount update(UpdateLlmProxyPartnerPoweredAccountR apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountPb.java new file mode 100755 index 000000000..00e61c250 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountPb.java @@ -0,0 +1,71 @@ +// Code 
generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LlmProxyPartnerPoweredAccountPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredAccountPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredAccountPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredAccountPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredAccountPb that = (LlmProxyPartnerPoweredAccountPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredAccountPb.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java index 
653a3ddd6..36e181ccd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java @@ -4,13 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LlmProxyPartnerPoweredEnforce.LlmProxyPartnerPoweredEnforceSerializer.class) +@JsonDeserialize( + using = LlmProxyPartnerPoweredEnforce.LlmProxyPartnerPoweredEnforceDeserializer.class) public class LlmProxyPartnerPoweredEnforce { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -21,7 +32,6 @@ public class LlmProxyPartnerPoweredEnforce { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +40,6 @@ public class LlmProxyPartnerPoweredEnforce { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public LlmProxyPartnerPoweredEnforce setBooleanVal(BooleanMessage booleanVal) { @@ -83,4 +92,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + LlmProxyPartnerPoweredEnforcePb toPb() { + LlmProxyPartnerPoweredEnforcePb pb = new LlmProxyPartnerPoweredEnforcePb(); + pb.setBooleanVal(booleanVal); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static LlmProxyPartnerPoweredEnforce fromPb(LlmProxyPartnerPoweredEnforcePb pb) { + LlmProxyPartnerPoweredEnforce model = new LlmProxyPartnerPoweredEnforce(); + model.setBooleanVal(pb.getBooleanVal()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class LlmProxyPartnerPoweredEnforceSerializer + extends JsonSerializer { + @Override + public void serialize( + LlmProxyPartnerPoweredEnforce value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LlmProxyPartnerPoweredEnforcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LlmProxyPartnerPoweredEnforceDeserializer + extends JsonDeserializer { + @Override + public LlmProxyPartnerPoweredEnforce deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LlmProxyPartnerPoweredEnforcePb pb = + mapper.readValue(p, LlmProxyPartnerPoweredEnforcePb.class); + return LlmProxyPartnerPoweredEnforce.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java index 8ffdaee6c..f86e6f1dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java @@ -24,7 +24,7 @@ public LlmProxyPartnerPoweredEnforce get(GetLlmProxyPartnerPoweredEnforceRequest apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public LlmProxyPartnerPoweredEnforce update(UpdateLlmProxyPartnerPoweredEnforceR apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforcePb.java new file mode 100755 index 000000000..c3d953b19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforcePb.java @@ -0,0 +1,71 @@ +// Code 
generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LlmProxyPartnerPoweredEnforcePb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredEnforcePb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredEnforcePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredEnforcePb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredEnforcePb that = (LlmProxyPartnerPoweredEnforcePb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredEnforcePb.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java 
index 85cd8a171..c14ec92a2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = LlmProxyPartnerPoweredWorkspace.LlmProxyPartnerPoweredWorkspaceSerializer.class) +@JsonDeserialize( + using = LlmProxyPartnerPoweredWorkspace.LlmProxyPartnerPoweredWorkspaceDeserializer.class) public class LlmProxyPartnerPoweredWorkspace { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -21,7 +33,6 @@ public class LlmProxyPartnerPoweredWorkspace { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +41,6 @@ public class LlmProxyPartnerPoweredWorkspace { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public LlmProxyPartnerPoweredWorkspace setBooleanVal(BooleanMessage booleanVal) { @@ -83,4 +93,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + LlmProxyPartnerPoweredWorkspacePb toPb() { + LlmProxyPartnerPoweredWorkspacePb pb = new LlmProxyPartnerPoweredWorkspacePb(); + pb.setBooleanVal(booleanVal); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static LlmProxyPartnerPoweredWorkspace fromPb(LlmProxyPartnerPoweredWorkspacePb pb) { + LlmProxyPartnerPoweredWorkspace model = new LlmProxyPartnerPoweredWorkspace(); + model.setBooleanVal(pb.getBooleanVal()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class LlmProxyPartnerPoweredWorkspaceSerializer + extends JsonSerializer { + @Override + public void serialize( + LlmProxyPartnerPoweredWorkspace value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LlmProxyPartnerPoweredWorkspacePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LlmProxyPartnerPoweredWorkspaceDeserializer + extends JsonDeserializer { + @Override + public LlmProxyPartnerPoweredWorkspace deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LlmProxyPartnerPoweredWorkspacePb pb = + mapper.readValue(p, LlmProxyPartnerPoweredWorkspacePb.class); + return LlmProxyPartnerPoweredWorkspace.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java index 7ebe4b415..35085e9c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java @@ -22,7 +22,7 @@ public DeleteLlmProxyPartnerPoweredWorkspaceResponse delete( String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteLlmProxyPartnerPoweredWorkspaceResponse.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public LlmProxyPartnerPoweredWorkspace get(GetLlmProxyPartnerPoweredWorkspaceReq String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public LlmProxyPartnerPoweredWorkspace update( String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspacePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspacePb.java new file mode 100755 index 000000000..54d28cd1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspacePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LlmProxyPartnerPoweredWorkspacePb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredWorkspacePb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredWorkspacePb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredWorkspacePb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredWorkspacePb that = (LlmProxyPartnerPoweredWorkspacePb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + 
@Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredWorkspacePb.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java index 330fcb926..4a9e111f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MicrosoftTeamsConfig.MicrosoftTeamsConfigSerializer.class) +@JsonDeserialize(using = MicrosoftTeamsConfig.MicrosoftTeamsConfigDeserializer.class) public class MicrosoftTeamsConfig { /** [Input-Only] URL for Microsoft Teams. */ - @JsonProperty("url") private String url; /** [Output-Only] Whether URL is set. 
*/ - @JsonProperty("url_set") private Boolean urlSet; public MicrosoftTeamsConfig setUrl(String url) { @@ -55,4 +64,42 @@ public String toString() { .add("urlSet", urlSet) .toString(); } + + MicrosoftTeamsConfigPb toPb() { + MicrosoftTeamsConfigPb pb = new MicrosoftTeamsConfigPb(); + pb.setUrl(url); + pb.setUrlSet(urlSet); + + return pb; + } + + static MicrosoftTeamsConfig fromPb(MicrosoftTeamsConfigPb pb) { + MicrosoftTeamsConfig model = new MicrosoftTeamsConfig(); + model.setUrl(pb.getUrl()); + model.setUrlSet(pb.getUrlSet()); + + return model; + } + + public static class MicrosoftTeamsConfigSerializer extends JsonSerializer { + @Override + public void serialize( + MicrosoftTeamsConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MicrosoftTeamsConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MicrosoftTeamsConfigDeserializer + extends JsonDeserializer { + @Override + public MicrosoftTeamsConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MicrosoftTeamsConfigPb pb = mapper.readValue(p, MicrosoftTeamsConfigPb.class); + return MicrosoftTeamsConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfigPb.java new file mode 100755 index 000000000..b2d0d646e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfigPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MicrosoftTeamsConfigPb { + @JsonProperty("url") + private String url; + + @JsonProperty("url_set") + private Boolean urlSet; + + public MicrosoftTeamsConfigPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + public MicrosoftTeamsConfigPb setUrlSet(Boolean urlSet) { + this.urlSet = urlSet; + return this; + } + + public Boolean getUrlSet() { + return urlSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MicrosoftTeamsConfigPb that = (MicrosoftTeamsConfigPb) o; + return Objects.equals(url, that.url) && Objects.equals(urlSet, that.urlSet); + } + + @Override + public int hashCode() { + return Objects.hash(url, urlSet); + } + + @Override + public String toString() { + return new ToStringer(MicrosoftTeamsConfigPb.class) + .add("url", url) + .add("urlSet", urlSet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java index d923e0b3e..05410bb65 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -13,12 +22,13 @@ * allow traffic from your Databricks workspace. */ @Generated +@JsonSerialize(using = NccAwsStableIpRule.NccAwsStableIpRuleSerializer.class) +@JsonDeserialize(using = NccAwsStableIpRule.NccAwsStableIpRuleDeserializer.class) public class NccAwsStableIpRule { /** * The list of stable IP CIDR blocks from which Databricks network traffic originates when * accessing your resources. */ - @JsonProperty("cidr_blocks") private Collection cidrBlocks; public NccAwsStableIpRule setCidrBlocks(Collection cidrBlocks) { @@ -47,4 +57,38 @@ public int hashCode() { public String toString() { return new ToStringer(NccAwsStableIpRule.class).add("cidrBlocks", cidrBlocks).toString(); } + + NccAwsStableIpRulePb toPb() { + NccAwsStableIpRulePb pb = new NccAwsStableIpRulePb(); + pb.setCidrBlocks(cidrBlocks); + + return pb; + } + + static NccAwsStableIpRule fromPb(NccAwsStableIpRulePb pb) { + NccAwsStableIpRule model = new NccAwsStableIpRule(); + model.setCidrBlocks(pb.getCidrBlocks()); + + return model; + } + + public static class NccAwsStableIpRuleSerializer extends JsonSerializer { + @Override + public void serialize(NccAwsStableIpRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccAwsStableIpRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccAwsStableIpRuleDeserializer extends JsonDeserializer { + @Override + public NccAwsStableIpRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccAwsStableIpRulePb pb = mapper.readValue(p, NccAwsStableIpRulePb.class); + return NccAwsStableIpRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRulePb.java new file mode 100755 index 000000000..bcf6b0803 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRulePb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to + * allow traffic from your Databricks workspace. 
+ */ +@Generated +class NccAwsStableIpRulePb { + @JsonProperty("cidr_blocks") + private Collection cidrBlocks; + + public NccAwsStableIpRulePb setCidrBlocks(Collection cidrBlocks) { + this.cidrBlocks = cidrBlocks; + return this; + } + + public Collection getCidrBlocks() { + return cidrBlocks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccAwsStableIpRulePb that = (NccAwsStableIpRulePb) o; + return Objects.equals(cidrBlocks, that.cidrBlocks); + } + + @Override + public int hashCode() { + return Objects.hash(cidrBlocks); + } + + @Override + public String toString() { + return new ToStringer(NccAwsStableIpRulePb.class).add("cidrBlocks", cidrBlocks).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java index 6228e6f9b..16811a5f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @@ -13,6 +22,8 @@ * portal after initialization. */ @Generated +@JsonSerialize(using = NccAzurePrivateEndpointRule.NccAzurePrivateEndpointRuleSerializer.class) +@JsonDeserialize(using = NccAzurePrivateEndpointRule.NccAzurePrivateEndpointRuleDeserializer.class) public class NccAzurePrivateEndpointRule { /** * The current status of this private endpoint. The private endpoint rules are effective only if @@ -25,61 +36,49 @@ public class NccAzurePrivateEndpointRule { * the private endpoint becomes informative and should be deleted for clean-up. - EXPIRED: If the * endpoint was created but not approved in 14 days, it will be EXPIRED. */ - @JsonProperty("connection_state") private NccAzurePrivateEndpointRuleConnectionState connectionState; /** Time in epoch milliseconds when this object was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Whether this private endpoint is deactivated. */ - @JsonProperty("deactivated") private Boolean deactivated; /** Time in epoch milliseconds when this object was deactivated. */ - @JsonProperty("deactivated_at") private Long deactivatedAt; /** - * Only used by private endpoints to customer-managed resources. + * Not used by customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. */ - @JsonProperty("domain_names") private Collection domainNames; /** The name of the Azure private endpoint resource. */ - @JsonProperty("endpoint_name") private String endpointName; /** - * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer | - * mysqlServer + * Only used by private endpoints to Azure first-party services. * *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. */ - @JsonProperty("group_id") private String groupId; /** * The ID of a network connectivity configuration, which is the parent resource of this private * endpoint rule object. */ - @JsonProperty("network_connectivity_config_id") private String networkConnectivityConfigId; /** The Azure resource ID of the target resource. */ - @JsonProperty("resource_id") private String resourceId; /** The ID of a private endpoint rule. */ - @JsonProperty("rule_id") private String ruleId; /** Time in epoch milliseconds when this object was updated. */ - @JsonProperty("updated_time") private Long updatedTime; public NccAzurePrivateEndpointRule setConnectionState( @@ -233,4 +232,61 @@ public String toString() { .add("updatedTime", updatedTime) .toString(); } + + NccAzurePrivateEndpointRulePb toPb() { + NccAzurePrivateEndpointRulePb pb = new NccAzurePrivateEndpointRulePb(); + pb.setConnectionState(connectionState); + pb.setCreationTime(creationTime); + pb.setDeactivated(deactivated); + pb.setDeactivatedAt(deactivatedAt); + pb.setDomainNames(domainNames); + pb.setEndpointName(endpointName); + pb.setGroupId(groupId); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setResourceId(resourceId); + pb.setRuleId(ruleId); + pb.setUpdatedTime(updatedTime); + + return pb; + } + + static NccAzurePrivateEndpointRule fromPb(NccAzurePrivateEndpointRulePb pb) { + NccAzurePrivateEndpointRule model = new NccAzurePrivateEndpointRule(); + model.setConnectionState(pb.getConnectionState()); + model.setCreationTime(pb.getCreationTime()); + model.setDeactivated(pb.getDeactivated()); + model.setDeactivatedAt(pb.getDeactivatedAt()); + model.setDomainNames(pb.getDomainNames()); + model.setEndpointName(pb.getEndpointName()); + model.setGroupId(pb.getGroupId()); + 
model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setResourceId(pb.getResourceId()); + model.setRuleId(pb.getRuleId()); + model.setUpdatedTime(pb.getUpdatedTime()); + + return model; + } + + public static class NccAzurePrivateEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + NccAzurePrivateEndpointRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccAzurePrivateEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccAzurePrivateEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public NccAzurePrivateEndpointRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccAzurePrivateEndpointRulePb pb = mapper.readValue(p, NccAzurePrivateEndpointRulePb.class); + return NccAzurePrivateEndpointRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRulePb.java new file mode 100755 index 000000000..e8c896993 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRulePb.java @@ -0,0 +1,201 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. 
+ */ +@Generated +class NccAzurePrivateEndpointRulePb { + @JsonProperty("connection_state") + private NccAzurePrivateEndpointRuleConnectionState connectionState; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("deactivated") + private Boolean deactivated; + + @JsonProperty("deactivated_at") + private Long deactivatedAt; + + @JsonProperty("domain_names") + private Collection domainNames; + + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonProperty("group_id") + private String groupId; + + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + @JsonProperty("resource_id") + private String resourceId; + + @JsonProperty("rule_id") + private String ruleId; + + @JsonProperty("updated_time") + private Long updatedTime; + + public NccAzurePrivateEndpointRulePb setConnectionState( + NccAzurePrivateEndpointRuleConnectionState connectionState) { + this.connectionState = connectionState; + return this; + } + + public NccAzurePrivateEndpointRuleConnectionState getConnectionState() { + return connectionState; + } + + public NccAzurePrivateEndpointRulePb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NccAzurePrivateEndpointRulePb setDeactivated(Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public NccAzurePrivateEndpointRulePb setDeactivatedAt(Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public NccAzurePrivateEndpointRulePb setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public NccAzurePrivateEndpointRulePb setEndpointName(String endpointName) { + 
this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public NccAzurePrivateEndpointRulePb setGroupId(String groupId) { + this.groupId = groupId; + return this; + } + + public String getGroupId() { + return groupId; + } + + public NccAzurePrivateEndpointRulePb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public NccAzurePrivateEndpointRulePb setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public NccAzurePrivateEndpointRulePb setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public NccAzurePrivateEndpointRulePb setUpdatedTime(Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccAzurePrivateEndpointRulePb that = (NccAzurePrivateEndpointRulePb) o; + return Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(groupId, that.groupId) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime); + } + + @Override + public int hashCode() { + return Objects.hash( 
+ connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + endpointName, + groupId, + networkConnectivityConfigId, + resourceId, + ruleId, + updatedTime); + } + + @Override + public String toString() { + return new ToStringer(NccAzurePrivateEndpointRulePb.class) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("endpointName", endpointName) + .add("groupId", groupId) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceId", resourceId) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java index b0a6607e6..d6b626eb4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -13,20 +22,19 @@ * allow traffic from 
your Databricks serverless compute resources. */ @Generated +@JsonSerialize(using = NccAzureServiceEndpointRule.NccAzureServiceEndpointRuleSerializer.class) +@JsonDeserialize(using = NccAzureServiceEndpointRule.NccAzureServiceEndpointRuleDeserializer.class) public class NccAzureServiceEndpointRule { /** * The list of subnets from which Databricks network traffic originates when accessing your Azure * resources. */ - @JsonProperty("subnets") private Collection subnets; /** The Azure region in which this service endpoint rule applies.. */ - @JsonProperty("target_region") private String targetRegion; /** The Azure services to which this service endpoint rule applies to. */ - @JsonProperty("target_services") private Collection targetServices; public NccAzureServiceEndpointRule setSubnets(Collection subnets) { @@ -80,4 +88,45 @@ public String toString() { .add("targetServices", targetServices) .toString(); } + + NccAzureServiceEndpointRulePb toPb() { + NccAzureServiceEndpointRulePb pb = new NccAzureServiceEndpointRulePb(); + pb.setSubnets(subnets); + pb.setTargetRegion(targetRegion); + pb.setTargetServices(targetServices); + + return pb; + } + + static NccAzureServiceEndpointRule fromPb(NccAzureServiceEndpointRulePb pb) { + NccAzureServiceEndpointRule model = new NccAzureServiceEndpointRule(); + model.setSubnets(pb.getSubnets()); + model.setTargetRegion(pb.getTargetRegion()); + model.setTargetServices(pb.getTargetServices()); + + return model; + } + + public static class NccAzureServiceEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + NccAzureServiceEndpointRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccAzureServiceEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccAzureServiceEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public NccAzureServiceEndpointRule deserialize(JsonParser p, DeserializationContext ctxt) 
+ throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccAzureServiceEndpointRulePb pb = mapper.readValue(p, NccAzureServiceEndpointRulePb.class); + return NccAzureServiceEndpointRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRulePb.java new file mode 100755 index 000000000..c2f77057e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRulePb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The stable Azure service endpoints. You can configure the firewall of your Azure resources to + * allow traffic from your Databricks serverless compute resources. 
+ */ +@Generated +class NccAzureServiceEndpointRulePb { + @JsonProperty("subnets") + private Collection subnets; + + @JsonProperty("target_region") + private String targetRegion; + + @JsonProperty("target_services") + private Collection targetServices; + + public NccAzureServiceEndpointRulePb setSubnets(Collection subnets) { + this.subnets = subnets; + return this; + } + + public Collection getSubnets() { + return subnets; + } + + public NccAzureServiceEndpointRulePb setTargetRegion(String targetRegion) { + this.targetRegion = targetRegion; + return this; + } + + public String getTargetRegion() { + return targetRegion; + } + + public NccAzureServiceEndpointRulePb setTargetServices( + Collection targetServices) { + this.targetServices = targetServices; + return this; + } + + public Collection getTargetServices() { + return targetServices; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccAzureServiceEndpointRulePb that = (NccAzureServiceEndpointRulePb) o; + return Objects.equals(subnets, that.subnets) + && Objects.equals(targetRegion, that.targetRegion) + && Objects.equals(targetServices, that.targetServices); + } + + @Override + public int hashCode() { + return Objects.hash(subnets, targetRegion, targetServices); + } + + @Override + public String toString() { + return new ToStringer(NccAzureServiceEndpointRulePb.class) + .add("subnets", subnets) + .add("targetRegion", targetRegion) + .add("targetServices", targetServices) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java index 1d5d18154..702d3be6d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java @@ 
-4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NccEgressConfig.NccEgressConfigSerializer.class) +@JsonDeserialize(using = NccEgressConfig.NccEgressConfigDeserializer.class) public class NccEgressConfig { /** * The network connectivity rules that are applied by default without resource specific * configurations. You can find the stable network information of your serverless compute * resources here. */ - @JsonProperty("default_rules") private NccEgressDefaultRules defaultRules; /** * The network connectivity rules that configured for each destinations. These rules override * default rules. 
*/ - @JsonProperty("target_rules") private NccEgressTargetRules targetRules; public NccEgressConfig setDefaultRules(NccEgressDefaultRules defaultRules) { @@ -63,4 +72,40 @@ public String toString() { .add("targetRules", targetRules) .toString(); } + + NccEgressConfigPb toPb() { + NccEgressConfigPb pb = new NccEgressConfigPb(); + pb.setDefaultRules(defaultRules); + pb.setTargetRules(targetRules); + + return pb; + } + + static NccEgressConfig fromPb(NccEgressConfigPb pb) { + NccEgressConfig model = new NccEgressConfig(); + model.setDefaultRules(pb.getDefaultRules()); + model.setTargetRules(pb.getTargetRules()); + + return model; + } + + public static class NccEgressConfigSerializer extends JsonSerializer { + @Override + public void serialize(NccEgressConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccEgressConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccEgressConfigDeserializer extends JsonDeserializer { + @Override + public NccEgressConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccEgressConfigPb pb = mapper.readValue(p, NccEgressConfigPb.class); + return NccEgressConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfigPb.java new file mode 100755 index 000000000..48422d1df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NccEgressConfigPb { + @JsonProperty("default_rules") + private NccEgressDefaultRules defaultRules; + + @JsonProperty("target_rules") + private NccEgressTargetRules targetRules; + + public NccEgressConfigPb setDefaultRules(NccEgressDefaultRules defaultRules) { + this.defaultRules = defaultRules; + return this; + } + + public NccEgressDefaultRules getDefaultRules() { + return defaultRules; + } + + public NccEgressConfigPb setTargetRules(NccEgressTargetRules targetRules) { + this.targetRules = targetRules; + return this; + } + + public NccEgressTargetRules getTargetRules() { + return targetRules; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccEgressConfigPb that = (NccEgressConfigPb) o; + return Objects.equals(defaultRules, that.defaultRules) + && Objects.equals(targetRules, that.targetRules); + } + + @Override + public int hashCode() { + return Objects.hash(defaultRules, targetRules); + } + + @Override + public String toString() { + return new ToStringer(NccEgressConfigPb.class) + .add("defaultRules", defaultRules) + .add("targetRules", targetRules) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java index e46162f5d..9b0c1082c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java @@ -4,24 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Default rules don't have specific targets. */ @Generated +@JsonSerialize(using = NccEgressDefaultRules.NccEgressDefaultRulesSerializer.class) +@JsonDeserialize(using = NccEgressDefaultRules.NccEgressDefaultRulesDeserializer.class) public class NccEgressDefaultRules { /** * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to * allow traffic from your Databricks workspace. */ - @JsonProperty("aws_stable_ip_rule") private NccAwsStableIpRule awsStableIpRule; /** * The stable Azure service endpoints. You can configure the firewall of your Azure resources to * allow traffic from your Databricks serverless compute resources. 
*/ - @JsonProperty("azure_service_endpoint_rule") private NccAzureServiceEndpointRule azureServiceEndpointRule; public NccEgressDefaultRules setAwsStableIpRule(NccAwsStableIpRule awsStableIpRule) { @@ -64,4 +73,43 @@ public String toString() { .add("azureServiceEndpointRule", azureServiceEndpointRule) .toString(); } + + NccEgressDefaultRulesPb toPb() { + NccEgressDefaultRulesPb pb = new NccEgressDefaultRulesPb(); + pb.setAwsStableIpRule(awsStableIpRule); + pb.setAzureServiceEndpointRule(azureServiceEndpointRule); + + return pb; + } + + static NccEgressDefaultRules fromPb(NccEgressDefaultRulesPb pb) { + NccEgressDefaultRules model = new NccEgressDefaultRules(); + model.setAwsStableIpRule(pb.getAwsStableIpRule()); + model.setAzureServiceEndpointRule(pb.getAzureServiceEndpointRule()); + + return model; + } + + public static class NccEgressDefaultRulesSerializer + extends JsonSerializer { + @Override + public void serialize( + NccEgressDefaultRules value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccEgressDefaultRulesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccEgressDefaultRulesDeserializer + extends JsonDeserializer { + @Override + public NccEgressDefaultRules deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccEgressDefaultRulesPb pb = mapper.readValue(p, NccEgressDefaultRulesPb.class); + return NccEgressDefaultRules.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRulesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRulesPb.java new file mode 100755 index 000000000..0a54d5d94 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRulesPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Default rules don't have specific targets. */ +@Generated +class NccEgressDefaultRulesPb { + @JsonProperty("aws_stable_ip_rule") + private NccAwsStableIpRule awsStableIpRule; + + @JsonProperty("azure_service_endpoint_rule") + private NccAzureServiceEndpointRule azureServiceEndpointRule; + + public NccEgressDefaultRulesPb setAwsStableIpRule(NccAwsStableIpRule awsStableIpRule) { + this.awsStableIpRule = awsStableIpRule; + return this; + } + + public NccAwsStableIpRule getAwsStableIpRule() { + return awsStableIpRule; + } + + public NccEgressDefaultRulesPb setAzureServiceEndpointRule( + NccAzureServiceEndpointRule azureServiceEndpointRule) { + this.azureServiceEndpointRule = azureServiceEndpointRule; + return this; + } + + public NccAzureServiceEndpointRule getAzureServiceEndpointRule() { + return azureServiceEndpointRule; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccEgressDefaultRulesPb that = (NccEgressDefaultRulesPb) o; + return Objects.equals(awsStableIpRule, that.awsStableIpRule) 
+ && Objects.equals(azureServiceEndpointRule, that.azureServiceEndpointRule); + } + + @Override + public int hashCode() { + return Objects.hash(awsStableIpRule, azureServiceEndpointRule); + } + + @Override + public String toString() { + return new ToStringer(NccEgressDefaultRulesPb.class) + .add("awsStableIpRule", awsStableIpRule) + .add("azureServiceEndpointRule", azureServiceEndpointRule) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java index 4cb399bdf..aacbd161b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java @@ -4,17 +4,43 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Target rule controls the egress rules that are dedicated to specific resources. 
*/ @Generated +@JsonSerialize(using = NccEgressTargetRules.NccEgressTargetRulesSerializer.class) +@JsonDeserialize(using = NccEgressTargetRules.NccEgressTargetRulesDeserializer.class) public class NccEgressTargetRules { + /** AWS private endpoint rule controls the AWS private endpoint based egress rules. */ + private Collection + awsPrivateEndpointRules; + /** */ - @JsonProperty("azure_private_endpoint_rules") private Collection azurePrivateEndpointRules; + public NccEgressTargetRules setAwsPrivateEndpointRules( + Collection + awsPrivateEndpointRules) { + this.awsPrivateEndpointRules = awsPrivateEndpointRules; + return this; + } + + public Collection + getAwsPrivateEndpointRules() { + return awsPrivateEndpointRules; + } + public NccEgressTargetRules setAzurePrivateEndpointRules( Collection azurePrivateEndpointRules) { this.azurePrivateEndpointRules = azurePrivateEndpointRules; @@ -30,18 +56,58 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NccEgressTargetRules that = (NccEgressTargetRules) o; - return Objects.equals(azurePrivateEndpointRules, that.azurePrivateEndpointRules); + return Objects.equals(awsPrivateEndpointRules, that.awsPrivateEndpointRules) + && Objects.equals(azurePrivateEndpointRules, that.azurePrivateEndpointRules); } @Override public int hashCode() { - return Objects.hash(azurePrivateEndpointRules); + return Objects.hash(awsPrivateEndpointRules, azurePrivateEndpointRules); } @Override public String toString() { return new ToStringer(NccEgressTargetRules.class) + .add("awsPrivateEndpointRules", awsPrivateEndpointRules) .add("azurePrivateEndpointRules", azurePrivateEndpointRules) .toString(); } + + NccEgressTargetRulesPb toPb() { + NccEgressTargetRulesPb pb = new NccEgressTargetRulesPb(); + pb.setAwsPrivateEndpointRules(awsPrivateEndpointRules); + pb.setAzurePrivateEndpointRules(azurePrivateEndpointRules); + + return pb; + } + + static NccEgressTargetRules 
fromPb(NccEgressTargetRulesPb pb) { + NccEgressTargetRules model = new NccEgressTargetRules(); + model.setAwsPrivateEndpointRules(pb.getAwsPrivateEndpointRules()); + model.setAzurePrivateEndpointRules(pb.getAzurePrivateEndpointRules()); + + return model; + } + + public static class NccEgressTargetRulesSerializer extends JsonSerializer { + @Override + public void serialize( + NccEgressTargetRules value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccEgressTargetRulesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccEgressTargetRulesDeserializer + extends JsonDeserializer { + @Override + public NccEgressTargetRules deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccEgressTargetRulesPb pb = mapper.readValue(p, NccEgressTargetRulesPb.class); + return NccEgressTargetRules.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRulesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRulesPb.java new file mode 100755 index 000000000..ffa444da3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRulesPb.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Target rule controls the egress rules that are dedicated to specific resources. 
*/ +@Generated +class NccEgressTargetRulesPb { + @JsonProperty("aws_private_endpoint_rules") + private Collection + awsPrivateEndpointRules; + + @JsonProperty("azure_private_endpoint_rules") + private Collection azurePrivateEndpointRules; + + public NccEgressTargetRulesPb setAwsPrivateEndpointRules( + Collection + awsPrivateEndpointRules) { + this.awsPrivateEndpointRules = awsPrivateEndpointRules; + return this; + } + + public Collection + getAwsPrivateEndpointRules() { + return awsPrivateEndpointRules; + } + + public NccEgressTargetRulesPb setAzurePrivateEndpointRules( + Collection azurePrivateEndpointRules) { + this.azurePrivateEndpointRules = azurePrivateEndpointRules; + return this; + } + + public Collection getAzurePrivateEndpointRules() { + return azurePrivateEndpointRules; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccEgressTargetRulesPb that = (NccEgressTargetRulesPb) o; + return Objects.equals(awsPrivateEndpointRules, that.awsPrivateEndpointRules) + && Objects.equals(azurePrivateEndpointRules, that.azurePrivateEndpointRules); + } + + @Override + public int hashCode() { + return Objects.hash(awsPrivateEndpointRules, azurePrivateEndpointRules); + } + + @Override + public String toString() { + return new ToStringer(NccEgressTargetRulesPb.class) + .add("awsPrivateEndpointRules", awsPrivateEndpointRules) + .add("azurePrivateEndpointRules", azurePrivateEndpointRules) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java new file mode 100755 index 000000000..77183cc08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java @@ -0,0 +1,393 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. + */ +@Generated +@JsonSerialize(using = NccPrivateEndpointRule.NccPrivateEndpointRuleSerializer.class) +@JsonDeserialize(using = NccPrivateEndpointRule.NccPrivateEndpointRuleDeserializer.class) +public class NccPrivateEndpointRule { + /** Databricks account ID. You can find your account ID from the Accounts Console. */ + private String accountId; + + /** + * The current status of this private endpoint. The private endpoint rules are effective only if + * the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + * resources in the Cloud console before they take effect. The possible values are: - PENDING: The + * endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + * and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected + * by the private link resource owner. - DISCONNECTED: Connection was removed by the private link + * resource owner, the private endpoint becomes informative and should be deleted for clean-up. 
- + * EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED. + */ + private NccPrivateEndpointRulePrivateLinkConnectionState connectionState; + + /** Time in epoch milliseconds when this object was created. */ + private Long creationTime; + + /** Whether this private endpoint is deactivated. */ + private Boolean deactivated; + + /** Time in epoch milliseconds when this object was deactivated. */ + private Long deactivatedAt; + + /** + * Only used by private endpoints to customer-managed private endpoint services. + * + *

Domain names of target private link service. When updating this field, the full list of + * target domain_names must be specified. + */ + private Collection domainNames; + + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + private Boolean enabled; + + /** The name of the Azure private endpoint resource. */ + private String endpointName; + + /** + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + private String endpointService; + + /** + * Not used by customer-managed private endpoint services. + * + *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace + * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. + */ + private String groupId; + + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. + */ + private String networkConnectivityConfigId; + + /** The Azure resource ID of the target resource. */ + private String resourceId; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + private Collection resourceNames; + + /** The ID of a private endpoint rule. */ + private String ruleId; + + /** Time in epoch milliseconds when this object was updated. */ + private Long updatedTime; + + /** + * The AWS VPC endpoint ID. You can use this ID to identify the VPC endpoint created by + * Databricks. + */ + private String vpcEndpointId; + + public NccPrivateEndpointRule setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public NccPrivateEndpointRule setConnectionState( + NccPrivateEndpointRulePrivateLinkConnectionState connectionState) { + this.connectionState = connectionState; + return this; + } + + public NccPrivateEndpointRulePrivateLinkConnectionState getConnectionState() { + return connectionState; + } + + public NccPrivateEndpointRule setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NccPrivateEndpointRule setDeactivated(Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public NccPrivateEndpointRule setDeactivatedAt(Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public NccPrivateEndpointRule setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public NccPrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + 
} + + public Boolean getEnabled() { + return enabled; + } + + public NccPrivateEndpointRule setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public NccPrivateEndpointRule setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public NccPrivateEndpointRule setGroupId(String groupId) { + this.groupId = groupId; + return this; + } + + public String getGroupId() { + return groupId; + } + + public NccPrivateEndpointRule setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public NccPrivateEndpointRule setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public NccPrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public NccPrivateEndpointRule setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public NccPrivateEndpointRule setUpdatedTime(Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public NccPrivateEndpointRule setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
NccPrivateEndpointRule that = (NccPrivateEndpointRule) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(groupId, that.groupId) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointName, + endpointService, + groupId, + networkConnectivityConfigId, + resourceId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(NccPrivateEndpointRule.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("endpointName", endpointName) + .add("endpointService", endpointService) + .add("groupId", groupId) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceId", resourceId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } + + NccPrivateEndpointRulePb toPb() { + 
NccPrivateEndpointRulePb pb = new NccPrivateEndpointRulePb(); + pb.setAccountId(accountId); + pb.setConnectionState(connectionState); + pb.setCreationTime(creationTime); + pb.setDeactivated(deactivated); + pb.setDeactivatedAt(deactivatedAt); + pb.setDomainNames(domainNames); + pb.setEnabled(enabled); + pb.setEndpointName(endpointName); + pb.setEndpointService(endpointService); + pb.setGroupId(groupId); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setResourceId(resourceId); + pb.setResourceNames(resourceNames); + pb.setRuleId(ruleId); + pb.setUpdatedTime(updatedTime); + pb.setVpcEndpointId(vpcEndpointId); + + return pb; + } + + static NccPrivateEndpointRule fromPb(NccPrivateEndpointRulePb pb) { + NccPrivateEndpointRule model = new NccPrivateEndpointRule(); + model.setAccountId(pb.getAccountId()); + model.setConnectionState(pb.getConnectionState()); + model.setCreationTime(pb.getCreationTime()); + model.setDeactivated(pb.getDeactivated()); + model.setDeactivatedAt(pb.getDeactivatedAt()); + model.setDomainNames(pb.getDomainNames()); + model.setEnabled(pb.getEnabled()); + model.setEndpointName(pb.getEndpointName()); + model.setEndpointService(pb.getEndpointService()); + model.setGroupId(pb.getGroupId()); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setResourceId(pb.getResourceId()); + model.setResourceNames(pb.getResourceNames()); + model.setRuleId(pb.getRuleId()); + model.setUpdatedTime(pb.getUpdatedTime()); + model.setVpcEndpointId(pb.getVpcEndpointId()); + + return model; + } + + public static class NccPrivateEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + NccPrivateEndpointRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NccPrivateEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NccPrivateEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public 
NccPrivateEndpointRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NccPrivateEndpointRulePb pb = mapper.readValue(p, NccPrivateEndpointRulePb.class); + return NccPrivateEndpointRule.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePb.java new file mode 100755 index 000000000..eacb25347 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePb.java @@ -0,0 +1,276 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. 
+ */ +@Generated +class NccPrivateEndpointRulePb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("connection_state") + private NccPrivateEndpointRulePrivateLinkConnectionState connectionState; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("deactivated") + private Boolean deactivated; + + @JsonProperty("deactivated_at") + private Long deactivatedAt; + + @JsonProperty("domain_names") + private Collection domainNames; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonProperty("endpoint_service") + private String endpointService; + + @JsonProperty("group_id") + private String groupId; + + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + @JsonProperty("resource_id") + private String resourceId; + + @JsonProperty("resource_names") + private Collection resourceNames; + + @JsonProperty("rule_id") + private String ruleId; + + @JsonProperty("updated_time") + private Long updatedTime; + + @JsonProperty("vpc_endpoint_id") + private String vpcEndpointId; + + public NccPrivateEndpointRulePb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public NccPrivateEndpointRulePb setConnectionState( + NccPrivateEndpointRulePrivateLinkConnectionState connectionState) { + this.connectionState = connectionState; + return this; + } + + public NccPrivateEndpointRulePrivateLinkConnectionState getConnectionState() { + return connectionState; + } + + public NccPrivateEndpointRulePb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NccPrivateEndpointRulePb setDeactivated(Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return 
deactivated; + } + + public NccPrivateEndpointRulePb setDeactivatedAt(Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public NccPrivateEndpointRulePb setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public NccPrivateEndpointRulePb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public NccPrivateEndpointRulePb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public NccPrivateEndpointRulePb setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public NccPrivateEndpointRulePb setGroupId(String groupId) { + this.groupId = groupId; + return this; + } + + public String getGroupId() { + return groupId; + } + + public NccPrivateEndpointRulePb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public NccPrivateEndpointRulePb setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public NccPrivateEndpointRulePb setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public NccPrivateEndpointRulePb setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + 
public NccPrivateEndpointRulePb setUpdatedTime(Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public NccPrivateEndpointRulePb setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccPrivateEndpointRulePb that = (NccPrivateEndpointRulePb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(groupId, that.groupId) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointName, + endpointService, + groupId, + networkConnectivityConfigId, + resourceId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(NccPrivateEndpointRulePb.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", 
deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("endpointName", endpointName) + .add("endpointService", endpointService) + .add("groupId", groupId) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceId", resourceId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java new file mode 100755 index 000000000..0b0bcdebd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum NccPrivateEndpointRulePrivateLinkConnectionState { + DISCONNECTED, + ESTABLISHED, + EXPIRED, + PENDING, + REJECTED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java index 72ae3444f..275519332 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java @@ -62,7 +62,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( return impl.createNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( String networkConnectivityConfigId, CreatePrivateEndpointRule privateEndpointRule) { return createPrivateEndpointRule( new CreatePrivateEndpointRuleRequest() @@ -84,7 +84,7 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule( *

[serverless private link]: * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link */ - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest request) { return impl.createPrivateEndpointRule(request); } @@ -105,7 +105,7 @@ public void deleteNetworkConnectivityConfiguration( impl.deleteNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId) { return deletePrivateEndpointRule( new DeletePrivateEndpointRuleRequest() @@ -122,7 +122,7 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest request) { return impl.deletePrivateEndpointRule(request); } @@ -144,7 +144,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( return impl.getNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule getPrivateEndpointRule( + public NccPrivateEndpointRule getPrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId) { return getPrivateEndpointRule( new GetPrivateEndpointRuleRequest() @@ -157,7 +157,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule( * *

Gets the private endpoint rule. */ - public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { + public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { return impl.getPrivateEndpointRule(request); } @@ -181,7 +181,7 @@ public Iterable listNetworkConnectivityConfigu }); } - public Iterable listPrivateEndpointRules( + public Iterable listPrivateEndpointRules( String networkConnectivityConfigId) { return listPrivateEndpointRules( new ListPrivateEndpointRulesRequest() @@ -193,12 +193,12 @@ public Iterable listPrivateEndpointRules( * *

Gets an array of private endpoint rules. */ - public Iterable listPrivateEndpointRules( + public Iterable listPrivateEndpointRules( ListPrivateEndpointRulesRequest request) { return new Paginator<>( request, impl::listPrivateEndpointRules, - ListNccAzurePrivateEndpointRulesResponse::getItems, + ListPrivateEndpointRulesResponse::getItems, response -> { String token = response.getNextPageToken(); if (token == null || token.isEmpty()) { @@ -208,13 +208,13 @@ public Iterable listPrivateEndpointRules( }); } - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( + public NccPrivateEndpointRule updatePrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId, UpdatePrivateEndpointRule privateEndpointRule, String updateMask) { - return updateNccAzurePrivateEndpointRulePublic( - new UpdateNccAzurePrivateEndpointRulePublicRequest() + return updatePrivateEndpointRule( + new UpdateNccPrivateEndpointRuleRequest() .setNetworkConnectivityConfigId(networkConnectivityConfigId) .setPrivateEndpointRuleId(privateEndpointRuleId) .setPrivateEndpointRule(privateEndpointRule) @@ -227,9 +227,9 @@ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( *

Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed * resources is allowed to be updated. */ - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest request) { - return impl.updateNccAzurePrivateEndpointRulePublic(request); + public NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest request) { + return impl.updatePrivateEndpointRule(request); } public NetworkConnectivityService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java index 6c03595d4..8a717021b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java @@ -4,25 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Properties of the new network connectivity configuration. 
*/ @Generated +@JsonSerialize( + using = NetworkConnectivityConfiguration.NetworkConnectivityConfigurationSerializer.class) +@JsonDeserialize( + using = NetworkConnectivityConfiguration.NetworkConnectivityConfigurationDeserializer.class) public class NetworkConnectivityConfiguration { - /** The Databricks account ID that hosts the credential. */ - @JsonProperty("account_id") + /** + * Your Databricks account ID. You can find your account ID in your Databricks accounts console. + */ private String accountId; /** Time in epoch milliseconds when this object was created. */ - @JsonProperty("creation_time") private Long creationTime; /** * The network connectivity rules that apply to network traffic from your serverless compute * resources. */ - @JsonProperty("egress_config") private NccEgressConfig egressConfig; /** @@ -30,22 +42,18 @@ public class NetworkConnectivityConfiguration { * characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name * must match the regular expression ^[0-9a-zA-Z-_]{3,30}$ */ - @JsonProperty("name") private String name; /** Databricks network connectivity configuration ID. */ - @JsonProperty("network_connectivity_config_id") private String networkConnectivityConfigId; /** * The region for the network connectivity configuration. Only workspaces in the same region can * be attached to the network connectivity configuration. */ - @JsonProperty("region") private String region; /** Time in epoch milliseconds when this object was updated. 
*/ - @JsonProperty("updated_time") private Long updatedTime; public NetworkConnectivityConfiguration setAccountId(String accountId) { @@ -150,4 +158,54 @@ public String toString() { .add("updatedTime", updatedTime) .toString(); } + + NetworkConnectivityConfigurationPb toPb() { + NetworkConnectivityConfigurationPb pb = new NetworkConnectivityConfigurationPb(); + pb.setAccountId(accountId); + pb.setCreationTime(creationTime); + pb.setEgressConfig(egressConfig); + pb.setName(name); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setRegion(region); + pb.setUpdatedTime(updatedTime); + + return pb; + } + + static NetworkConnectivityConfiguration fromPb(NetworkConnectivityConfigurationPb pb) { + NetworkConnectivityConfiguration model = new NetworkConnectivityConfiguration(); + model.setAccountId(pb.getAccountId()); + model.setCreationTime(pb.getCreationTime()); + model.setEgressConfig(pb.getEgressConfig()); + model.setName(pb.getName()); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setRegion(pb.getRegion()); + model.setUpdatedTime(pb.getUpdatedTime()); + + return model; + } + + public static class NetworkConnectivityConfigurationSerializer + extends JsonSerializer { + @Override + public void serialize( + NetworkConnectivityConfiguration value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkConnectivityConfigurationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkConnectivityConfigurationDeserializer + extends JsonDeserializer { + @Override + public NetworkConnectivityConfiguration deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkConnectivityConfigurationPb pb = + mapper.readValue(p, NetworkConnectivityConfigurationPb.class); + return NetworkConnectivityConfiguration.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigurationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigurationPb.java new file mode 100755 index 000000000..0a97735d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigurationPb.java @@ -0,0 +1,136 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Properties of the new network connectivity configuration. 
*/ +@Generated +class NetworkConnectivityConfigurationPb { + @JsonProperty("account_id") + private String accountId; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("egress_config") + private NccEgressConfig egressConfig; + + @JsonProperty("name") + private String name; + + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + @JsonProperty("region") + private String region; + + @JsonProperty("updated_time") + private Long updatedTime; + + public NetworkConnectivityConfigurationPb setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public NetworkConnectivityConfigurationPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NetworkConnectivityConfigurationPb setEgressConfig(NccEgressConfig egressConfig) { + this.egressConfig = egressConfig; + return this; + } + + public NccEgressConfig getEgressConfig() { + return egressConfig; + } + + public NetworkConnectivityConfigurationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public NetworkConnectivityConfigurationPb setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public NetworkConnectivityConfigurationPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public NetworkConnectivityConfigurationPb setUpdatedTime(Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + @Override + public boolean equals(Object o) { + 
if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkConnectivityConfigurationPb that = (NetworkConnectivityConfigurationPb) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(egressConfig, that.egressConfig) + && Objects.equals(name, that.name) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(region, that.region) + && Objects.equals(updatedTime, that.updatedTime); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + creationTime, + egressConfig, + name, + networkConnectivityConfigId, + region, + updatedTime); + } + + @Override + public String toString() { + return new ToStringer(NetworkConnectivityConfigurationPb.class) + .add("accountId", accountId) + .add("creationTime", creationTime) + .add("egressConfig", egressConfig) + .add("name", name) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("region", region) + .add("updatedTime", updatedTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java index 16b4dd419..8a349735e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java @@ -25,7 +25,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( try { Request req = new Request("POST", path, apiClient.serialize(request.getNetworkConnectivityConfig())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, 
NetworkConnectivityConfiguration.class); @@ -35,7 +35,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest request) { String path = String.format( @@ -44,10 +44,10 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule( try { Request req = new Request("POST", path, apiClient.serialize(request.getPrivateEndpointRule())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -62,7 +62,7 @@ public void deleteNetworkConnectivityConfiguration( apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteNetworkConnectivityConfigurationResponse.class); } catch (IOException e) { @@ -71,7 +71,7 @@ public void deleteNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest request) { String path = String.format( @@ -81,9 +81,9 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( request.getPrivateEndpointRuleId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + 
return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -98,7 +98,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, NetworkConnectivityConfiguration.class); } catch (IOException e) { @@ -107,7 +107,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { + public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s", @@ -116,9 +116,9 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule request.getPrivateEndpointRuleId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -132,7 +132,7 @@ public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConf "/api/2.0/accounts/%s/network-connectivity-configs", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListNetworkConnectivityConfigurationsResponse.class); } catch 
(IOException e) { @@ -141,7 +141,7 @@ public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConf } @Override - public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + public ListPrivateEndpointRulesResponse listPrivateEndpointRules( ListPrivateEndpointRulesRequest request) { String path = String.format( @@ -149,17 +149,17 @@ public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, ListNccAzurePrivateEndpointRulesResponse.class); + return apiClient.execute(req, ListPrivateEndpointRulesResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest request) { + public NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest request) { String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s", @@ -169,10 +169,10 @@ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( try { Request req = new Request("PATCH", path, apiClient.serialize(request.getPrivateEndpointRule())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java index 55abae74d..eeaa80e88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java @@ -53,7 +53,7 @@ NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( *

[serverless private link]: * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link */ - NccAzurePrivateEndpointRule createPrivateEndpointRule( + NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest createPrivateEndpointRuleRequest); /** @@ -73,7 +73,7 @@ void deleteNetworkConnectivityConfiguration( * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ - NccAzurePrivateEndpointRule deletePrivateEndpointRule( + NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest deletePrivateEndpointRuleRequest); /** @@ -89,7 +89,7 @@ NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( * *

Gets the private endpoint rule. */ - NccAzurePrivateEndpointRule getPrivateEndpointRule( + NccPrivateEndpointRule getPrivateEndpointRule( GetPrivateEndpointRuleRequest getPrivateEndpointRuleRequest); /** @@ -105,7 +105,7 @@ ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurati * *

Gets an array of private endpoint rules. */ - ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + ListPrivateEndpointRulesResponse listPrivateEndpointRules( ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest); /** @@ -114,7 +114,6 @@ ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( *

Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed * resources is allowed to be updated. */ - NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest - updateNccAzurePrivateEndpointRulePublicRequest); + NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest updateNccPrivateEndpointRuleRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java index 1f9f29054..134a6c985 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java @@ -22,7 +22,7 @@ public AccountNetworkPolicy createNetworkPolicyRpc(CreateNetworkPolicyRequest re String.format("/api/2.0/accounts/%s/network-policies", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request.getNetworkPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountNetworkPolicy.class); @@ -39,7 +39,7 @@ public void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest request) { apiClient.configuredAccountID(), request.getNetworkPolicyId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteNetworkPolicyRpcResponse.class); } catch (IOException e) { @@ -55,7 +55,7 @@ public AccountNetworkPolicy getNetworkPolicyRpc(GetNetworkPolicyRequest request) apiClient.configuredAccountID(), request.getNetworkPolicyId()); try { Request req = new 
Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AccountNetworkPolicy.class); } catch (IOException e) { @@ -69,7 +69,7 @@ public ListNetworkPoliciesResponse listNetworkPoliciesRpc(ListNetworkPoliciesReq String.format("/api/2.0/accounts/%s/network-policies", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListNetworkPoliciesResponse.class); } catch (IOException e) { @@ -85,7 +85,7 @@ public AccountNetworkPolicy updateNetworkPolicyRpc(UpdateNetworkPolicyRequest re apiClient.configuredAccountID(), request.getNetworkPolicyId()); try { Request req = new Request("PUT", path, apiClient.serialize(request.getNetworkPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AccountNetworkPolicy.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java index a3b0dea02..b5fdb76c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -15,9 +24,10 @@ * API-design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/ */ @Generated +@JsonSerialize(using = NetworkPolicyEgress.NetworkPolicyEgressSerializer.class) +@JsonDeserialize(using = NetworkPolicyEgress.NetworkPolicyEgressDeserializer.class) public class NetworkPolicyEgress { /** The access policy enforced for egress traffic to the internet. */ - @JsonProperty("network_access") private EgressNetworkPolicyNetworkAccessPolicy networkAccess; public NetworkPolicyEgress setNetworkAccess( @@ -47,4 +57,39 @@ public int hashCode() { public String toString() { return new ToStringer(NetworkPolicyEgress.class).add("networkAccess", networkAccess).toString(); } + + NetworkPolicyEgressPb toPb() { + NetworkPolicyEgressPb pb = new NetworkPolicyEgressPb(); + pb.setNetworkAccess(networkAccess); + + return pb; + } + + static NetworkPolicyEgress fromPb(NetworkPolicyEgressPb pb) { + NetworkPolicyEgress model = new NetworkPolicyEgress(); + model.setNetworkAccess(pb.getNetworkAccess()); + + return model; + } + + public static class NetworkPolicyEgressSerializer extends JsonSerializer { + @Override + public void serialize(NetworkPolicyEgress value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NetworkPolicyEgressPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NetworkPolicyEgressDeserializer + extends JsonDeserializer { + @Override + public NetworkPolicyEgress deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NetworkPolicyEgressPb pb = mapper.readValue(p, NetworkPolicyEgressPb.class); + return NetworkPolicyEgress.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgressPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgressPb.java new file mode 100755 index 000000000..0aabc610e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgressPb.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The network policies applying for egress traffic. This message is used by the UI/REST API. We + * translate this message to the format expected by the dataplane in Lakehouse Network Manager (for + * the format expected by the dataplane, see networkconfig.textproto). This policy should be + * consistent with [[com.databricks.api.proto.settingspolicy.EgressNetworkPolicy]]. 
Details see + * API-design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/ + */ +@Generated +class NetworkPolicyEgressPb { + @JsonProperty("network_access") + private EgressNetworkPolicyNetworkAccessPolicy networkAccess; + + public NetworkPolicyEgressPb setNetworkAccess( + EgressNetworkPolicyNetworkAccessPolicy networkAccess) { + this.networkAccess = networkAccess; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicy getNetworkAccess() { + return networkAccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkPolicyEgressPb that = (NetworkPolicyEgressPb) o; + return Objects.equals(networkAccess, that.networkAccess); + } + + @Override + public int hashCode() { + return Objects.hash(networkAccess); + } + + @Override + public String toString() { + return new ToStringer(NetworkPolicyEgressPb.class) + .add("networkAccess", networkAccess) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestination.java index 15289ade8..06a9e9f6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestination.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NotificationDestination.NotificationDestinationSerializer.class) +@JsonDeserialize(using = NotificationDestination.NotificationDestinationDeserializer.class) public class NotificationDestination { /** * The configuration for the notification destination. Will be exactly one of the nested configs. * Only returns for users with workspace admin permissions. */ - @JsonProperty("config") private Config config; /** * [Output-only] The type of the notification destination. The type can not be changed once set. */ - @JsonProperty("destination_type") private DestinationType destinationType; /** The display name for the notification destination. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying notification destination. 
*/ - @JsonProperty("id") private String id; public NotificationDestination setConfig(Config config) { @@ -91,4 +98,47 @@ public String toString() { .add("id", id) .toString(); } + + NotificationDestinationPb toPb() { + NotificationDestinationPb pb = new NotificationDestinationPb(); + pb.setConfig(config); + pb.setDestinationType(destinationType); + pb.setDisplayName(displayName); + pb.setId(id); + + return pb; + } + + static NotificationDestination fromPb(NotificationDestinationPb pb) { + NotificationDestination model = new NotificationDestination(); + model.setConfig(pb.getConfig()); + model.setDestinationType(pb.getDestinationType()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + + return model; + } + + public static class NotificationDestinationSerializer + extends JsonSerializer { + @Override + public void serialize( + NotificationDestination value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotificationDestinationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotificationDestinationDeserializer + extends JsonDeserializer { + @Override + public NotificationDestination deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotificationDestinationPb pb = mapper.readValue(p, NotificationDestinationPb.class); + return NotificationDestination.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationPb.java new file mode 100755 index 000000000..afb5594cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NotificationDestinationPb { + @JsonProperty("config") + private Config config; + + @JsonProperty("destination_type") + private DestinationType destinationType; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + public NotificationDestinationPb setConfig(Config config) { + this.config = config; + return this; + } + + public Config getConfig() { + return config; + } + + public NotificationDestinationPb setDestinationType(DestinationType destinationType) { + this.destinationType = destinationType; + return this; + } + + public DestinationType getDestinationType() { + return destinationType; + } + + public NotificationDestinationPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public NotificationDestinationPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o 
== null || getClass() != o.getClass()) return false; + NotificationDestinationPb that = (NotificationDestinationPb) o; + return Objects.equals(config, that.config) + && Objects.equals(destinationType, that.destinationType) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(config, destinationType, displayName, id); + } + + @Override + public String toString() { + return new ToStringer(NotificationDestinationPb.class) + .add("config", config) + .add("destinationType", destinationType) + .add("displayName", displayName) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java index 498afdf6b..f26808b0b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java @@ -21,7 +21,7 @@ public NotificationDestination create(CreateNotificationDestinationRequest reque String path = "/api/2.0/notification-destinations"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, NotificationDestination.class); @@ -35,7 +35,7 @@ public void delete(DeleteNotificationDestinationRequest request) { String path = String.format("/api/2.0/notification-destinations/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, Empty.class); } catch 
(IOException e) { @@ -48,7 +48,7 @@ public NotificationDestination get(GetNotificationDestinationRequest request) { String path = String.format("/api/2.0/notification-destinations/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, NotificationDestination.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListNotificationDestinationsResponse list(ListNotificationDestinationsReq String path = "/api/2.0/notification-destinations"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListNotificationDestinationsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public NotificationDestination update(UpdateNotificationDestinationRequest reque String path = String.format("/api/2.0/notification-destinations/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, NotificationDestination.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfig.java index 6d6dfd08b..51217b893 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfig.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PagerdutyConfig.PagerdutyConfigSerializer.class) +@JsonDeserialize(using = PagerdutyConfig.PagerdutyConfigDeserializer.class) public class PagerdutyConfig { /** [Input-Only] Integration key for PagerDuty. */ - @JsonProperty("integration_key") private String integrationKey; /** [Output-Only] Whether integration key is set. */ - @JsonProperty("integration_key_set") private Boolean integrationKeySet; public PagerdutyConfig setIntegrationKey(String integrationKey) { @@ -56,4 +65,40 @@ public String toString() { .add("integrationKeySet", integrationKeySet) .toString(); } + + PagerdutyConfigPb toPb() { + PagerdutyConfigPb pb = new PagerdutyConfigPb(); + pb.setIntegrationKey(integrationKey); + pb.setIntegrationKeySet(integrationKeySet); + + return pb; + } + + static PagerdutyConfig fromPb(PagerdutyConfigPb pb) { + PagerdutyConfig model = new PagerdutyConfig(); + model.setIntegrationKey(pb.getIntegrationKey()); + model.setIntegrationKeySet(pb.getIntegrationKeySet()); + + return model; + } + + public static class PagerdutyConfigSerializer extends JsonSerializer { + @Override + public void serialize(PagerdutyConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PagerdutyConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PagerdutyConfigDeserializer extends JsonDeserializer { + @Override + public 
PagerdutyConfig deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PagerdutyConfigPb pb = mapper.readValue(p, PagerdutyConfigPb.class); + return PagerdutyConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfigPb.java new file mode 100755 index 000000000..19fb2cf23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PagerdutyConfigPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PagerdutyConfigPb { + @JsonProperty("integration_key") + private String integrationKey; + + @JsonProperty("integration_key_set") + private Boolean integrationKeySet; + + public PagerdutyConfigPb setIntegrationKey(String integrationKey) { + this.integrationKey = integrationKey; + return this; + } + + public String getIntegrationKey() { + return integrationKey; + } + + public PagerdutyConfigPb setIntegrationKeySet(Boolean integrationKeySet) { + this.integrationKeySet = integrationKeySet; + return this; + } + + public Boolean getIntegrationKeySet() { + return integrationKeySet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PagerdutyConfigPb that = (PagerdutyConfigPb) o; + return Objects.equals(integrationKey, that.integrationKey) + && Objects.equals(integrationKeySet, that.integrationKeySet); + } + + @Override + public int hashCode() { + 
return Objects.hash(integrationKey, integrationKeySet); + } + + @Override + public String toString() { + return new ToStringer(PagerdutyConfigPb.class) + .add("integrationKey", integrationKey) + .add("integrationKeySet", integrationKeySet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionId.java index edd9e8548..bbc94c5d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionId.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionId.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Partition by workspace or account */ @Generated +@JsonSerialize(using = PartitionId.PartitionIdSerializer.class) +@JsonDeserialize(using = PartitionId.PartitionIdDeserializer.class) public class PartitionId { /** The ID of the workspace. 
*/ - @JsonProperty("workspaceId") private Long workspaceId; public PartitionId setWorkspaceId(Long workspaceId) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(PartitionId.class).add("workspaceId", workspaceId).toString(); } + + PartitionIdPb toPb() { + PartitionIdPb pb = new PartitionIdPb(); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static PartitionId fromPb(PartitionIdPb pb) { + PartitionId model = new PartitionId(); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class PartitionIdSerializer extends JsonSerializer { + @Override + public void serialize(PartitionId value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PartitionIdPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PartitionIdDeserializer extends JsonDeserializer { + @Override + public PartitionId deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PartitionIdPb pb = mapper.readValue(p, PartitionIdPb.class); + return PartitionId.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionIdPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionIdPb.java new file mode 100755 index 000000000..e40acf9fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionIdPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Partition by workspace or account */ +@Generated +class PartitionIdPb { + @JsonProperty("workspaceId") + private Long workspaceId; + + public PartitionIdPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PartitionIdPb that = (PartitionIdPb) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(PartitionIdPb.class).add("workspaceId", workspaceId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java index f584b80af..aa8d9261c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java @@ -24,7 +24,7 @@ public DeletePersonalComputeSettingResponse delete(DeletePersonalComputeSettingR apiClient.configuredAccountID()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeletePersonalComputeSettingResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public PersonalComputeSetting get(GetPersonalComputeSettingRequest request) { apiClient.configuredAccountID()); try { Request req = 
new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, PersonalComputeSetting.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public PersonalComputeSetting update(UpdatePersonalComputeSettingRequest request apiClient.configuredAccountID()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PersonalComputeSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java index 5dfe405f2..c89960b79 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PersonalComputeMessage.PersonalComputeMessageSerializer.class) +@JsonDeserialize(using = 
PersonalComputeMessage.PersonalComputeMessageDeserializer.class) public class PersonalComputeMessage { /** * ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing @@ -16,7 +27,6 @@ public class PersonalComputeMessage { * groups to be added to the ACLs of that workspace’s Personal Compute default policy before they * will be able to create compute resources through that policy. */ - @JsonProperty("value") private PersonalComputeMessageEnum value; public PersonalComputeMessage setValue(PersonalComputeMessageEnum value) { @@ -45,4 +55,41 @@ public int hashCode() { public String toString() { return new ToStringer(PersonalComputeMessage.class).add("value", value).toString(); } + + PersonalComputeMessagePb toPb() { + PersonalComputeMessagePb pb = new PersonalComputeMessagePb(); + pb.setValue(value); + + return pb; + } + + static PersonalComputeMessage fromPb(PersonalComputeMessagePb pb) { + PersonalComputeMessage model = new PersonalComputeMessage(); + model.setValue(pb.getValue()); + + return model; + } + + public static class PersonalComputeMessageSerializer + extends JsonSerializer { + @Override + public void serialize( + PersonalComputeMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PersonalComputeMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PersonalComputeMessageDeserializer + extends JsonDeserializer { + @Override + public PersonalComputeMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PersonalComputeMessagePb pb = mapper.readValue(p, PersonalComputeMessagePb.class); + return PersonalComputeMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessagePb.java new file mode 100755 index 000000000..5605468c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessagePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PersonalComputeMessagePb { + @JsonProperty("value") + private PersonalComputeMessageEnum value; + + public PersonalComputeMessagePb setValue(PersonalComputeMessageEnum value) { + this.value = value; + return this; + } + + public PersonalComputeMessageEnum getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PersonalComputeMessagePb that = (PersonalComputeMessagePb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(PersonalComputeMessagePb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java index 6f1d07b73..0a36a9640 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PersonalComputeSetting.PersonalComputeSettingSerializer.class) +@JsonDeserialize(using = PersonalComputeSetting.PersonalComputeSettingDeserializer.class) public class PersonalComputeSetting { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -17,11 +28,9 @@ public class PersonalComputeSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** */ - @JsonProperty("personal_compute") private PersonalComputeMessage personalCompute; /** @@ -30,7 +39,6 @@ public class PersonalComputeSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public PersonalComputeSetting setEtag(String etag) { @@ -83,4 +91,45 @@ public String toString() { .add("settingName", settingName) .toString(); } + + PersonalComputeSettingPb toPb() { + PersonalComputeSettingPb pb = new PersonalComputeSettingPb(); + pb.setEtag(etag); + pb.setPersonalCompute(personalCompute); + pb.setSettingName(settingName); + + return pb; + } + + static PersonalComputeSetting fromPb(PersonalComputeSettingPb pb) { + PersonalComputeSetting model = new PersonalComputeSetting(); + model.setEtag(pb.getEtag()); + model.setPersonalCompute(pb.getPersonalCompute()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class PersonalComputeSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + PersonalComputeSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PersonalComputeSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PersonalComputeSettingDeserializer + extends JsonDeserializer { + @Override + public PersonalComputeSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PersonalComputeSettingPb pb = mapper.readValue(p, PersonalComputeSettingPb.class); + return PersonalComputeSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSettingPb.java new file mode 100755 index 000000000..156e497d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSettingPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PersonalComputeSettingPb { + @JsonProperty("etag") + private String etag; + + @JsonProperty("personal_compute") + private PersonalComputeMessage personalCompute; + + @JsonProperty("setting_name") + private String settingName; + + public PersonalComputeSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public PersonalComputeSettingPb setPersonalCompute(PersonalComputeMessage personalCompute) { + this.personalCompute = personalCompute; + return this; + } + + public PersonalComputeMessage getPersonalCompute() { + return personalCompute; + } + + public PersonalComputeSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PersonalComputeSettingPb that = (PersonalComputeSettingPb) o; + return Objects.equals(etag, that.etag) + && Objects.equals(personalCompute, that.personalCompute) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(etag, personalCompute, settingName); + } + + @Override + public String toString() { + return new ToStringer(PersonalComputeSettingPb.class) + .add("etag", etag) + .add("personalCompute", personalCompute) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java index 669916616..91e2bf3a7 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PublicTokenInfo.PublicTokenInfoSerializer.class) +@JsonDeserialize(using = PublicTokenInfo.PublicTokenInfoDeserializer.class) public class PublicTokenInfo { /** Comment the token was created with, if applicable. */ - @JsonProperty("comment") private String comment; /** Server time (in epoch milliseconds) when the token was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Server time (in epoch milliseconds) when the token will expire, or -1 if not applicable. */ - @JsonProperty("expiry_time") private Long expiryTime; /** The ID of this token. 
*/ - @JsonProperty("token_id") private String tokenId; public PublicTokenInfo setComment(String comment) { @@ -86,4 +93,44 @@ public String toString() { .add("tokenId", tokenId) .toString(); } + + PublicTokenInfoPb toPb() { + PublicTokenInfoPb pb = new PublicTokenInfoPb(); + pb.setComment(comment); + pb.setCreationTime(creationTime); + pb.setExpiryTime(expiryTime); + pb.setTokenId(tokenId); + + return pb; + } + + static PublicTokenInfo fromPb(PublicTokenInfoPb pb) { + PublicTokenInfo model = new PublicTokenInfo(); + model.setComment(pb.getComment()); + model.setCreationTime(pb.getCreationTime()); + model.setExpiryTime(pb.getExpiryTime()); + model.setTokenId(pb.getTokenId()); + + return model; + } + + public static class PublicTokenInfoSerializer extends JsonSerializer { + @Override + public void serialize(PublicTokenInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PublicTokenInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PublicTokenInfoDeserializer extends JsonDeserializer { + @Override + public PublicTokenInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PublicTokenInfoPb pb = mapper.readValue(p, PublicTokenInfoPb.class); + return PublicTokenInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfoPb.java new file mode 100755 index 000000000..f9f5f57f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfoPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PublicTokenInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("expiry_time") + private Long expiryTime; + + @JsonProperty("token_id") + private String tokenId; + + public PublicTokenInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public PublicTokenInfoPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public PublicTokenInfoPb setExpiryTime(Long expiryTime) { + this.expiryTime = expiryTime; + return this; + } + + public Long getExpiryTime() { + return expiryTime; + } + + public PublicTokenInfoPb setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PublicTokenInfoPb that = (PublicTokenInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(expiryTime, that.expiryTime) + && Objects.equals(tokenId, that.tokenId); + } + + @Override + public int hashCode() { + return Objects.hash(comment, creationTime, expiryTime, tokenId); + } + + @Override + public String toString() { + return new ToStringer(PublicTokenInfoPb.class) + .add("comment", comment) + .add("creationTime", creationTime) + .add("expiryTime", expiryTime) + .add("tokenId", tokenId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java index b7c9d65e1..3d8379841 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java @@ -4,27 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Details required to replace an IP access list. */ @Generated +@JsonSerialize(using = ReplaceIpAccessList.ReplaceIpAccessListSerializer.class) +@JsonDeserialize(using = ReplaceIpAccessList.ReplaceIpAccessListDeserializer.class) public class ReplaceIpAccessList { /** Specifies whether this IP access list is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; /** */ - @JsonProperty("ip_addresses") private Collection ipAddresses; /** Label for the IP access list. This **cannot** be empty. 
*/ - @JsonProperty("label") private String label; /** @@ -34,7 +41,6 @@ public class ReplaceIpAccessList { * or range. IP addresses in the block list are excluded even if they are included in an allow * list. */ - @JsonProperty("list_type") private ListType listType; public ReplaceIpAccessList setEnabled(Boolean enabled) { @@ -109,4 +115,47 @@ public String toString() { .add("listType", listType) .toString(); } + + ReplaceIpAccessListPb toPb() { + ReplaceIpAccessListPb pb = new ReplaceIpAccessListPb(); + pb.setEnabled(enabled); + pb.setIpAccessListId(ipAccessListId); + pb.setIpAddresses(ipAddresses); + pb.setLabel(label); + pb.setListType(listType); + + return pb; + } + + static ReplaceIpAccessList fromPb(ReplaceIpAccessListPb pb) { + ReplaceIpAccessList model = new ReplaceIpAccessList(); + model.setEnabled(pb.getEnabled()); + model.setIpAccessListId(pb.getIpAccessListId()); + model.setIpAddresses(pb.getIpAddresses()); + model.setLabel(pb.getLabel()); + model.setListType(pb.getListType()); + + return model; + } + + public static class ReplaceIpAccessListSerializer extends JsonSerializer { + @Override + public void serialize(ReplaceIpAccessList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReplaceIpAccessListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ReplaceIpAccessListDeserializer + extends JsonDeserializer { + @Override + public ReplaceIpAccessList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReplaceIpAccessListPb pb = mapper.readValue(p, ReplaceIpAccessListPb.class); + return ReplaceIpAccessList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessListPb.java new file mode 100755 index 000000000..5a297e66e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessListPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Details required to replace an IP access list. 
*/ +@Generated +class ReplaceIpAccessListPb { + @JsonProperty("enabled") + private Boolean enabled; + + @JsonIgnore private String ipAccessListId; + + @JsonProperty("ip_addresses") + private Collection ipAddresses; + + @JsonProperty("label") + private String label; + + @JsonProperty("list_type") + private ListType listType; + + public ReplaceIpAccessListPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public ReplaceIpAccessListPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + public ReplaceIpAccessListPb setIpAddresses(Collection ipAddresses) { + this.ipAddresses = ipAddresses; + return this; + } + + public Collection getIpAddresses() { + return ipAddresses; + } + + public ReplaceIpAccessListPb setLabel(String label) { + this.label = label; + return this; + } + + public String getLabel() { + return label; + } + + public ReplaceIpAccessListPb setListType(ListType listType) { + this.listType = listType; + return this; + } + + public ListType getListType() { + return listType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReplaceIpAccessListPb that = (ReplaceIpAccessListPb) o; + return Objects.equals(enabled, that.enabled) + && Objects.equals(ipAccessListId, that.ipAccessListId) + && Objects.equals(ipAddresses, that.ipAddresses) + && Objects.equals(label, that.label) + && Objects.equals(listType, that.listType); + } + + @Override + public int hashCode() { + return Objects.hash(enabled, ipAccessListId, ipAddresses, label, listType); + } + + @Override + public String toString() { + return new ToStringer(ReplaceIpAccessListPb.class) + .add("enabled", enabled) + .add("ipAccessListId", ipAccessListId) + .add("ipAddresses", ipAddresses) + .add("label", label) + 
.add("listType", listType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java index 5a3ba7e51..edcc09a7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ReplaceResponse.ReplaceResponseSerializer.class) +@JsonDeserialize(using = ReplaceResponse.ReplaceResponseDeserializer.class) public class ReplaceResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(ReplaceResponse.class).toString(); } + + ReplaceResponsePb toPb() { + ReplaceResponsePb pb = new ReplaceResponsePb(); + + return pb; + } + + static ReplaceResponse fromPb(ReplaceResponsePb pb) { + ReplaceResponse model = new ReplaceResponse(); + + return model; + } + + public static class ReplaceResponseSerializer extends JsonSerializer { + @Override + public void serialize(ReplaceResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ReplaceResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); 
+ } + } + + public static class ReplaceResponseDeserializer extends JsonDeserializer { + @Override + public ReplaceResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ReplaceResponsePb pb = mapper.readValue(p, ReplaceResponsePb.class); + return ReplaceResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponsePb.java new file mode 100755 index 000000000..0cbd0015b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ReplaceResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ReplaceResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java index 69e5aa56a..2753b9eca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java @@ -22,7 +22,7 @@ public 
DeleteRestrictWorkspaceAdminsSettingResponse delete( String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteRestrictWorkspaceAdminsSettingResponse.class); } catch (IOException e) { @@ -35,7 +35,7 @@ public RestrictWorkspaceAdminsSetting get(GetRestrictWorkspaceAdminsSettingReque String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RestrictWorkspaceAdminsSetting.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public RestrictWorkspaceAdminsSetting update( String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RestrictWorkspaceAdminsSetting.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java index 75a74aa91..34e02d67b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = RestrictWorkspaceAdminsMessage.RestrictWorkspaceAdminsMessageSerializer.class) +@JsonDeserialize( + using = RestrictWorkspaceAdminsMessage.RestrictWorkspaceAdminsMessageDeserializer.class) public class RestrictWorkspaceAdminsMessage { /** */ - @JsonProperty("status") private RestrictWorkspaceAdminsMessageStatus status; public RestrictWorkspaceAdminsMessage setStatus(RestrictWorkspaceAdminsMessageStatus status) { @@ -39,4 +51,42 @@ public int hashCode() { public String toString() { return new ToStringer(RestrictWorkspaceAdminsMessage.class).add("status", status).toString(); } + + RestrictWorkspaceAdminsMessagePb toPb() { + RestrictWorkspaceAdminsMessagePb pb = new RestrictWorkspaceAdminsMessagePb(); + pb.setStatus(status); + + return pb; + } + + static RestrictWorkspaceAdminsMessage fromPb(RestrictWorkspaceAdminsMessagePb pb) { + RestrictWorkspaceAdminsMessage model = new RestrictWorkspaceAdminsMessage(); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class RestrictWorkspaceAdminsMessageSerializer + extends JsonSerializer { + @Override + public void serialize( + RestrictWorkspaceAdminsMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestrictWorkspaceAdminsMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestrictWorkspaceAdminsMessageDeserializer + extends 
JsonDeserializer { + @Override + public RestrictWorkspaceAdminsMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestrictWorkspaceAdminsMessagePb pb = + mapper.readValue(p, RestrictWorkspaceAdminsMessagePb.class); + return RestrictWorkspaceAdminsMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessagePb.java similarity index 55% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessagePb.java index 3476fb9ef..047809e59 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessagePb.java @@ -1,26 +1,23 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.settings; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; @Generated -public class CancelQueryExecutionResponse { - /** */ +class RestrictWorkspaceAdminsMessagePb { @JsonProperty("status") - private Collection status; + private RestrictWorkspaceAdminsMessageStatus status; - public CancelQueryExecutionResponse setStatus( - Collection status) { + public RestrictWorkspaceAdminsMessagePb setStatus(RestrictWorkspaceAdminsMessageStatus status) { this.status = status; return this; } - public Collection getStatus() { + public RestrictWorkspaceAdminsMessageStatus getStatus() { return status; } @@ -28,7 +25,7 @@ public Collection getStatus() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o; + RestrictWorkspaceAdminsMessagePb that = (RestrictWorkspaceAdminsMessagePb) o; return Objects.equals(status, that.status); } @@ -39,6 +36,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString(); + return new ToStringer(RestrictWorkspaceAdminsMessagePb.class).add("status", status).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java index 2eaf3a50f..3da514d54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java @@ -4,10 +4,23 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = RestrictWorkspaceAdminsSetting.RestrictWorkspaceAdminsSettingSerializer.class) +@JsonDeserialize( + using = RestrictWorkspaceAdminsSetting.RestrictWorkspaceAdminsSettingDeserializer.class) public class RestrictWorkspaceAdminsSetting { /** * etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -17,11 +30,9 @@ public class RestrictWorkspaceAdminsSetting { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** */ - @JsonProperty("restrict_workspace_admins") private RestrictWorkspaceAdminsMessage restrictWorkspaceAdmins; /** @@ -30,7 +41,6 @@ public class RestrictWorkspaceAdminsSetting { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public RestrictWorkspaceAdminsSetting setEtag(String etag) { @@ -84,4 +94,46 @@ public String toString() { .add("settingName", settingName) .toString(); } + + RestrictWorkspaceAdminsSettingPb toPb() { + RestrictWorkspaceAdminsSettingPb pb = new RestrictWorkspaceAdminsSettingPb(); + pb.setEtag(etag); + pb.setRestrictWorkspaceAdmins(restrictWorkspaceAdmins); + pb.setSettingName(settingName); + + return pb; + } + + static RestrictWorkspaceAdminsSetting fromPb(RestrictWorkspaceAdminsSettingPb pb) { + RestrictWorkspaceAdminsSetting model = new RestrictWorkspaceAdminsSetting(); + model.setEtag(pb.getEtag()); + model.setRestrictWorkspaceAdmins(pb.getRestrictWorkspaceAdmins()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class RestrictWorkspaceAdminsSettingSerializer + extends JsonSerializer { + @Override + public void serialize( + RestrictWorkspaceAdminsSetting value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestrictWorkspaceAdminsSettingPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestrictWorkspaceAdminsSettingDeserializer + extends JsonDeserializer { + @Override + public RestrictWorkspaceAdminsSetting deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestrictWorkspaceAdminsSettingPb pb = + mapper.readValue(p, RestrictWorkspaceAdminsSettingPb.class); + return RestrictWorkspaceAdminsSetting.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSettingPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSettingPb.java new file mode 100755 index 000000000..75f77f578 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSettingPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RestrictWorkspaceAdminsSettingPb { + @JsonProperty("etag") + private String etag; + + @JsonProperty("restrict_workspace_admins") + private RestrictWorkspaceAdminsMessage restrictWorkspaceAdmins; + + @JsonProperty("setting_name") + private String settingName; + + public RestrictWorkspaceAdminsSettingPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public RestrictWorkspaceAdminsSettingPb setRestrictWorkspaceAdmins( + RestrictWorkspaceAdminsMessage restrictWorkspaceAdmins) { + this.restrictWorkspaceAdmins = restrictWorkspaceAdmins; + return this; + } + + public RestrictWorkspaceAdminsMessage getRestrictWorkspaceAdmins() { + return restrictWorkspaceAdmins; + } + + public RestrictWorkspaceAdminsSettingPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + RestrictWorkspaceAdminsSettingPb that = (RestrictWorkspaceAdminsSettingPb) o; + return Objects.equals(etag, that.etag) + && Objects.equals(restrictWorkspaceAdmins, that.restrictWorkspaceAdmins) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(etag, restrictWorkspaceAdmins, settingName); + } + + @Override + public String toString() { + return new ToStringer(RestrictWorkspaceAdminsSettingPb.class) + .add("etag", etag) + .add("restrictWorkspaceAdmins", restrictWorkspaceAdmins) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java index 88faa9602..e585cb465 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RevokeTokenRequest.RevokeTokenRequestSerializer.class) +@JsonDeserialize(using = RevokeTokenRequest.RevokeTokenRequestDeserializer.class) 
public class RevokeTokenRequest { /** The ID of the token to be revoked. */ - @JsonProperty("token_id") private String tokenId; public RevokeTokenRequest setTokenId(String tokenId) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(RevokeTokenRequest.class).add("tokenId", tokenId).toString(); } + + RevokeTokenRequestPb toPb() { + RevokeTokenRequestPb pb = new RevokeTokenRequestPb(); + pb.setTokenId(tokenId); + + return pb; + } + + static RevokeTokenRequest fromPb(RevokeTokenRequestPb pb) { + RevokeTokenRequest model = new RevokeTokenRequest(); + model.setTokenId(pb.getTokenId()); + + return model; + } + + public static class RevokeTokenRequestSerializer extends JsonSerializer { + @Override + public void serialize(RevokeTokenRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RevokeTokenRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RevokeTokenRequestDeserializer extends JsonDeserializer { + @Override + public RevokeTokenRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RevokeTokenRequestPb pb = mapper.readValue(p, RevokeTokenRequestPb.class); + return RevokeTokenRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequestPb.java new file mode 100755 index 000000000..037cfd2e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RevokeTokenRequestPb { + @JsonProperty("token_id") + private String tokenId; + + public RevokeTokenRequestPb setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RevokeTokenRequestPb that = (RevokeTokenRequestPb) o; + return Objects.equals(tokenId, that.tokenId); + } + + @Override + public int hashCode() { + return Objects.hash(tokenId); + } + + @Override + public String toString() { + return new ToStringer(RevokeTokenRequestPb.class).add("tokenId", tokenId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java index 63ac738c7..44ed4fa29 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RevokeTokenResponse.RevokeTokenResponseSerializer.class) +@JsonDeserialize(using = RevokeTokenResponse.RevokeTokenResponseDeserializer.class) public class RevokeTokenResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(RevokeTokenResponse.class).toString(); } + + RevokeTokenResponsePb toPb() { + RevokeTokenResponsePb pb = new RevokeTokenResponsePb(); + + return pb; + } + + static RevokeTokenResponse fromPb(RevokeTokenResponsePb pb) { + RevokeTokenResponse model = new RevokeTokenResponse(); + + return model; + } + + public static class RevokeTokenResponseSerializer extends JsonSerializer { + @Override + public void serialize(RevokeTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RevokeTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RevokeTokenResponseDeserializer + extends JsonDeserializer { + @Override + public RevokeTokenResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RevokeTokenResponsePb pb = mapper.readValue(p, RevokeTokenResponsePb.class); + return RevokeTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponsePb.java new file mode 100755 index 000000000..dc3f03b82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RevokeTokenResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RevokeTokenResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java index 6d4a07fab..66f2b1d8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SetStatusResponse.SetStatusResponseSerializer.class) +@JsonDeserialize(using = SetStatusResponse.SetStatusResponseDeserializer.class) public class SetStatusResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new 
ToStringer(SetStatusResponse.class).toString(); } + + SetStatusResponsePb toPb() { + SetStatusResponsePb pb = new SetStatusResponsePb(); + + return pb; + } + + static SetStatusResponse fromPb(SetStatusResponsePb pb) { + SetStatusResponse model = new SetStatusResponse(); + + return model; + } + + public static class SetStatusResponseSerializer extends JsonSerializer { + @Override + public void serialize(SetStatusResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetStatusResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetStatusResponseDeserializer extends JsonDeserializer { + @Override + public SetStatusResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetStatusResponsePb pb = mapper.readValue(p, SetStatusResponsePb.class); + return SetStatusResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponsePb.java new file mode 100755 index 000000000..59dd44f36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetStatusResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetStatusResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfig.java index 1dc105082..421a1ef2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfig.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SlackConfig.SlackConfigSerializer.class) +@JsonDeserialize(using = SlackConfig.SlackConfigDeserializer.class) public class SlackConfig { /** [Input-Only] URL for Slack destination. */ - @JsonProperty("url") private String url; /** [Output-Only] Whether URL is set. 
*/ - @JsonProperty("url_set") private Boolean urlSet; public SlackConfig setUrl(String url) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(SlackConfig.class).add("url", url).add("urlSet", urlSet).toString(); } + + SlackConfigPb toPb() { + SlackConfigPb pb = new SlackConfigPb(); + pb.setUrl(url); + pb.setUrlSet(urlSet); + + return pb; + } + + static SlackConfig fromPb(SlackConfigPb pb) { + SlackConfig model = new SlackConfig(); + model.setUrl(pb.getUrl()); + model.setUrlSet(pb.getUrlSet()); + + return model; + } + + public static class SlackConfigSerializer extends JsonSerializer { + @Override + public void serialize(SlackConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SlackConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SlackConfigDeserializer extends JsonDeserializer { + @Override + public SlackConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SlackConfigPb pb = mapper.readValue(p, SlackConfigPb.class); + return SlackConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfigPb.java new file mode 100755 index 000000000..6ffb469f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SlackConfigPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SlackConfigPb { + @JsonProperty("url") + private String url; + + @JsonProperty("url_set") + private Boolean urlSet; + + public SlackConfigPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + public SlackConfigPb setUrlSet(Boolean urlSet) { + this.urlSet = urlSet; + return this; + } + + public Boolean getUrlSet() { + return urlSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SlackConfigPb that = (SlackConfigPb) o; + return Objects.equals(url, that.url) && Objects.equals(urlSet, that.urlSet); + } + + @Override + public int hashCode() { + return Objects.hash(url, urlSet); + } + + @Override + public String toString() { + return new ToStringer(SlackConfigPb.class).add("url", url).add("urlSet", urlSet).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownload.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownload.java index b15b7f669..4a7f0cc97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownload.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownload.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SqlResultsDownload.SqlResultsDownloadSerializer.class) +@JsonDeserialize(using = SqlResultsDownload.SqlResultsDownloadDeserializer.class) public class SqlResultsDownload { /** */ - @JsonProperty("boolean_val") private BooleanMessage booleanVal; /** @@ -21,7 +31,6 @@ public class SqlResultsDownload { * etag from a GET request, and pass it with the PATCH request to identify the setting version you * are updating. */ - @JsonProperty("etag") private String etag; /** @@ -30,7 +39,6 @@ public class SqlResultsDownload { * respected instead. Setting name is required to be 'default' if the setting only has one * instance per workspace. 
*/ - @JsonProperty("setting_name") private String settingName; public SqlResultsDownload setBooleanVal(BooleanMessage booleanVal) { @@ -83,4 +91,42 @@ public String toString() { .add("settingName", settingName) .toString(); } + + SqlResultsDownloadPb toPb() { + SqlResultsDownloadPb pb = new SqlResultsDownloadPb(); + pb.setBooleanVal(booleanVal); + pb.setEtag(etag); + pb.setSettingName(settingName); + + return pb; + } + + static SqlResultsDownload fromPb(SqlResultsDownloadPb pb) { + SqlResultsDownload model = new SqlResultsDownload(); + model.setBooleanVal(pb.getBooleanVal()); + model.setEtag(pb.getEtag()); + model.setSettingName(pb.getSettingName()); + + return model; + } + + public static class SqlResultsDownloadSerializer extends JsonSerializer { + @Override + public void serialize(SqlResultsDownload value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SqlResultsDownloadPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SqlResultsDownloadDeserializer extends JsonDeserializer { + @Override + public SqlResultsDownload deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SqlResultsDownloadPb pb = mapper.readValue(p, SqlResultsDownloadPb.class); + return SqlResultsDownload.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadImpl.java index db09dc70e..5e595d2eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadImpl.java @@ -21,7 +21,7 @@ public DeleteSqlResultsDownloadResponse delete(DeleteSqlResultsDownloadRequest r String path = "/api/2.0/settings/types/sql_results_download/names/default"; try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteSqlResultsDownloadResponse.class); } catch (IOException e) { @@ -34,7 +34,7 @@ public SqlResultsDownload get(GetSqlResultsDownloadRequest request) { String path = "/api/2.0/settings/types/sql_results_download/names/default"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, SqlResultsDownload.class); } catch (IOException e) { @@ -47,7 +47,7 @@ public SqlResultsDownload update(UpdateSqlResultsDownloadRequest request) { String path = "/api/2.0/settings/types/sql_results_download/names/default"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SqlResultsDownload.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadPb.java new file mode 100755 index 000000000..1303a1d4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SqlResultsDownloadPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SqlResultsDownloadPb { + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + @JsonProperty("etag") + private String etag; + + @JsonProperty("setting_name") + private String settingName; + + public SqlResultsDownloadPb setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public SqlResultsDownloadPb setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public SqlResultsDownloadPb setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SqlResultsDownloadPb that = (SqlResultsDownloadPb) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(SqlResultsDownloadPb.class) + .add("booleanVal", booleanVal) 
+ .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessage.java index bc340ea61..1ae27560a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessage.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StringMessage.StringMessageSerializer.class) +@JsonDeserialize(using = StringMessage.StringMessageDeserializer.class) public class StringMessage { /** Represents a generic string value. 
*/ - @JsonProperty("value") private String value; public StringMessage setValue(String value) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(StringMessage.class).add("value", value).toString(); } + + StringMessagePb toPb() { + StringMessagePb pb = new StringMessagePb(); + pb.setValue(value); + + return pb; + } + + static StringMessage fromPb(StringMessagePb pb) { + StringMessage model = new StringMessage(); + model.setValue(pb.getValue()); + + return model; + } + + public static class StringMessageSerializer extends JsonSerializer { + @Override + public void serialize(StringMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StringMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StringMessageDeserializer extends JsonDeserializer { + @Override + public StringMessage deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StringMessagePb pb = mapper.readValue(p, StringMessagePb.class); + return StringMessage.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessagePb.java new file mode 100755 index 000000000..a3b74ce01 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/StringMessagePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StringMessagePb { + @JsonProperty("value") + private String value; + + public StringMessagePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StringMessagePb that = (StringMessagePb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(StringMessagePb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java index 692f87aee..06408a550 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenAccessControlRequest.TokenAccessControlRequestSerializer.class) +@JsonDeserialize(using = TokenAccessControlRequest.TokenAccessControlRequestDeserializer.class) public class TokenAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public TokenAccessControlRequest setGroupName(String groupName) { @@ -86,4 +93,47 @@ public String toString() { .add("userName", userName) .toString(); } + + TokenAccessControlRequestPb toPb() { + TokenAccessControlRequestPb pb = new TokenAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static TokenAccessControlRequest fromPb(TokenAccessControlRequestPb pb) { + TokenAccessControlRequest model = new TokenAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class TokenAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + TokenAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public 
TokenAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenAccessControlRequestPb pb = mapper.readValue(p, TokenAccessControlRequestPb.class); + return TokenAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequestPb.java new file mode 100755 index 000000000..35cca83a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private TokenPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public TokenAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public TokenAccessControlRequestPb setPermissionLevel(TokenPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public TokenPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public TokenAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = 
servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public TokenAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenAccessControlRequestPb that = (TokenAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(TokenAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponse.java index 5b1fc025a..2ebabd834 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; 
+import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TokenAccessControlResponse.TokenAccessControlResponseSerializer.class) +@JsonDeserialize(using = TokenAccessControlResponse.TokenAccessControlResponseDeserializer.class) public class TokenAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public TokenAccessControlResponse setAllPermissions(Collection allPermissions) { @@ -102,4 +108,49 @@ public String toString() { .add("userName", userName) .toString(); } + + TokenAccessControlResponsePb toPb() { + TokenAccessControlResponsePb pb = new TokenAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static TokenAccessControlResponse fromPb(TokenAccessControlResponsePb pb) { + TokenAccessControlResponse model = new TokenAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + 
return model; + } + + public static class TokenAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + TokenAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public TokenAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenAccessControlResponsePb pb = mapper.readValue(p, TokenAccessControlResponsePb.class); + return TokenAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponsePb.java new file mode 100755 index 000000000..604976c6b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TokenAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public TokenAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public TokenAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public TokenAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public TokenAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public TokenAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenAccessControlResponsePb that = (TokenAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && 
Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(TokenAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java index 08e227383..350d62871 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java @@ -4,45 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenInfo.TokenInfoSerializer.class) +@JsonDeserialize(using = TokenInfo.TokenInfoDeserializer.class) public class TokenInfo { /** Comment that describes the purpose of the token, specified by the token creator. 
*/ - @JsonProperty("comment") private String comment; /** User ID of the user that created the token. */ - @JsonProperty("created_by_id") private Long createdById; /** Username of the user that created the token. */ - @JsonProperty("created_by_username") private String createdByUsername; /** Timestamp when the token was created. */ - @JsonProperty("creation_time") private Long creationTime; /** Timestamp when the token expires. */ - @JsonProperty("expiry_time") private Long expiryTime; /** Approximate timestamp for the day the token was last used. Accurate up to 1 day. */ - @JsonProperty("last_used_day") private Long lastUsedDay; /** User ID of the user that owns the token. */ - @JsonProperty("owner_id") private Long ownerId; /** ID of the token. */ - @JsonProperty("token_id") private String tokenId; /** If applicable, the ID of the workspace that the token was created in. */ - @JsonProperty("workspace_id") private Long workspaceId; public TokenInfo setComment(String comment) { @@ -170,4 +172,53 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + TokenInfoPb toPb() { + TokenInfoPb pb = new TokenInfoPb(); + pb.setComment(comment); + pb.setCreatedById(createdById); + pb.setCreatedByUsername(createdByUsername); + pb.setCreationTime(creationTime); + pb.setExpiryTime(expiryTime); + pb.setLastUsedDay(lastUsedDay); + pb.setOwnerId(ownerId); + pb.setTokenId(tokenId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static TokenInfo fromPb(TokenInfoPb pb) { + TokenInfo model = new TokenInfo(); + model.setComment(pb.getComment()); + model.setCreatedById(pb.getCreatedById()); + model.setCreatedByUsername(pb.getCreatedByUsername()); + model.setCreationTime(pb.getCreationTime()); + model.setExpiryTime(pb.getExpiryTime()); + model.setLastUsedDay(pb.getLastUsedDay()); + model.setOwnerId(pb.getOwnerId()); + model.setTokenId(pb.getTokenId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class 
TokenInfoSerializer extends JsonSerializer { + @Override + public void serialize(TokenInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenInfoDeserializer extends JsonDeserializer { + @Override + public TokenInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenInfoPb pb = mapper.readValue(p, TokenInfoPb.class); + return TokenInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfoPb.java new file mode 100755 index 000000000..0f7c93dd8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfoPb.java @@ -0,0 +1,164 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_by_id") + private Long createdById; + + @JsonProperty("created_by_username") + private String createdByUsername; + + @JsonProperty("creation_time") + private Long creationTime; + + @JsonProperty("expiry_time") + private Long expiryTime; + + @JsonProperty("last_used_day") + private Long lastUsedDay; + + @JsonProperty("owner_id") + private Long ownerId; + + @JsonProperty("token_id") + private String tokenId; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public TokenInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public TokenInfoPb setCreatedById(Long createdById) { + this.createdById = createdById; + return this; + } + + public Long getCreatedById() { + return createdById; + } + + public TokenInfoPb setCreatedByUsername(String createdByUsername) { + this.createdByUsername = createdByUsername; + return this; + } + + public String getCreatedByUsername() { + return createdByUsername; + } + + public TokenInfoPb setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public TokenInfoPb setExpiryTime(Long expiryTime) { + this.expiryTime = expiryTime; + return this; + } + + public Long getExpiryTime() { + return expiryTime; + } + + public TokenInfoPb setLastUsedDay(Long lastUsedDay) { + this.lastUsedDay = lastUsedDay; + return this; + } + + public Long getLastUsedDay() { + return lastUsedDay; + } + + public TokenInfoPb setOwnerId(Long ownerId) { + this.ownerId = ownerId; + return this; + } + + public Long getOwnerId() { + return 
ownerId; + } + + public TokenInfoPb setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + public TokenInfoPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenInfoPb that = (TokenInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdById, that.createdById) + && Objects.equals(createdByUsername, that.createdByUsername) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(expiryTime, that.expiryTime) + && Objects.equals(lastUsedDay, that.lastUsedDay) + && Objects.equals(ownerId, that.ownerId) + && Objects.equals(tokenId, that.tokenId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + createdById, + createdByUsername, + creationTime, + expiryTime, + lastUsedDay, + ownerId, + tokenId, + workspaceId); + } + + @Override + public String toString() { + return new ToStringer(TokenInfoPb.class) + .add("comment", comment) + .add("createdById", createdById) + .add("createdByUsername", createdByUsername) + .add("creationTime", creationTime) + .add("expiryTime", expiryTime) + .add("lastUsedDay", lastUsedDay) + .add("ownerId", ownerId) + .add("tokenId", tokenId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java index 8264976e5..929923535 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java @@ -21,7 +21,7 @@ public CreateOboTokenResponse createOboToken(CreateOboTokenRequest request) { String path = "/api/2.0/token-management/on-behalf-of/tokens"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateOboTokenResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteTokenManagementRequest request) { String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public GetTokenResponse get(GetTokenManagementRequest request) { String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetTokenResponse.class); } catch (IOException e) { @@ -84,7 +84,7 @@ public ListTokensResponse list(ListTokenManagementRequest request) { String path = "/api/2.0/token-management/tokens"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListTokensResponse.class); } catch (IOException e) { @@ -97,7 +97,7 @@ public TokenPermissions setPermissions(TokenPermissionsRequest request) { String path = "/api/2.0/permissions/authorization/tokens"; try { Request req = new 
Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TokenPermissions.class); @@ -111,7 +111,7 @@ public TokenPermissions updatePermissions(TokenPermissionsRequest request) { String path = "/api/2.0/permissions/authorization/tokens"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, TokenPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java index 013077476..c2d05bb2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
TokenPermission.TokenPermissionSerializer.class) +@JsonDeserialize(using = TokenPermission.TokenPermissionDeserializer.class) public class TokenPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; public TokenPermission setInherited(Boolean inherited) { @@ -72,4 +80,42 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + TokenPermissionPb toPb() { + TokenPermissionPb pb = new TokenPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static TokenPermission fromPb(TokenPermissionPb pb) { + TokenPermission model = new TokenPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class TokenPermissionSerializer extends JsonSerializer { + @Override + public void serialize(TokenPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenPermissionDeserializer extends JsonDeserializer { + @Override + public TokenPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenPermissionPb pb = mapper.readValue(p, TokenPermissionPb.class); + return TokenPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionPb.java new file mode 100755 index 000000000..a15571657 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TokenPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private TokenPermissionLevel permissionLevel; + + public TokenPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public TokenPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public TokenPermissionPb setPermissionLevel(TokenPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public TokenPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenPermissionPb that = (TokenPermissionPb) o; + return 
Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(TokenPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissions.java index 349d1258a..b5eeca0f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TokenPermissions.TokenPermissionsSerializer.class) +@JsonDeserialize(using = TokenPermissions.TokenPermissionsDeserializer.class) public class TokenPermissions { /** */ - @JsonProperty("access_control_list") private Collection 
accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public TokenPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + TokenPermissionsPb toPb() { + TokenPermissionsPb pb = new TokenPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static TokenPermissions fromPb(TokenPermissionsPb pb) { + TokenPermissions model = new TokenPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class TokenPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(TokenPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenPermissionsDeserializer extends JsonDeserializer { + @Override + public TokenPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenPermissionsPb pb = mapper.readValue(p, TokenPermissionsPb.class); + return TokenPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java index 1ac62cbf5..79be0f82e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TokenPermissionsDescription.TokenPermissionsDescriptionSerializer.class) +@JsonDeserialize(using = TokenPermissionsDescription.TokenPermissionsDescriptionDeserializer.class) public class TokenPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; public TokenPermissionsDescription setDescription(String description) { @@ -56,4 +65,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + 
TokenPermissionsDescriptionPb toPb() { + TokenPermissionsDescriptionPb pb = new TokenPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static TokenPermissionsDescription fromPb(TokenPermissionsDescriptionPb pb) { + TokenPermissionsDescription model = new TokenPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class TokenPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + TokenPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public TokenPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenPermissionsDescriptionPb pb = mapper.readValue(p, TokenPermissionsDescriptionPb.class); + return TokenPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescriptionPb.java new file mode 100755 index 000000000..341930af7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TokenPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private TokenPermissionLevel permissionLevel; + + public TokenPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public TokenPermissionsDescriptionPb setPermissionLevel(TokenPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public TokenPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenPermissionsDescriptionPb that = (TokenPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(TokenPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsPb.java new file mode 100755 index 000000000..aca427002 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TokenPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public TokenPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public TokenPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public TokenPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenPermissionsPb that = (TokenPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(TokenPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequest.java index 43b002de5..d3bf23672 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = TokenPermissionsRequest.TokenPermissionsRequestSerializer.class) +@JsonDeserialize(using = TokenPermissionsRequest.TokenPermissionsRequestDeserializer.class) public class TokenPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; public TokenPermissionsRequest setAccessControlList( @@ -43,4 +53,41 @@ public String toString() { .add("accessControlList", accessControlList) .toString(); } + + TokenPermissionsRequestPb toPb() { + TokenPermissionsRequestPb pb = new TokenPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + + return pb; + } + + static TokenPermissionsRequest fromPb(TokenPermissionsRequestPb pb) { + TokenPermissionsRequest model = new TokenPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + + return model; + } + + public static class TokenPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + TokenPermissionsRequest value, 
JsonGenerator gen, SerializerProvider provider) + throws IOException { + TokenPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TokenPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public TokenPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TokenPermissionsRequestPb pb = mapper.readValue(p, TokenPermissionsRequestPb.class); + return TokenPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequestPb.java new file mode 100755 index 000000000..04cc960cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsRequestPb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TokenPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + public TokenPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenPermissionsRequestPb that = (TokenPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList); + } + + @Override + public String toString() { + return new ToStringer(TokenPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java index ed323fd5b..04978b7b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java @@ -21,7 +21,7 @@ public CreateTokenResponse create(CreateTokenRequest request) { String path = "/api/2.0/token/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateTokenResponse.class); @@ 
-35,7 +35,7 @@ public void delete(RevokeTokenRequest request) { String path = "/api/2.0/token/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, RevokeTokenResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java index 60cb583c2..31fb0ad6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. 
*/ @Generated +@JsonSerialize( + using = UpdateAccountIpAccessEnableRequest.UpdateAccountIpAccessEnableRequestSerializer.class) +@JsonDeserialize( + using = UpdateAccountIpAccessEnableRequest.UpdateAccountIpAccessEnableRequestDeserializer.class) public class UpdateAccountIpAccessEnableRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateAccountIpAccessEnableRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private AccountIpAccessEnable setting; public UpdateAccountIpAccessEnableRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateAccountIpAccessEnableRequestPb toPb() { + UpdateAccountIpAccessEnableRequestPb pb = new UpdateAccountIpAccessEnableRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateAccountIpAccessEnableRequest fromPb(UpdateAccountIpAccessEnableRequestPb pb) { + UpdateAccountIpAccessEnableRequest model = new UpdateAccountIpAccessEnableRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateAccountIpAccessEnableRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAccountIpAccessEnableRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAccountIpAccessEnableRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAccountIpAccessEnableRequestDeserializer + extends JsonDeserializer { + @Override + 
public UpdateAccountIpAccessEnableRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAccountIpAccessEnableRequestPb pb = + mapper.readValue(p, UpdateAccountIpAccessEnableRequestPb.class); + return UpdateAccountIpAccessEnableRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequestPb.java new file mode 100755 index 000000000..b25043012 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateAccountIpAccessEnableRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private AccountIpAccessEnable setting; + + public UpdateAccountIpAccessEnableRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAccountIpAccessEnableRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAccountIpAccessEnableRequestPb setSetting(AccountIpAccessEnable setting) { + this.setting = setting; + return this; + } + + public AccountIpAccessEnable getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAccountIpAccessEnableRequestPb that = (UpdateAccountIpAccessEnableRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAccountIpAccessEnableRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java index e41d32f7f..7d5334e85 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest + .UpdateAibiDashboardEmbeddingAccessPolicySettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest + .UpdateAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer.class) public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private AibiDashboardEmbeddingAccessPolicySetting setting; public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setAllowMissing( @@ -85,4 +99,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb toPb() { + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + new UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateAibiDashboardEmbeddingAccessPolicySettingRequest fromPb( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb pb) { + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest model = + new UpdateAibiDashboardEmbeddingAccessPolicySettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateAibiDashboardEmbeddingAccessPolicySettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAibiDashboardEmbeddingAccessPolicySettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb pb = + mapper.readValue(p, UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb.class); + return UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb.java new file mode 100755 index 000000000..7b883791c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private AibiDashboardEmbeddingAccessPolicySetting setting; + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb setAllowMissing( + Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb setSetting( + AibiDashboardEmbeddingAccessPolicySetting setting) { + this.setting = setting; + return this; + } + + public AibiDashboardEmbeddingAccessPolicySetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb that = + (UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAibiDashboardEmbeddingAccessPolicySettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java index a0696f850..80e8b413f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest + .UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest + .UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer.class) public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. 
*/ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private AibiDashboardEmbeddingApprovedDomainsSetting setting; public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setAllowMissing( @@ -85,4 +99,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb toPb() { + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest fromPb( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb) { + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest model = + new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest 
deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb pb = + mapper.readValue(p, UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class); + return UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java new file mode 100755 index 000000000..52b6c9c71 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private AibiDashboardEmbeddingApprovedDomainsSetting setting; + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb setAllowMissing( + Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb setFieldMask( + String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb setSetting( + AibiDashboardEmbeddingApprovedDomainsSetting setting) { + this.setting = setting; + return this; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb that = + (UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java index 4231071d1..8275d53b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateAutomaticClusterUpdateSettingRequest + .UpdateAutomaticClusterUpdateSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateAutomaticClusterUpdateSettingRequest + .UpdateAutomaticClusterUpdateSettingRequestDeserializer.class) public class UpdateAutomaticClusterUpdateSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateAutomaticClusterUpdateSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private AutomaticClusterUpdateSetting setting; public UpdateAutomaticClusterUpdateSettingRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateAutomaticClusterUpdateSettingRequestPb toPb() { + UpdateAutomaticClusterUpdateSettingRequestPb pb = + new UpdateAutomaticClusterUpdateSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateAutomaticClusterUpdateSettingRequest fromPb( + UpdateAutomaticClusterUpdateSettingRequestPb pb) { + UpdateAutomaticClusterUpdateSettingRequest model = + new UpdateAutomaticClusterUpdateSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateAutomaticClusterUpdateSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAutomaticClusterUpdateSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateAutomaticClusterUpdateSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAutomaticClusterUpdateSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateAutomaticClusterUpdateSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAutomaticClusterUpdateSettingRequestPb pb = + mapper.readValue(p, UpdateAutomaticClusterUpdateSettingRequestPb.class); + return UpdateAutomaticClusterUpdateSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequestPb.java new file mode 100755 index 000000000..59cda7fbb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateAutomaticClusterUpdateSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private AutomaticClusterUpdateSetting setting; + + public UpdateAutomaticClusterUpdateSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAutomaticClusterUpdateSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAutomaticClusterUpdateSettingRequestPb setSetting( + AutomaticClusterUpdateSetting setting) { + this.setting = setting; + return this; + } + + public AutomaticClusterUpdateSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAutomaticClusterUpdateSettingRequestPb that = + (UpdateAutomaticClusterUpdateSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAutomaticClusterUpdateSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java index 6eb4c62b2..37902bcd5 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateComplianceSecurityProfileSettingRequest + .UpdateComplianceSecurityProfileSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateComplianceSecurityProfileSettingRequest + .UpdateComplianceSecurityProfileSettingRequestDeserializer.class) public class UpdateComplianceSecurityProfileSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateComplianceSecurityProfileSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private ComplianceSecurityProfileSetting setting; public UpdateComplianceSecurityProfileSettingRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateComplianceSecurityProfileSettingRequestPb toPb() { + UpdateComplianceSecurityProfileSettingRequestPb pb = + new UpdateComplianceSecurityProfileSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateComplianceSecurityProfileSettingRequest fromPb( + UpdateComplianceSecurityProfileSettingRequestPb pb) { + UpdateComplianceSecurityProfileSettingRequest model = + new UpdateComplianceSecurityProfileSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateComplianceSecurityProfileSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateComplianceSecurityProfileSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateComplianceSecurityProfileSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateComplianceSecurityProfileSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateComplianceSecurityProfileSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateComplianceSecurityProfileSettingRequestPb pb = + mapper.readValue(p, UpdateComplianceSecurityProfileSettingRequestPb.class); + return UpdateComplianceSecurityProfileSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequestPb.java new file mode 100755 index 000000000..be9867432 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateComplianceSecurityProfileSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private ComplianceSecurityProfileSetting setting; + + public UpdateComplianceSecurityProfileSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateComplianceSecurityProfileSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateComplianceSecurityProfileSettingRequestPb setSetting( + ComplianceSecurityProfileSetting setting) { + this.setting = setting; + return this; + } + + public ComplianceSecurityProfileSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateComplianceSecurityProfileSettingRequestPb that = + (UpdateComplianceSecurityProfileSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateComplianceSecurityProfileSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java index a1243c1cd..bd79a4ee2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateCspEnablementAccountSettingRequest.UpdateCspEnablementAccountSettingRequestSerializer + .class) +@JsonDeserialize( + using = + UpdateCspEnablementAccountSettingRequest + .UpdateCspEnablementAccountSettingRequestDeserializer.class) public class UpdateCspEnablementAccountSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateCspEnablementAccountSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private CspEnablementAccountSetting setting; public UpdateCspEnablementAccountSettingRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +96,50 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateCspEnablementAccountSettingRequestPb toPb() { + UpdateCspEnablementAccountSettingRequestPb pb = + new UpdateCspEnablementAccountSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateCspEnablementAccountSettingRequest fromPb( + UpdateCspEnablementAccountSettingRequestPb pb) { + UpdateCspEnablementAccountSettingRequest model = new UpdateCspEnablementAccountSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateCspEnablementAccountSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCspEnablementAccountSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateCspEnablementAccountSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCspEnablementAccountSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateCspEnablementAccountSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCspEnablementAccountSettingRequestPb pb = + mapper.readValue(p, UpdateCspEnablementAccountSettingRequestPb.class); + return UpdateCspEnablementAccountSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequestPb.java new file mode 100755 index 000000000..9087c4879 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateCspEnablementAccountSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private CspEnablementAccountSetting setting; + + public UpdateCspEnablementAccountSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateCspEnablementAccountSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateCspEnablementAccountSettingRequestPb setSetting( + CspEnablementAccountSetting setting) { + this.setting = setting; + return this; + } + + public CspEnablementAccountSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCspEnablementAccountSettingRequestPb that = + (UpdateCspEnablementAccountSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateCspEnablementAccountSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java index 37613e037..d9e3eb74f 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateDashboardEmailSubscriptionsRequest.UpdateDashboardEmailSubscriptionsRequestSerializer + .class) +@JsonDeserialize( + using = + UpdateDashboardEmailSubscriptionsRequest + .UpdateDashboardEmailSubscriptionsRequestDeserializer.class) public class UpdateDashboardEmailSubscriptionsRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateDashboardEmailSubscriptionsRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private DashboardEmailSubscriptions setting; public UpdateDashboardEmailSubscriptionsRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +96,50 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateDashboardEmailSubscriptionsRequestPb toPb() { + UpdateDashboardEmailSubscriptionsRequestPb pb = + new UpdateDashboardEmailSubscriptionsRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateDashboardEmailSubscriptionsRequest fromPb( + UpdateDashboardEmailSubscriptionsRequestPb pb) { + UpdateDashboardEmailSubscriptionsRequest model = new UpdateDashboardEmailSubscriptionsRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateDashboardEmailSubscriptionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDashboardEmailSubscriptionsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateDashboardEmailSubscriptionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDashboardEmailSubscriptionsRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDashboardEmailSubscriptionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDashboardEmailSubscriptionsRequestPb pb = + mapper.readValue(p, UpdateDashboardEmailSubscriptionsRequestPb.class); + return UpdateDashboardEmailSubscriptionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequestPb.java new file mode 100755 index 000000000..305d86019 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateDashboardEmailSubscriptionsRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private DashboardEmailSubscriptions setting; + + public UpdateDashboardEmailSubscriptionsRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDashboardEmailSubscriptionsRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDashboardEmailSubscriptionsRequestPb setSetting( + DashboardEmailSubscriptions setting) { + this.setting = setting; + return this; + } + + public DashboardEmailSubscriptions getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDashboardEmailSubscriptionsRequestPb that = + (UpdateDashboardEmailSubscriptionsRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDashboardEmailSubscriptionsRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java index 3e26425dc..973d77961 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java @@ -4,14 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateDefaultNamespaceSettingRequest.UpdateDefaultNamespaceSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateDefaultNamespaceSettingRequest.UpdateDefaultNamespaceSettingRequestDeserializer.class) public class UpdateDefaultNamespaceSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,7 +39,6 @@ public class UpdateDefaultNamespaceSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** @@ -37,7 +50,6 @@ public class UpdateDefaultNamespaceSettingRequest { * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only * applies when using Unity Catalog-enabled compute. 
*/ - @JsonProperty("setting") private DefaultNamespaceSetting setting; public UpdateDefaultNamespaceSettingRequest setAllowMissing(Boolean allowMissing) { @@ -90,4 +102,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateDefaultNamespaceSettingRequestPb toPb() { + UpdateDefaultNamespaceSettingRequestPb pb = new UpdateDefaultNamespaceSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateDefaultNamespaceSettingRequest fromPb(UpdateDefaultNamespaceSettingRequestPb pb) { + UpdateDefaultNamespaceSettingRequest model = new UpdateDefaultNamespaceSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateDefaultNamespaceSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDefaultNamespaceSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDefaultNamespaceSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDefaultNamespaceSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDefaultNamespaceSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDefaultNamespaceSettingRequestPb pb = + mapper.readValue(p, UpdateDefaultNamespaceSettingRequestPb.class); + return UpdateDefaultNamespaceSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequestPb.java new file mode 100755 index 000000000..d333f09b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateDefaultNamespaceSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private DefaultNamespaceSetting setting; + + public UpdateDefaultNamespaceSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDefaultNamespaceSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDefaultNamespaceSettingRequestPb setSetting(DefaultNamespaceSetting setting) { + this.setting = setting; + return this; + } + + public DefaultNamespaceSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDefaultNamespaceSettingRequestPb that = (UpdateDefaultNamespaceSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDefaultNamespaceSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java index 92ecb6463..6c072b9f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = UpdateDisableLegacyAccessRequest.UpdateDisableLegacyAccessRequestSerializer.class) +@JsonDeserialize( + using = UpdateDisableLegacyAccessRequest.UpdateDisableLegacyAccessRequestDeserializer.class) public class UpdateDisableLegacyAccessRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateDisableLegacyAccessRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private DisableLegacyAccess setting; public UpdateDisableLegacyAccessRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateDisableLegacyAccessRequestPb toPb() { + UpdateDisableLegacyAccessRequestPb pb = new UpdateDisableLegacyAccessRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateDisableLegacyAccessRequest fromPb(UpdateDisableLegacyAccessRequestPb pb) { + UpdateDisableLegacyAccessRequest model = new UpdateDisableLegacyAccessRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateDisableLegacyAccessRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDisableLegacyAccessRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDisableLegacyAccessRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDisableLegacyAccessRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDisableLegacyAccessRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDisableLegacyAccessRequestPb pb = + mapper.readValue(p, UpdateDisableLegacyAccessRequestPb.class); + return UpdateDisableLegacyAccessRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequestPb.java new file mode 100755 index 000000000..765435d85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateDisableLegacyAccessRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private DisableLegacyAccess setting; + + public UpdateDisableLegacyAccessRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDisableLegacyAccessRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDisableLegacyAccessRequestPb setSetting(DisableLegacyAccess setting) { + this.setting = setting; + return this; + } + + public DisableLegacyAccess getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDisableLegacyAccessRequestPb that = (UpdateDisableLegacyAccessRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDisableLegacyAccessRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java index 9859a2ade..9acc943fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = UpdateDisableLegacyDbfsRequest.UpdateDisableLegacyDbfsRequestSerializer.class) +@JsonDeserialize( + using = UpdateDisableLegacyDbfsRequest.UpdateDisableLegacyDbfsRequestDeserializer.class) public class UpdateDisableLegacyDbfsRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateDisableLegacyDbfsRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private DisableLegacyDbfs setting; public UpdateDisableLegacyDbfsRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateDisableLegacyDbfsRequestPb toPb() { + UpdateDisableLegacyDbfsRequestPb pb = new UpdateDisableLegacyDbfsRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateDisableLegacyDbfsRequest fromPb(UpdateDisableLegacyDbfsRequestPb pb) { + UpdateDisableLegacyDbfsRequest model = new UpdateDisableLegacyDbfsRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateDisableLegacyDbfsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDisableLegacyDbfsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDisableLegacyDbfsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDisableLegacyDbfsRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateDisableLegacyDbfsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDisableLegacyDbfsRequestPb pb = + mapper.readValue(p, UpdateDisableLegacyDbfsRequestPb.class); + return UpdateDisableLegacyDbfsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequestPb.java new file mode 100755 index 000000000..e01e400a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +class UpdateDisableLegacyDbfsRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private DisableLegacyDbfs setting; + + public UpdateDisableLegacyDbfsRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDisableLegacyDbfsRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDisableLegacyDbfsRequestPb setSetting(DisableLegacyDbfs setting) { + this.setting = setting; + return this; + } + + public DisableLegacyDbfs getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
UpdateDisableLegacyDbfsRequestPb that = (UpdateDisableLegacyDbfsRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDisableLegacyDbfsRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java index d54d4f516..cb042f91b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. 
*/ @Generated +@JsonSerialize( + using = UpdateDisableLegacyFeaturesRequest.UpdateDisableLegacyFeaturesRequestSerializer.class) +@JsonDeserialize( + using = UpdateDisableLegacyFeaturesRequest.UpdateDisableLegacyFeaturesRequestDeserializer.class) public class UpdateDisableLegacyFeaturesRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateDisableLegacyFeaturesRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private DisableLegacyFeatures setting; public UpdateDisableLegacyFeaturesRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateDisableLegacyFeaturesRequestPb toPb() { + UpdateDisableLegacyFeaturesRequestPb pb = new UpdateDisableLegacyFeaturesRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateDisableLegacyFeaturesRequest fromPb(UpdateDisableLegacyFeaturesRequestPb pb) { + UpdateDisableLegacyFeaturesRequest model = new UpdateDisableLegacyFeaturesRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateDisableLegacyFeaturesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateDisableLegacyFeaturesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateDisableLegacyFeaturesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateDisableLegacyFeaturesRequestDeserializer + extends JsonDeserializer { + @Override + 
public UpdateDisableLegacyFeaturesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateDisableLegacyFeaturesRequestPb pb = + mapper.readValue(p, UpdateDisableLegacyFeaturesRequestPb.class); + return UpdateDisableLegacyFeaturesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequestPb.java new file mode 100755 index 000000000..22e8e6a46 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateDisableLegacyFeaturesRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private DisableLegacyFeatures setting; + + public UpdateDisableLegacyFeaturesRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDisableLegacyFeaturesRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDisableLegacyFeaturesRequestPb setSetting(DisableLegacyFeatures setting) { + this.setting = setting; + return this; + } + + public DisableLegacyFeatures getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDisableLegacyFeaturesRequestPb that = (UpdateDisableLegacyFeaturesRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDisableLegacyFeaturesRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java index 4e426ca38..7732840a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = UpdateEnableExportNotebookRequest.UpdateEnableExportNotebookRequestSerializer.class) +@JsonDeserialize( + using = UpdateEnableExportNotebookRequest.UpdateEnableExportNotebookRequestDeserializer.class) public class UpdateEnableExportNotebookRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateEnableExportNotebookRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private EnableExportNotebook setting; public UpdateEnableExportNotebookRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateEnableExportNotebookRequestPb toPb() { + UpdateEnableExportNotebookRequestPb pb = new UpdateEnableExportNotebookRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateEnableExportNotebookRequest fromPb(UpdateEnableExportNotebookRequestPb pb) { + UpdateEnableExportNotebookRequest model = new UpdateEnableExportNotebookRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateEnableExportNotebookRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEnableExportNotebookRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateEnableExportNotebookRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEnableExportNotebookRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEnableExportNotebookRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEnableExportNotebookRequestPb pb = + mapper.readValue(p, UpdateEnableExportNotebookRequestPb.class); + return UpdateEnableExportNotebookRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequestPb.java new file mode 100755 index 000000000..8cdfbe046 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateEnableExportNotebookRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private EnableExportNotebook setting; + + public UpdateEnableExportNotebookRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableExportNotebookRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableExportNotebookRequestPb setSetting(EnableExportNotebook setting) { + this.setting = setting; + return this; + } + + public EnableExportNotebook getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableExportNotebookRequestPb that = (UpdateEnableExportNotebookRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableExportNotebookRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java index 008b93554..f8ee6471a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateEnableNotebookTableClipboardRequest + .UpdateEnableNotebookTableClipboardRequestSerializer.class) +@JsonDeserialize( + using = + UpdateEnableNotebookTableClipboardRequest + .UpdateEnableNotebookTableClipboardRequestDeserializer.class) public class UpdateEnableNotebookTableClipboardRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateEnableNotebookTableClipboardRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private EnableNotebookTableClipboard setting; public UpdateEnableNotebookTableClipboardRequest setAllowMissing(Boolean allowMissing) { @@ -83,4 +97,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateEnableNotebookTableClipboardRequestPb toPb() { + UpdateEnableNotebookTableClipboardRequestPb pb = + new UpdateEnableNotebookTableClipboardRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateEnableNotebookTableClipboardRequest fromPb( + UpdateEnableNotebookTableClipboardRequestPb pb) { + UpdateEnableNotebookTableClipboardRequest model = + new UpdateEnableNotebookTableClipboardRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateEnableNotebookTableClipboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEnableNotebookTableClipboardRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateEnableNotebookTableClipboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEnableNotebookTableClipboardRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEnableNotebookTableClipboardRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEnableNotebookTableClipboardRequestPb pb = + mapper.readValue(p, UpdateEnableNotebookTableClipboardRequestPb.class); + return UpdateEnableNotebookTableClipboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequestPb.java new file mode 100755 index 000000000..e73e365aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateEnableNotebookTableClipboardRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private EnableNotebookTableClipboard setting; + + public UpdateEnableNotebookTableClipboardRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableNotebookTableClipboardRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableNotebookTableClipboardRequestPb setSetting( + EnableNotebookTableClipboard setting) { + this.setting = setting; + return this; + } + + public EnableNotebookTableClipboard getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableNotebookTableClipboardRequestPb that = + (UpdateEnableNotebookTableClipboardRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableNotebookTableClipboardRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java index 7311e8398..647f74fea 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java @@ -4,14 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateEnableResultsDownloadingRequest.UpdateEnableResultsDownloadingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateEnableResultsDownloadingRequest.UpdateEnableResultsDownloadingRequestDeserializer + .class) public class UpdateEnableResultsDownloadingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +40,9 @@ public class UpdateEnableResultsDownloadingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private EnableResultsDownloading setting; public UpdateEnableResultsDownloadingRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +95,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateEnableResultsDownloadingRequestPb toPb() { + UpdateEnableResultsDownloadingRequestPb pb = new UpdateEnableResultsDownloadingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateEnableResultsDownloadingRequest fromPb(UpdateEnableResultsDownloadingRequestPb pb) { + UpdateEnableResultsDownloadingRequest model = new UpdateEnableResultsDownloadingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateEnableResultsDownloadingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEnableResultsDownloadingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateEnableResultsDownloadingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEnableResultsDownloadingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEnableResultsDownloadingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEnableResultsDownloadingRequestPb pb = + mapper.readValue(p, UpdateEnableResultsDownloadingRequestPb.class); + return UpdateEnableResultsDownloadingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequestPb.java new file mode 100755 index 000000000..78ec3ed64 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateEnableResultsDownloadingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private EnableResultsDownloading setting; + + public UpdateEnableResultsDownloadingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableResultsDownloadingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableResultsDownloadingRequestPb setSetting(EnableResultsDownloading setting) { + this.setting = setting; + return this; + } + + public EnableResultsDownloading getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableResultsDownloadingRequestPb that = (UpdateEnableResultsDownloadingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableResultsDownloadingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java index 9acd11789..3e4c8d6df 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateEnhancedSecurityMonitoringSettingRequest + .UpdateEnhancedSecurityMonitoringSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateEnhancedSecurityMonitoringSettingRequest + .UpdateEnhancedSecurityMonitoringSettingRequestDeserializer.class) public class UpdateEnhancedSecurityMonitoringSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateEnhancedSecurityMonitoringSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private EnhancedSecurityMonitoringSetting setting; public UpdateEnhancedSecurityMonitoringSettingRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateEnhancedSecurityMonitoringSettingRequestPb toPb() { + UpdateEnhancedSecurityMonitoringSettingRequestPb pb = + new UpdateEnhancedSecurityMonitoringSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateEnhancedSecurityMonitoringSettingRequest fromPb( + UpdateEnhancedSecurityMonitoringSettingRequestPb pb) { + UpdateEnhancedSecurityMonitoringSettingRequest model = + new UpdateEnhancedSecurityMonitoringSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateEnhancedSecurityMonitoringSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEnhancedSecurityMonitoringSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateEnhancedSecurityMonitoringSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEnhancedSecurityMonitoringSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEnhancedSecurityMonitoringSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEnhancedSecurityMonitoringSettingRequestPb pb = + mapper.readValue(p, UpdateEnhancedSecurityMonitoringSettingRequestPb.class); + return UpdateEnhancedSecurityMonitoringSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequestPb.java new file mode 100755 index 000000000..d1e405cb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateEnhancedSecurityMonitoringSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private EnhancedSecurityMonitoringSetting setting; + + public UpdateEnhancedSecurityMonitoringSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnhancedSecurityMonitoringSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnhancedSecurityMonitoringSettingRequestPb setSetting( + EnhancedSecurityMonitoringSetting setting) { + this.setting = setting; + return this; + } + + public EnhancedSecurityMonitoringSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnhancedSecurityMonitoringSettingRequestPb that = + (UpdateEnhancedSecurityMonitoringSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnhancedSecurityMonitoringSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java index 224635191..e5f648902 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateEsmEnablementAccountSettingRequest.UpdateEsmEnablementAccountSettingRequestSerializer + .class) +@JsonDeserialize( + using = + UpdateEsmEnablementAccountSettingRequest + .UpdateEsmEnablementAccountSettingRequestDeserializer.class) public class UpdateEsmEnablementAccountSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateEsmEnablementAccountSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private EsmEnablementAccountSetting setting; public UpdateEsmEnablementAccountSettingRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +96,50 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateEsmEnablementAccountSettingRequestPb toPb() { + UpdateEsmEnablementAccountSettingRequestPb pb = + new UpdateEsmEnablementAccountSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateEsmEnablementAccountSettingRequest fromPb( + UpdateEsmEnablementAccountSettingRequestPb pb) { + UpdateEsmEnablementAccountSettingRequest model = new UpdateEsmEnablementAccountSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateEsmEnablementAccountSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEsmEnablementAccountSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateEsmEnablementAccountSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEsmEnablementAccountSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEsmEnablementAccountSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEsmEnablementAccountSettingRequestPb pb = + mapper.readValue(p, UpdateEsmEnablementAccountSettingRequestPb.class); + return UpdateEsmEnablementAccountSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequestPb.java new file mode 100755 index 000000000..ffbed3fcc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateEsmEnablementAccountSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private EsmEnablementAccountSetting setting; + + public UpdateEsmEnablementAccountSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEsmEnablementAccountSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEsmEnablementAccountSettingRequestPb setSetting( + EsmEnablementAccountSetting setting) { + this.setting = setting; + return this; + } + + public EsmEnablementAccountSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEsmEnablementAccountSettingRequestPb that = + (UpdateEsmEnablementAccountSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEsmEnablementAccountSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java index 20c17976f..49fe40dd1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java @@ -4,27 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Details required to update an IP access list. */ @Generated +@JsonSerialize(using = UpdateIpAccessList.UpdateIpAccessListSerializer.class) +@JsonDeserialize(using = UpdateIpAccessList.UpdateIpAccessListDeserializer.class) public class UpdateIpAccessList { /** Specifies whether this IP access list is enabled. */ - @JsonProperty("enabled") private Boolean enabled; /** The ID for the corresponding IP access list */ - @JsonIgnore private String ipAccessListId; + private String ipAccessListId; /** */ - @JsonProperty("ip_addresses") private Collection ipAddresses; /** Label for the IP access list. This **cannot** be empty. */ - @JsonProperty("label") private String label; /** @@ -34,7 +41,6 @@ public class UpdateIpAccessList { * or range. IP addresses in the block list are excluded even if they are included in an allow * list. 
*/ - @JsonProperty("list_type") private ListType listType; public UpdateIpAccessList setEnabled(Boolean enabled) { @@ -109,4 +115,46 @@ public String toString() { .add("listType", listType) .toString(); } + + UpdateIpAccessListPb toPb() { + UpdateIpAccessListPb pb = new UpdateIpAccessListPb(); + pb.setEnabled(enabled); + pb.setIpAccessListId(ipAccessListId); + pb.setIpAddresses(ipAddresses); + pb.setLabel(label); + pb.setListType(listType); + + return pb; + } + + static UpdateIpAccessList fromPb(UpdateIpAccessListPb pb) { + UpdateIpAccessList model = new UpdateIpAccessList(); + model.setEnabled(pb.getEnabled()); + model.setIpAccessListId(pb.getIpAccessListId()); + model.setIpAddresses(pb.getIpAddresses()); + model.setLabel(pb.getLabel()); + model.setListType(pb.getListType()); + + return model; + } + + public static class UpdateIpAccessListSerializer extends JsonSerializer { + @Override + public void serialize(UpdateIpAccessList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateIpAccessListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateIpAccessListDeserializer extends JsonDeserializer { + @Override + public UpdateIpAccessList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateIpAccessListPb pb = mapper.readValue(p, UpdateIpAccessListPb.class); + return UpdateIpAccessList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessListPb.java new file mode 100755 index 000000000..ec11844c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessListPb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Details required to update an IP access list. 
*/ +@Generated +class UpdateIpAccessListPb { + @JsonProperty("enabled") + private Boolean enabled; + + @JsonIgnore private String ipAccessListId; + + @JsonProperty("ip_addresses") + private Collection ipAddresses; + + @JsonProperty("label") + private String label; + + @JsonProperty("list_type") + private ListType listType; + + public UpdateIpAccessListPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public UpdateIpAccessListPb setIpAccessListId(String ipAccessListId) { + this.ipAccessListId = ipAccessListId; + return this; + } + + public String getIpAccessListId() { + return ipAccessListId; + } + + public UpdateIpAccessListPb setIpAddresses(Collection ipAddresses) { + this.ipAddresses = ipAddresses; + return this; + } + + public Collection getIpAddresses() { + return ipAddresses; + } + + public UpdateIpAccessListPb setLabel(String label) { + this.label = label; + return this; + } + + public String getLabel() { + return label; + } + + public UpdateIpAccessListPb setListType(ListType listType) { + this.listType = listType; + return this; + } + + public ListType getListType() { + return listType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateIpAccessListPb that = (UpdateIpAccessListPb) o; + return Objects.equals(enabled, that.enabled) + && Objects.equals(ipAccessListId, that.ipAccessListId) + && Objects.equals(ipAddresses, that.ipAddresses) + && Objects.equals(label, that.label) + && Objects.equals(listType, that.listType); + } + + @Override + public int hashCode() { + return Objects.hash(enabled, ipAccessListId, ipAddresses, label, listType); + } + + @Override + public String toString() { + return new ToStringer(UpdateIpAccessListPb.class) + .add("enabled", enabled) + .add("ipAccessListId", ipAccessListId) + .add("ipAddresses", ipAddresses) + .add("label", label) + 
.add("listType", listType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java index 3d8db559a..708fd7ab3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateLlmProxyPartnerPoweredAccountRequest + .UpdateLlmProxyPartnerPoweredAccountRequestSerializer.class) +@JsonDeserialize( + using = + UpdateLlmProxyPartnerPoweredAccountRequest + .UpdateLlmProxyPartnerPoweredAccountRequestDeserializer.class) public class UpdateLlmProxyPartnerPoweredAccountRequest { /** This should always be set to true for Settings API. Added for AIP compliance. 
*/ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateLlmProxyPartnerPoweredAccountRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private LlmProxyPartnerPoweredAccount setting; public UpdateLlmProxyPartnerPoweredAccountRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateLlmProxyPartnerPoweredAccountRequestPb toPb() { + UpdateLlmProxyPartnerPoweredAccountRequestPb pb = + new UpdateLlmProxyPartnerPoweredAccountRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateLlmProxyPartnerPoweredAccountRequest fromPb( + UpdateLlmProxyPartnerPoweredAccountRequestPb pb) { + UpdateLlmProxyPartnerPoweredAccountRequest model = + new UpdateLlmProxyPartnerPoweredAccountRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateLlmProxyPartnerPoweredAccountRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateLlmProxyPartnerPoweredAccountRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateLlmProxyPartnerPoweredAccountRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateLlmProxyPartnerPoweredAccountRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateLlmProxyPartnerPoweredAccountRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateLlmProxyPartnerPoweredAccountRequestPb pb = + mapper.readValue(p, UpdateLlmProxyPartnerPoweredAccountRequestPb.class); + return UpdateLlmProxyPartnerPoweredAccountRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequestPb.java new file mode 100755 index 000000000..c456f1946 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateLlmProxyPartnerPoweredAccountRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private LlmProxyPartnerPoweredAccount setting; + + public UpdateLlmProxyPartnerPoweredAccountRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredAccountRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredAccountRequestPb setSetting( + LlmProxyPartnerPoweredAccount setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredAccount getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredAccountRequestPb that = + (UpdateLlmProxyPartnerPoweredAccountRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredAccountRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java index c644fc32c..671af4419 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateLlmProxyPartnerPoweredEnforceRequest + .UpdateLlmProxyPartnerPoweredEnforceRequestSerializer.class) +@JsonDeserialize( + using = + UpdateLlmProxyPartnerPoweredEnforceRequest + .UpdateLlmProxyPartnerPoweredEnforceRequestDeserializer.class) public class UpdateLlmProxyPartnerPoweredEnforceRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateLlmProxyPartnerPoweredEnforceRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private LlmProxyPartnerPoweredEnforce setting; public UpdateLlmProxyPartnerPoweredEnforceRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateLlmProxyPartnerPoweredEnforceRequestPb toPb() { + UpdateLlmProxyPartnerPoweredEnforceRequestPb pb = + new UpdateLlmProxyPartnerPoweredEnforceRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateLlmProxyPartnerPoweredEnforceRequest fromPb( + UpdateLlmProxyPartnerPoweredEnforceRequestPb pb) { + UpdateLlmProxyPartnerPoweredEnforceRequest model = + new UpdateLlmProxyPartnerPoweredEnforceRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateLlmProxyPartnerPoweredEnforceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateLlmProxyPartnerPoweredEnforceRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateLlmProxyPartnerPoweredEnforceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateLlmProxyPartnerPoweredEnforceRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateLlmProxyPartnerPoweredEnforceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateLlmProxyPartnerPoweredEnforceRequestPb pb = + mapper.readValue(p, UpdateLlmProxyPartnerPoweredEnforceRequestPb.class); + return UpdateLlmProxyPartnerPoweredEnforceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequestPb.java new file mode 100755 index 000000000..139049079 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateLlmProxyPartnerPoweredEnforceRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private LlmProxyPartnerPoweredEnforce setting; + + public UpdateLlmProxyPartnerPoweredEnforceRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredEnforceRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredEnforceRequestPb setSetting( + LlmProxyPartnerPoweredEnforce setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredEnforce getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredEnforceRequestPb that = + (UpdateLlmProxyPartnerPoweredEnforceRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredEnforceRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java index 52f1bb85a..a92f12bb2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateLlmProxyPartnerPoweredWorkspaceRequest + .UpdateLlmProxyPartnerPoweredWorkspaceRequestSerializer.class) +@JsonDeserialize( + using = + UpdateLlmProxyPartnerPoweredWorkspaceRequest + .UpdateLlmProxyPartnerPoweredWorkspaceRequestDeserializer.class) public class UpdateLlmProxyPartnerPoweredWorkspaceRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateLlmProxyPartnerPoweredWorkspaceRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private LlmProxyPartnerPoweredWorkspace setting; public UpdateLlmProxyPartnerPoweredWorkspaceRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb toPb() { + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb pb = + new UpdateLlmProxyPartnerPoweredWorkspaceRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateLlmProxyPartnerPoweredWorkspaceRequest fromPb( + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb pb) { + UpdateLlmProxyPartnerPoweredWorkspaceRequest model = + new UpdateLlmProxyPartnerPoweredWorkspaceRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateLlmProxyPartnerPoweredWorkspaceRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateLlmProxyPartnerPoweredWorkspaceRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateLlmProxyPartnerPoweredWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateLlmProxyPartnerPoweredWorkspaceRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb pb = + mapper.readValue(p, UpdateLlmProxyPartnerPoweredWorkspaceRequestPb.class); + return UpdateLlmProxyPartnerPoweredWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequestPb.java new file mode 100755 index 000000000..e94868f4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateLlmProxyPartnerPoweredWorkspaceRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private LlmProxyPartnerPoweredWorkspace setting; + + public UpdateLlmProxyPartnerPoweredWorkspaceRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredWorkspaceRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredWorkspaceRequestPb setSetting( + LlmProxyPartnerPoweredWorkspace setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredWorkspace getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredWorkspaceRequestPb that = + (UpdateLlmProxyPartnerPoweredWorkspaceRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredWorkspaceRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java new file mode 100755 index 000000000..9a5b7a30e --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java @@ -0,0 +1,160 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +/** Update a private endpoint rule */ +@Generated +@JsonSerialize( + using = UpdateNccPrivateEndpointRuleRequest.UpdateNccPrivateEndpointRuleRequestSerializer.class) +@JsonDeserialize( + using = + UpdateNccPrivateEndpointRuleRequest.UpdateNccPrivateEndpointRuleRequestDeserializer.class) +public class UpdateNccPrivateEndpointRuleRequest { + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. + */ + private String networkConnectivityConfigId; + + /** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. + */ + private UpdatePrivateEndpointRule privateEndpointRule; + + /** Your private endpoint rule ID. */ + private String privateEndpointRuleId; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + */ + private String updateMask; + + public UpdateNccPrivateEndpointRuleRequest setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRule( + UpdatePrivateEndpointRule privateEndpointRule) { + this.privateEndpointRule = privateEndpointRule; + return this; + } + + public UpdatePrivateEndpointRule getPrivateEndpointRule() { + return privateEndpointRule; + } + + public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRuleId( + String privateEndpointRuleId) { + this.privateEndpointRuleId = privateEndpointRuleId; + return this; + } + + public String getPrivateEndpointRuleId() { + return privateEndpointRuleId; + } + + public UpdateNccPrivateEndpointRuleRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateNccPrivateEndpointRuleRequest that = (UpdateNccPrivateEndpointRuleRequest) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(privateEndpointRule, that.privateEndpointRule) + && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash( + networkConnectivityConfigId, privateEndpointRule, privateEndpointRuleId, updateMask); + } + + @Override + public String toString() { + return new 
ToStringer(UpdateNccPrivateEndpointRuleRequest.class) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("privateEndpointRule", privateEndpointRule) + .add("privateEndpointRuleId", privateEndpointRuleId) + .add("updateMask", updateMask) + .toString(); + } + + UpdateNccPrivateEndpointRuleRequestPb toPb() { + UpdateNccPrivateEndpointRuleRequestPb pb = new UpdateNccPrivateEndpointRuleRequestPb(); + pb.setNetworkConnectivityConfigId(networkConnectivityConfigId); + pb.setPrivateEndpointRule(privateEndpointRule); + pb.setPrivateEndpointRuleId(privateEndpointRuleId); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateNccPrivateEndpointRuleRequest fromPb(UpdateNccPrivateEndpointRuleRequestPb pb) { + UpdateNccPrivateEndpointRuleRequest model = new UpdateNccPrivateEndpointRuleRequest(); + model.setNetworkConnectivityConfigId(pb.getNetworkConnectivityConfigId()); + model.setPrivateEndpointRule(pb.getPrivateEndpointRule()); + model.setPrivateEndpointRuleId(pb.getPrivateEndpointRuleId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateNccPrivateEndpointRuleRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateNccPrivateEndpointRuleRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateNccPrivateEndpointRuleRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateNccPrivateEndpointRuleRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateNccPrivateEndpointRuleRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateNccPrivateEndpointRuleRequestPb pb = + mapper.readValue(p, UpdateNccPrivateEndpointRuleRequestPb.class); + return UpdateNccPrivateEndpointRuleRequest.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequestPb.java similarity index 65% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequestPb.java index 666de476e..f8da32f71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequestPb.java @@ -11,32 +11,19 @@ /** Update a private endpoint rule */ @Generated -public class UpdateNccAzurePrivateEndpointRulePublicRequest { - /** Your Network Connectivity Configuration ID. */ +class UpdateNccPrivateEndpointRuleRequestPb { @JsonIgnore private String networkConnectivityConfigId; - /** - * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure - * portal after initialization. - */ @JsonProperty("private_endpoint_rule") private UpdatePrivateEndpointRule privateEndpointRule; - /** Your private endpoint rule ID. */ @JsonIgnore private String privateEndpointRuleId; - /** - * The field mask must be a single string, with multiple fields separated by commas (no spaces). - * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - * (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not - * allowed, as only the entire collection field can be specified. Field names must exactly match - * the resource field names. - */ @JsonIgnore @QueryParam("update_mask") private String updateMask; - public UpdateNccAzurePrivateEndpointRulePublicRequest setNetworkConnectivityConfigId( + public UpdateNccPrivateEndpointRuleRequestPb setNetworkConnectivityConfigId( String networkConnectivityConfigId) { this.networkConnectivityConfigId = networkConnectivityConfigId; return this; @@ -46,7 +33,7 @@ public String getNetworkConnectivityConfigId() { return networkConnectivityConfigId; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRule( + public UpdateNccPrivateEndpointRuleRequestPb setPrivateEndpointRule( UpdatePrivateEndpointRule privateEndpointRule) { this.privateEndpointRule = privateEndpointRule; return this; @@ -56,7 +43,7 @@ public UpdatePrivateEndpointRule getPrivateEndpointRule() { return privateEndpointRule; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRuleId( + public UpdateNccPrivateEndpointRuleRequestPb setPrivateEndpointRuleId( String privateEndpointRuleId) { this.privateEndpointRuleId = privateEndpointRuleId; return this; @@ -66,7 +53,7 @@ public String getPrivateEndpointRuleId() { return privateEndpointRuleId; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setUpdateMask(String updateMask) { + public UpdateNccPrivateEndpointRuleRequestPb setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @@ -79,8 +66,7 @@ public String getUpdateMask() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - UpdateNccAzurePrivateEndpointRulePublicRequest that = - (UpdateNccAzurePrivateEndpointRulePublicRequest) o; + UpdateNccPrivateEndpointRuleRequestPb that = (UpdateNccPrivateEndpointRuleRequestPb) o; return Objects.equals(networkConnectivityConfigId, 
that.networkConnectivityConfigId) && Objects.equals(privateEndpointRule, that.privateEndpointRule) && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId) @@ -95,7 +81,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateNccAzurePrivateEndpointRulePublicRequest.class) + return new ToStringer(UpdateNccPrivateEndpointRuleRequestPb.class) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("privateEndpointRule", privateEndpointRule) .add("privateEndpointRuleId", privateEndpointRuleId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java index 82fb015bd..5b7eb7806 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update a network policy */ @Generated +@JsonSerialize(using = UpdateNetworkPolicyRequest.UpdateNetworkPolicyRequestSerializer.class) +@JsonDeserialize(using = 
UpdateNetworkPolicyRequest.UpdateNetworkPolicyRequestDeserializer.class) public class UpdateNetworkPolicyRequest { /** */ - @JsonProperty("network_policy") private AccountNetworkPolicy networkPolicy; /** The unique identifier for the network policy. */ - @JsonIgnore private String networkPolicyId; + private String networkPolicyId; public UpdateNetworkPolicyRequest setNetworkPolicy(AccountNetworkPolicy networkPolicy) { this.networkPolicy = networkPolicy; @@ -57,4 +66,43 @@ public String toString() { .add("networkPolicyId", networkPolicyId) .toString(); } + + UpdateNetworkPolicyRequestPb toPb() { + UpdateNetworkPolicyRequestPb pb = new UpdateNetworkPolicyRequestPb(); + pb.setNetworkPolicy(networkPolicy); + pb.setNetworkPolicyId(networkPolicyId); + + return pb; + } + + static UpdateNetworkPolicyRequest fromPb(UpdateNetworkPolicyRequestPb pb) { + UpdateNetworkPolicyRequest model = new UpdateNetworkPolicyRequest(); + model.setNetworkPolicy(pb.getNetworkPolicy()); + model.setNetworkPolicyId(pb.getNetworkPolicyId()); + + return model; + } + + public static class UpdateNetworkPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateNetworkPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateNetworkPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateNetworkPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateNetworkPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateNetworkPolicyRequestPb pb = mapper.readValue(p, UpdateNetworkPolicyRequestPb.class); + return UpdateNetworkPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequestPb.java new file mode 100755 index 000000000..773f9f69c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a network policy */ +@Generated +class UpdateNetworkPolicyRequestPb { + @JsonProperty("network_policy") + private AccountNetworkPolicy networkPolicy; + + @JsonIgnore private String networkPolicyId; + + public UpdateNetworkPolicyRequestPb setNetworkPolicy(AccountNetworkPolicy networkPolicy) { + this.networkPolicy = networkPolicy; + return this; + } + + public AccountNetworkPolicy getNetworkPolicy() { + return networkPolicy; + } + + public UpdateNetworkPolicyRequestPb setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateNetworkPolicyRequestPb that = (UpdateNetworkPolicyRequestPb) o; + return Objects.equals(networkPolicy, that.networkPolicy) + && Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int 
hashCode() { + return Objects.hash(networkPolicy, networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(UpdateNetworkPolicyRequestPb.class) + .add("networkPolicy", networkPolicy) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java index 7554ec304..660139ffb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java @@ -4,25 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateNotificationDestinationRequest.UpdateNotificationDestinationRequestSerializer.class) +@JsonDeserialize( + using = + UpdateNotificationDestinationRequest.UpdateNotificationDestinationRequestDeserializer.class) public class UpdateNotificationDestinationRequest { /** * The configuration for the notification destination. Must wrap EXACTLY one of the nested * configs. 
*/ - @JsonProperty("config") private Config config; /** The display name for the notification destination. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying notification destination. */ - @JsonIgnore private String id; + private String id; public UpdateNotificationDestinationRequest setConfig(Config config) { this.config = config; @@ -74,4 +86,46 @@ public String toString() { .add("id", id) .toString(); } + + UpdateNotificationDestinationRequestPb toPb() { + UpdateNotificationDestinationRequestPb pb = new UpdateNotificationDestinationRequestPb(); + pb.setConfig(config); + pb.setDisplayName(displayName); + pb.setId(id); + + return pb; + } + + static UpdateNotificationDestinationRequest fromPb(UpdateNotificationDestinationRequestPb pb) { + UpdateNotificationDestinationRequest model = new UpdateNotificationDestinationRequest(); + model.setConfig(pb.getConfig()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + + return model; + } + + public static class UpdateNotificationDestinationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateNotificationDestinationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateNotificationDestinationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateNotificationDestinationRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateNotificationDestinationRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateNotificationDestinationRequestPb pb = + mapper.readValue(p, UpdateNotificationDestinationRequestPb.class); + return UpdateNotificationDestinationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequestPb.java new file mode 100755 index 000000000..cec79f57f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateNotificationDestinationRequestPb { + @JsonProperty("config") + private Config config; + + @JsonProperty("display_name") + private String displayName; + + @JsonIgnore private String id; + + public UpdateNotificationDestinationRequestPb setConfig(Config config) { + this.config = config; + return this; + } + + public Config getConfig() { + return config; + } + + public UpdateNotificationDestinationRequestPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateNotificationDestinationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateNotificationDestinationRequestPb that = 
(UpdateNotificationDestinationRequestPb) o; + return Objects.equals(config, that.config) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(config, displayName, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateNotificationDestinationRequestPb.class) + .add("config", config) + .add("displayName", displayName) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java index 50470709c..f5643a943 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. 
*/ @Generated +@JsonSerialize( + using = UpdatePersonalComputeSettingRequest.UpdatePersonalComputeSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdatePersonalComputeSettingRequest.UpdatePersonalComputeSettingRequestDeserializer.class) public class UpdatePersonalComputeSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +38,9 @@ public class UpdatePersonalComputeSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private PersonalComputeSetting setting; public UpdatePersonalComputeSettingRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +93,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdatePersonalComputeSettingRequestPb toPb() { + UpdatePersonalComputeSettingRequestPb pb = new UpdatePersonalComputeSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdatePersonalComputeSettingRequest fromPb(UpdatePersonalComputeSettingRequestPb pb) { + UpdatePersonalComputeSettingRequest model = new UpdatePersonalComputeSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdatePersonalComputeSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePersonalComputeSettingRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePersonalComputeSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePersonalComputeSettingRequestDeserializer + extends 
JsonDeserializer { + @Override + public UpdatePersonalComputeSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePersonalComputeSettingRequestPb pb = + mapper.readValue(p, UpdatePersonalComputeSettingRequestPb.class); + return UpdatePersonalComputeSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequestPb.java new file mode 100755 index 000000000..9d75bd471 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdatePersonalComputeSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private PersonalComputeSetting setting; + + public UpdatePersonalComputeSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdatePersonalComputeSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdatePersonalComputeSettingRequestPb setSetting(PersonalComputeSetting setting) { + this.setting = setting; + return this; + } + + public PersonalComputeSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePersonalComputeSettingRequestPb that = (UpdatePersonalComputeSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdatePersonalComputeSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java index f7df95078..988d99b41 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -13,16 +22,35 @@ * portal after initialization. */ @Generated +@JsonSerialize(using = UpdatePrivateEndpointRule.UpdatePrivateEndpointRuleSerializer.class) +@JsonDeserialize(using = UpdatePrivateEndpointRule.UpdatePrivateEndpointRuleDeserializer.class) public class UpdatePrivateEndpointRule { /** - * Only used by private endpoints to customer-managed resources. + * Only used by private endpoints to customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. */ - @JsonProperty("domain_names") private Collection domainNames; + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + private Boolean enabled; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + private Collection resourceNames; + public UpdatePrivateEndpointRule setDomainNames(Collection domainNames) { this.domainNames = domainNames; return this; @@ -32,23 +60,86 @@ public Collection getDomainNames() { return domainNames; } + public UpdatePrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public UpdatePrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdatePrivateEndpointRule that = (UpdatePrivateEndpointRule) o; - return Objects.equals(domainNames, that.domainNames); + return Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames); + return Objects.hash(domainNames, enabled, resourceNames); } @Override public String toString() { return new ToStringer(UpdatePrivateEndpointRule.class) .add("domainNames", domainNames) + .add("enabled", enabled) + .add("resourceNames", resourceNames) .toString(); } + + UpdatePrivateEndpointRulePb toPb() { + UpdatePrivateEndpointRulePb pb = new UpdatePrivateEndpointRulePb(); + pb.setDomainNames(domainNames); + pb.setEnabled(enabled); + pb.setResourceNames(resourceNames); + + return pb; + } + + static UpdatePrivateEndpointRule fromPb(UpdatePrivateEndpointRulePb pb) { + UpdatePrivateEndpointRule model = 
new UpdatePrivateEndpointRule(); + model.setDomainNames(pb.getDomainNames()); + model.setEnabled(pb.getEnabled()); + model.setResourceNames(pb.getResourceNames()); + + return model; + } + + public static class UpdatePrivateEndpointRuleSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdatePrivateEndpointRule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdatePrivateEndpointRulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdatePrivateEndpointRuleDeserializer + extends JsonDeserializer { + @Override + public UpdatePrivateEndpointRule deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdatePrivateEndpointRulePb pb = mapper.readValue(p, UpdatePrivateEndpointRulePb.class); + return UpdatePrivateEndpointRule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRulePb.java new file mode 100755 index 000000000..454f62c30 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRulePb.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. 
+ */ +@Generated +class UpdatePrivateEndpointRulePb { + @JsonProperty("domain_names") + private Collection domainNames; + + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("resource_names") + private Collection resourceNames; + + public UpdatePrivateEndpointRulePb setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public UpdatePrivateEndpointRulePb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public UpdatePrivateEndpointRulePb setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdatePrivateEndpointRulePb that = (UpdatePrivateEndpointRulePb) o; + return Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(resourceNames, that.resourceNames); + } + + @Override + public int hashCode() { + return Objects.hash(domainNames, enabled, resourceNames); + } + + @Override + public String toString() { + return new ToStringer(UpdatePrivateEndpointRulePb.class) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("resourceNames", resourceNames) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java index 9ea1a5162..faf816fee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java @@ -4,9 +4,21 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponsePb.java new file mode 100755 index 000000000..e8375f8d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java index e6162fd5b..2d5c9e1bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = + UpdateRestrictWorkspaceAdminsSettingRequest + .UpdateRestrictWorkspaceAdminsSettingRequestSerializer.class) +@JsonDeserialize( + using = + UpdateRestrictWorkspaceAdminsSettingRequest + .UpdateRestrictWorkspaceAdminsSettingRequestDeserializer.class) public class UpdateRestrictWorkspaceAdminsSettingRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +41,9 @@ public class UpdateRestrictWorkspaceAdminsSettingRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private RestrictWorkspaceAdminsSetting setting; public UpdateRestrictWorkspaceAdminsSettingRequest setAllowMissing(Boolean allowMissing) { @@ -84,4 +98,51 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateRestrictWorkspaceAdminsSettingRequestPb toPb() { + UpdateRestrictWorkspaceAdminsSettingRequestPb pb = + new UpdateRestrictWorkspaceAdminsSettingRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateRestrictWorkspaceAdminsSettingRequest fromPb( + UpdateRestrictWorkspaceAdminsSettingRequestPb pb) { + UpdateRestrictWorkspaceAdminsSettingRequest model = + new UpdateRestrictWorkspaceAdminsSettingRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateRestrictWorkspaceAdminsSettingRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateRestrictWorkspaceAdminsSettingRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + UpdateRestrictWorkspaceAdminsSettingRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRestrictWorkspaceAdminsSettingRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateRestrictWorkspaceAdminsSettingRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRestrictWorkspaceAdminsSettingRequestPb pb = + mapper.readValue(p, UpdateRestrictWorkspaceAdminsSettingRequestPb.class); + return UpdateRestrictWorkspaceAdminsSettingRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequestPb.java new file mode 100755 index 000000000..d060846f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. 
*/ +@Generated +class UpdateRestrictWorkspaceAdminsSettingRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private RestrictWorkspaceAdminsSetting setting; + + public UpdateRestrictWorkspaceAdminsSettingRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateRestrictWorkspaceAdminsSettingRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateRestrictWorkspaceAdminsSettingRequestPb setSetting( + RestrictWorkspaceAdminsSetting setting) { + this.setting = setting; + return this; + } + + public RestrictWorkspaceAdminsSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRestrictWorkspaceAdminsSettingRequestPb that = + (UpdateRestrictWorkspaceAdminsSettingRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateRestrictWorkspaceAdminsSettingRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java index a0d263a52..1394a76e0 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details required to update a setting. */ @Generated +@JsonSerialize( + using = UpdateSqlResultsDownloadRequest.UpdateSqlResultsDownloadRequestSerializer.class) +@JsonDeserialize( + using = UpdateSqlResultsDownloadRequest.UpdateSqlResultsDownloadRequestDeserializer.class) public class UpdateSqlResultsDownloadRequest { /** This should always be set to true for Settings API. Added for AIP compliance. */ - @JsonProperty("allow_missing") private Boolean allowMissing; /** @@ -25,11 +37,9 @@ public class UpdateSqlResultsDownloadRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("field_mask") private String fieldMask; /** */ - @JsonProperty("setting") private SqlResultsDownload setting; public UpdateSqlResultsDownloadRequest setAllowMissing(Boolean allowMissing) { @@ -82,4 +92,46 @@ public String toString() { .add("setting", setting) .toString(); } + + UpdateSqlResultsDownloadRequestPb toPb() { + UpdateSqlResultsDownloadRequestPb pb = new UpdateSqlResultsDownloadRequestPb(); + pb.setAllowMissing(allowMissing); + pb.setFieldMask(fieldMask); + pb.setSetting(setting); + + return pb; + } + + static UpdateSqlResultsDownloadRequest fromPb(UpdateSqlResultsDownloadRequestPb pb) { + UpdateSqlResultsDownloadRequest model = new UpdateSqlResultsDownloadRequest(); + model.setAllowMissing(pb.getAllowMissing()); + model.setFieldMask(pb.getFieldMask()); + model.setSetting(pb.getSetting()); + + return model; + } + + public static class UpdateSqlResultsDownloadRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateSqlResultsDownloadRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateSqlResultsDownloadRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateSqlResultsDownloadRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateSqlResultsDownloadRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateSqlResultsDownloadRequestPb pb = + mapper.readValue(p, UpdateSqlResultsDownloadRequestPb.class); + return UpdateSqlResultsDownloadRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequestPb.java new file mode 100755 index 000000000..81f5c6c46 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +class UpdateSqlResultsDownloadRequestPb { + @JsonProperty("allow_missing") + private Boolean allowMissing; + + @JsonProperty("field_mask") + private String fieldMask; + + @JsonProperty("setting") + private SqlResultsDownload setting; + + public UpdateSqlResultsDownloadRequestPb setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateSqlResultsDownloadRequestPb setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateSqlResultsDownloadRequestPb setSetting(SqlResultsDownload setting) { + this.setting = setting; + return this; + } + + public SqlResultsDownload getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
UpdateSqlResultsDownloadRequestPb that = (UpdateSqlResultsDownloadRequestPb) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateSqlResultsDownloadRequestPb.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java index 3dbcb2ba5..c6ef3e1a6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java @@ -4,18 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update workspace network option */ @Generated +@JsonSerialize( + using = 
UpdateWorkspaceNetworkOptionRequest.UpdateWorkspaceNetworkOptionRequestSerializer.class) +@JsonDeserialize( + using = + UpdateWorkspaceNetworkOptionRequest.UpdateWorkspaceNetworkOptionRequestDeserializer.class) public class UpdateWorkspaceNetworkOptionRequest { /** The workspace ID. */ - @JsonIgnore private Long workspaceId; + private Long workspaceId; /** */ - @JsonProperty("workspace_network_option") private WorkspaceNetworkOption workspaceNetworkOption; public UpdateWorkspaceNetworkOptionRequest setWorkspaceId(Long workspaceId) { @@ -58,4 +70,44 @@ public String toString() { .add("workspaceNetworkOption", workspaceNetworkOption) .toString(); } + + UpdateWorkspaceNetworkOptionRequestPb toPb() { + UpdateWorkspaceNetworkOptionRequestPb pb = new UpdateWorkspaceNetworkOptionRequestPb(); + pb.setWorkspaceId(workspaceId); + pb.setWorkspaceNetworkOption(workspaceNetworkOption); + + return pb; + } + + static UpdateWorkspaceNetworkOptionRequest fromPb(UpdateWorkspaceNetworkOptionRequestPb pb) { + UpdateWorkspaceNetworkOptionRequest model = new UpdateWorkspaceNetworkOptionRequest(); + model.setWorkspaceId(pb.getWorkspaceId()); + model.setWorkspaceNetworkOption(pb.getWorkspaceNetworkOption()); + + return model; + } + + public static class UpdateWorkspaceNetworkOptionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateWorkspaceNetworkOptionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateWorkspaceNetworkOptionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateWorkspaceNetworkOptionRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateWorkspaceNetworkOptionRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateWorkspaceNetworkOptionRequestPb pb = + mapper.readValue(p, UpdateWorkspaceNetworkOptionRequestPb.class); + return UpdateWorkspaceNetworkOptionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequestPb.java new file mode 100755 index 000000000..f3143f08c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update workspace network option */ +@Generated +class UpdateWorkspaceNetworkOptionRequestPb { + @JsonIgnore private Long workspaceId; + + @JsonProperty("workspace_network_option") + private WorkspaceNetworkOption workspaceNetworkOption; + + public UpdateWorkspaceNetworkOptionRequestPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public UpdateWorkspaceNetworkOptionRequestPb setWorkspaceNetworkOption( + WorkspaceNetworkOption workspaceNetworkOption) { + this.workspaceNetworkOption = workspaceNetworkOption; + return this; + } + + public WorkspaceNetworkOption getWorkspaceNetworkOption() { + return workspaceNetworkOption; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceNetworkOptionRequestPb that = 
(UpdateWorkspaceNetworkOptionRequestPb) o; + return Objects.equals(workspaceId, that.workspaceId) + && Objects.equals(workspaceNetworkOption, that.workspaceNetworkOption); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId, workspaceNetworkOption); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceNetworkOptionRequestPb.class) + .add("workspaceId", workspaceId) + .add("workspaceNetworkOption", workspaceNetworkOption) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java index b2e71a210..bd660646d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java @@ -25,7 +25,7 @@ public WorkspaceNetworkOption getWorkspaceNetworkOptionRpc( apiClient.configuredAccountID(), request.getWorkspaceId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WorkspaceNetworkOption.class); } catch (IOException e) { @@ -43,7 +43,7 @@ public WorkspaceNetworkOption updateWorkspaceNetworkOptionRpc( try { Request req = new Request("PUT", path, apiClient.serialize(request.getWorkspaceNetworkOption())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, WorkspaceNetworkOption.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java index 796892bf0..93fc7f29b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = WorkspaceNetworkOption.WorkspaceNetworkOptionSerializer.class) +@JsonDeserialize(using = WorkspaceNetworkOption.WorkspaceNetworkOptionDeserializer.class) public class WorkspaceNetworkOption { /** * The network policy ID to apply to the workspace. This controls the network access rules for all * serverless compute resources in the workspace. Each workspace can only be linked to one policy * at a time. If no policy is explicitly assigned, the workspace will use 'default-policy'. */ - @JsonProperty("network_policy_id") private String networkPolicyId; /** The workspace ID. 
*/ - @JsonProperty("workspace_id") private Long workspaceId; public WorkspaceNetworkOption setNetworkPolicyId(String networkPolicyId) { @@ -60,4 +69,43 @@ public String toString() { .add("workspaceId", workspaceId) .toString(); } + + WorkspaceNetworkOptionPb toPb() { + WorkspaceNetworkOptionPb pb = new WorkspaceNetworkOptionPb(); + pb.setNetworkPolicyId(networkPolicyId); + pb.setWorkspaceId(workspaceId); + + return pb; + } + + static WorkspaceNetworkOption fromPb(WorkspaceNetworkOptionPb pb) { + WorkspaceNetworkOption model = new WorkspaceNetworkOption(); + model.setNetworkPolicyId(pb.getNetworkPolicyId()); + model.setWorkspaceId(pb.getWorkspaceId()); + + return model; + } + + public static class WorkspaceNetworkOptionSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceNetworkOption value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceNetworkOptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceNetworkOptionDeserializer + extends JsonDeserializer { + @Override + public WorkspaceNetworkOption deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceNetworkOptionPb pb = mapper.readValue(p, WorkspaceNetworkOptionPb.class); + return WorkspaceNetworkOption.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOptionPb.java new file mode 100755 index 000000000..73fcf17bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WorkspaceNetworkOptionPb { + @JsonProperty("network_policy_id") + private String networkPolicyId; + + @JsonProperty("workspace_id") + private Long workspaceId; + + public WorkspaceNetworkOptionPb setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + public WorkspaceNetworkOptionPb setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceNetworkOptionPb that = (WorkspaceNetworkOptionPb) o; + return Objects.equals(networkPolicyId, that.networkPolicyId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicyId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceNetworkOptionPb.class) + .add("networkPolicyId", networkPolicyId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Converters.java new file mode 100755 index 000000000..a31ba657c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.sharing; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import 
java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java index 0f8a544c2..e68963580 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java @@ -4,21 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create recipient federation policy */ @Generated +@JsonSerialize(using = CreateFederationPolicyRequest.CreateFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = CreateFederationPolicyRequest.CreateFederationPolicyRequestDeserializer.class) public class CreateFederationPolicyRequest { /** */ - @JsonProperty("policy") private FederationPolicy policy; /** * Name of the recipient. This is the name of the recipient for which the policy is being created. 
*/ - @JsonIgnore private String recipientName; + private String recipientName; public CreateFederationPolicyRequest setPolicy(FederationPolicy policy) { this.policy = policy; @@ -58,4 +68,44 @@ public String toString() { .add("recipientName", recipientName) .toString(); } + + CreateFederationPolicyRequestPb toPb() { + CreateFederationPolicyRequestPb pb = new CreateFederationPolicyRequestPb(); + pb.setPolicy(policy); + pb.setRecipientName(recipientName); + + return pb; + } + + static CreateFederationPolicyRequest fromPb(CreateFederationPolicyRequestPb pb) { + CreateFederationPolicyRequest model = new CreateFederationPolicyRequest(); + model.setPolicy(pb.getPolicy()); + model.setRecipientName(pb.getRecipientName()); + + return model; + } + + public static class CreateFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateFederationPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateFederationPolicyRequestPb pb = + mapper.readValue(p, CreateFederationPolicyRequestPb.class); + return CreateFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequestPb.java new file mode 100755 index 000000000..522a7b2f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create recipient federation policy */ +@Generated +class CreateFederationPolicyRequestPb { + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore private String recipientName; + + public CreateFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public CreateFederationPolicyRequestPb setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateFederationPolicyRequestPb that = (CreateFederationPolicyRequestPb) o; + return Objects.equals(policy, that.policy) && Objects.equals(recipientName, that.recipientName); + } + + @Override + public int hashCode() { + return Objects.hash(policy, recipientName); + 
} + + @Override + public String toString() { + return new ToStringer(CreateFederationPolicyRequestPb.class) + .add("policy", policy) + .add("recipientName", recipientName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java index f534903c8..93d603953 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateProvider.CreateProviderSerializer.class) +@JsonDeserialize(using = CreateProvider.CreateProviderDeserializer.class) public class CreateProvider { /** The delta sharing authentication type. */ - @JsonProperty("authentication_type") private AuthenticationType authenticationType; /** Description about the provider. */ - @JsonProperty("comment") private String comment; /** The name of the Provider. */ - @JsonProperty("name") private String name; /** * This field is required when the __authentication_type__ is **TOKEN**, * **OAUTH_CLIENT_CREDENTIALS** or not provided. 
*/ - @JsonProperty("recipient_profile_str") private String recipientProfileStr; public CreateProvider setAuthenticationType(AuthenticationType authenticationType) { @@ -89,4 +96,44 @@ public String toString() { .add("recipientProfileStr", recipientProfileStr) .toString(); } + + CreateProviderPb toPb() { + CreateProviderPb pb = new CreateProviderPb(); + pb.setAuthenticationType(authenticationType); + pb.setComment(comment); + pb.setName(name); + pb.setRecipientProfileStr(recipientProfileStr); + + return pb; + } + + static CreateProvider fromPb(CreateProviderPb pb) { + CreateProvider model = new CreateProvider(); + model.setAuthenticationType(pb.getAuthenticationType()); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setRecipientProfileStr(pb.getRecipientProfileStr()); + + return model; + } + + public static class CreateProviderSerializer extends JsonSerializer { + @Override + public void serialize(CreateProvider value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateProviderPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateProviderDeserializer extends JsonDeserializer { + @Override + public CreateProvider deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateProviderPb pb = mapper.readValue(p, CreateProviderPb.class); + return CreateProvider.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProviderPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProviderPb.java new file mode 100755 index 000000000..7a2359f0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProviderPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateProviderPb { + @JsonProperty("authentication_type") + private AuthenticationType authenticationType; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("recipient_profile_str") + private String recipientProfileStr; + + public CreateProviderPb setAuthenticationType(AuthenticationType authenticationType) { + this.authenticationType = authenticationType; + return this; + } + + public AuthenticationType getAuthenticationType() { + return authenticationType; + } + + public CreateProviderPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateProviderPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateProviderPb setRecipientProfileStr(String recipientProfileStr) { + this.recipientProfileStr = recipientProfileStr; + return this; + } + + public String getRecipientProfileStr() { + return recipientProfileStr; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProviderPb that = (CreateProviderPb) o; + return Objects.equals(authenticationType, that.authenticationType) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(recipientProfileStr, that.recipientProfileStr); + } + + @Override + public int hashCode() { + return Objects.hash(authenticationType, comment, name, recipientProfileStr); + } + + @Override + public String toString() { + return new ToStringer(CreateProviderPb.class) + .add("authenticationType", authenticationType) + 
.add("comment", comment) + .add("name", name) + .add("recipientProfileStr", recipientProfileStr) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java index b7589d41f..53c87a90f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRecipient.CreateRecipientSerializer.class) +@JsonDeserialize(using = CreateRecipient.CreateRecipientDeserializer.class) public class CreateRecipient { /** The delta sharing authentication type. */ - @JsonProperty("authentication_type") private AuthenticationType authenticationType; /** Description about the recipient. */ - @JsonProperty("comment") private String comment; /** @@ -22,23 +31,18 @@ public class CreateRecipient { * present when the __authentication_type__ is **DATABRICKS**. The identifier is of format * __cloud__:__region__:__metastore-uuid__. 
*/ - @JsonProperty("data_recipient_global_metastore_id") private String dataRecipientGlobalMetastoreId; /** Expiration timestamp of the token, in epoch milliseconds. */ - @JsonProperty("expiration_time") private Long expirationTime; /** IP Access List */ - @JsonProperty("ip_access_list") private IpAccessList ipAccessList; /** Name of Recipient. */ - @JsonProperty("name") private String name; /** Username of the recipient owner. */ - @JsonProperty("owner") private String owner; /** @@ -46,14 +50,12 @@ public class CreateRecipient { * specified properties will override the existing properties. To add and remove properties, one * would need to perform a read-modify-write. */ - @JsonProperty("properties_kvpairs") private SecurablePropertiesKvPairs propertiesKvpairs; /** * The one-time sharing code provided by the data recipient. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("sharing_code") private String sharingCode; public CreateRecipient setAuthenticationType(AuthenticationType authenticationType) { @@ -181,4 +183,54 @@ public String toString() { .add("sharingCode", sharingCode) .toString(); } + + CreateRecipientPb toPb() { + CreateRecipientPb pb = new CreateRecipientPb(); + pb.setAuthenticationType(authenticationType); + pb.setComment(comment); + pb.setDataRecipientGlobalMetastoreId(dataRecipientGlobalMetastoreId); + pb.setExpirationTime(expirationTime); + pb.setIpAccessList(ipAccessList); + pb.setName(name); + pb.setOwner(owner); + pb.setPropertiesKvpairs(propertiesKvpairs); + pb.setSharingCode(sharingCode); + + return pb; + } + + static CreateRecipient fromPb(CreateRecipientPb pb) { + CreateRecipient model = new CreateRecipient(); + model.setAuthenticationType(pb.getAuthenticationType()); + model.setComment(pb.getComment()); + model.setDataRecipientGlobalMetastoreId(pb.getDataRecipientGlobalMetastoreId()); + model.setExpirationTime(pb.getExpirationTime()); + model.setIpAccessList(pb.getIpAccessList()); + 
model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPropertiesKvpairs(pb.getPropertiesKvpairs()); + model.setSharingCode(pb.getSharingCode()); + + return model; + } + + public static class CreateRecipientSerializer extends JsonSerializer { + @Override + public void serialize(CreateRecipient value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRecipientPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRecipientDeserializer extends JsonDeserializer { + @Override + public CreateRecipient deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRecipientPb pb = mapper.readValue(p, CreateRecipientPb.class); + return CreateRecipient.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipientPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipientPb.java new file mode 100755 index 000000000..b4fad7fd0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipientPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateRecipientPb { + @JsonProperty("authentication_type") + private AuthenticationType authenticationType; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("data_recipient_global_metastore_id") + private String dataRecipientGlobalMetastoreId; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("ip_access_list") + private IpAccessList ipAccessList; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties_kvpairs") + private SecurablePropertiesKvPairs propertiesKvpairs; + + @JsonProperty("sharing_code") + private String sharingCode; + + public CreateRecipientPb setAuthenticationType(AuthenticationType authenticationType) { + this.authenticationType = authenticationType; + return this; + } + + public AuthenticationType getAuthenticationType() { + return authenticationType; + } + + public CreateRecipientPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateRecipientPb setDataRecipientGlobalMetastoreId( + String dataRecipientGlobalMetastoreId) { + this.dataRecipientGlobalMetastoreId = dataRecipientGlobalMetastoreId; + return this; + } + + public String getDataRecipientGlobalMetastoreId() { + return dataRecipientGlobalMetastoreId; + } + + public CreateRecipientPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public CreateRecipientPb setIpAccessList(IpAccessList ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessList getIpAccessList() { + return 
ipAccessList; + } + + public CreateRecipientPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateRecipientPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public CreateRecipientPb setPropertiesKvpairs(SecurablePropertiesKvPairs propertiesKvpairs) { + this.propertiesKvpairs = propertiesKvpairs; + return this; + } + + public SecurablePropertiesKvPairs getPropertiesKvpairs() { + return propertiesKvpairs; + } + + public CreateRecipientPb setSharingCode(String sharingCode) { + this.sharingCode = sharingCode; + return this; + } + + public String getSharingCode() { + return sharingCode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRecipientPb that = (CreateRecipientPb) o; + return Objects.equals(authenticationType, that.authenticationType) + && Objects.equals(comment, that.comment) + && Objects.equals(dataRecipientGlobalMetastoreId, that.dataRecipientGlobalMetastoreId) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(ipAccessList, that.ipAccessList) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(propertiesKvpairs, that.propertiesKvpairs) + && Objects.equals(sharingCode, that.sharingCode); + } + + @Override + public int hashCode() { + return Objects.hash( + authenticationType, + comment, + dataRecipientGlobalMetastoreId, + expirationTime, + ipAccessList, + name, + owner, + propertiesKvpairs, + sharingCode); + } + + @Override + public String toString() { + return new ToStringer(CreateRecipientPb.class) + .add("authenticationType", authenticationType) + .add("comment", comment) + .add("dataRecipientGlobalMetastoreId", dataRecipientGlobalMetastoreId) + .add("expirationTime", expirationTime) + .add("ipAccessList", ipAccessList) + .add("name", 
name) + .add("owner", owner) + .add("propertiesKvpairs", propertiesKvpairs) + .add("sharingCode", sharingCode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java index 012bc8f99..772ee33ff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateShare.CreateShareSerializer.class) +@JsonDeserialize(using = CreateShare.CreateShareDeserializer.class) public class CreateShare { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Name of the share. */ - @JsonProperty("name") private String name; /** Storage root URL for the share. 
*/ - @JsonProperty("storage_root") private String storageRoot; public CreateShare setComment(String comment) { @@ -71,4 +79,41 @@ public String toString() { .add("storageRoot", storageRoot) .toString(); } + + CreateSharePb toPb() { + CreateSharePb pb = new CreateSharePb(); + pb.setComment(comment); + pb.setName(name); + pb.setStorageRoot(storageRoot); + + return pb; + } + + static CreateShare fromPb(CreateSharePb pb) { + CreateShare model = new CreateShare(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setStorageRoot(pb.getStorageRoot()); + + return model; + } + + public static class CreateShareSerializer extends JsonSerializer { + @Override + public void serialize(CreateShare value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateSharePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateShareDeserializer extends JsonDeserializer { + @Override + public CreateShare deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateSharePb pb = mapper.readValue(p, CreateSharePb.class); + return CreateShare.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateSharePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateSharePb.java new file mode 100755 index 000000000..2ba60d9c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateSharePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateSharePb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("storage_root") + private String storageRoot; + + public CreateSharePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateSharePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateSharePb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSharePb that = (CreateSharePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(storageRoot, that.storageRoot); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, storageRoot); + } + + @Override + public String toString() { + return new ToStringer(CreateSharePb.class) + .add("comment", comment) + .add("name", name) + .add("storageRoot", storageRoot) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java index 1453ea54e..5cba5b255 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java @@ -4,19 +4,31 
@@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete recipient federation policy */ @Generated +@JsonSerialize(using = DeleteFederationPolicyRequest.DeleteFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = DeleteFederationPolicyRequest.DeleteFederationPolicyRequestDeserializer.class) public class DeleteFederationPolicyRequest { /** Name of the policy. This is the name of the policy to be deleted. */ - @JsonIgnore private String name; + private String name; /** * Name of the recipient. This is the name of the recipient for which the policy is being deleted. 
*/ - @JsonIgnore private String recipientName; + private String recipientName; public DeleteFederationPolicyRequest setName(String name) { this.name = name; @@ -56,4 +68,44 @@ public String toString() { .add("recipientName", recipientName) .toString(); } + + DeleteFederationPolicyRequestPb toPb() { + DeleteFederationPolicyRequestPb pb = new DeleteFederationPolicyRequestPb(); + pb.setName(name); + pb.setRecipientName(recipientName); + + return pb; + } + + static DeleteFederationPolicyRequest fromPb(DeleteFederationPolicyRequestPb pb) { + DeleteFederationPolicyRequest model = new DeleteFederationPolicyRequest(); + model.setName(pb.getName()); + model.setRecipientName(pb.getRecipientName()); + + return model; + } + + public static class DeleteFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteFederationPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteFederationPolicyRequestPb pb = + mapper.readValue(p, DeleteFederationPolicyRequestPb.class); + return DeleteFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequestPb.java new file mode 100755 index 000000000..a422ddf56 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete recipient federation policy */ +@Generated +class DeleteFederationPolicyRequestPb { + @JsonIgnore private String name; + + @JsonIgnore private String recipientName; + + public DeleteFederationPolicyRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeleteFederationPolicyRequestPb setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteFederationPolicyRequestPb that = (DeleteFederationPolicyRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(recipientName, that.recipientName); + } + + @Override + public int hashCode() { + return Objects.hash(name, recipientName); + } + + @Override + public String toString() { + return new ToStringer(DeleteFederationPolicyRequestPb.class) + 
.add("name", name) + .add("recipientName", recipientName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java index ce8ef5901..f831741af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a provider */ @Generated +@JsonSerialize(using = DeleteProviderRequest.DeleteProviderRequestSerializer.class) +@JsonDeserialize(using = DeleteProviderRequest.DeleteProviderRequestDeserializer.class) public class DeleteProviderRequest { /** Name of the provider. 
*/ - @JsonIgnore private String name; + private String name; public DeleteProviderRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteProviderRequest.class).add("name", name).toString(); } + + DeleteProviderRequestPb toPb() { + DeleteProviderRequestPb pb = new DeleteProviderRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteProviderRequest fromPb(DeleteProviderRequestPb pb) { + DeleteProviderRequest model = new DeleteProviderRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteProviderRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteProviderRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteProviderRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteProviderRequestPb pb = mapper.readValue(p, DeleteProviderRequestPb.class); + return DeleteProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequestPb.java new file mode 100755 index 000000000..e24b82f92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a provider */ +@Generated +class DeleteProviderRequestPb { + @JsonIgnore private String name; + + public DeleteProviderRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteProviderRequestPb that = (DeleteProviderRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteProviderRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java index 5734ea84f..6cb378deb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a share recipient */ @Generated +@JsonSerialize(using = DeleteRecipientRequest.DeleteRecipientRequestSerializer.class) +@JsonDeserialize(using = DeleteRecipientRequest.DeleteRecipientRequestDeserializer.class) public class DeleteRecipientRequest { /** Name of the recipient. */ - @JsonIgnore private String name; + private String name; public DeleteRecipientRequest setName(String name) { this.name = name; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRecipientRequest.class).add("name", name).toString(); } + + DeleteRecipientRequestPb toPb() { + DeleteRecipientRequestPb pb = new DeleteRecipientRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteRecipientRequest fromPb(DeleteRecipientRequestPb pb) { + DeleteRecipientRequest model = new DeleteRecipientRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteRecipientRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteRecipientRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRecipientRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRecipientRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteRecipientRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRecipientRequestPb pb = mapper.readValue(p, DeleteRecipientRequestPb.class); + return DeleteRecipientRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequestPb.java new file mode 100755 index 000000000..8e879088b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a share recipient */ +@Generated +class DeleteRecipientRequestPb { + @JsonIgnore private String name; + + public DeleteRecipientRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRecipientRequestPb that = (DeleteRecipientRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteRecipientRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java index 56f100810..b1c448de0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponsePb.java new file mode 100755 index 000000000..c5c54bf5b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java index e6276a311..1e73ddb61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a share */ @Generated +@JsonSerialize(using = DeleteShareRequest.DeleteShareRequestSerializer.class) +@JsonDeserialize(using = DeleteShareRequest.DeleteShareRequestDeserializer.class) public class DeleteShareRequest { /** The name of the share. */ - @JsonIgnore private String name; + private String name; public DeleteShareRequest setName(String name) { this.name = name; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteShareRequest.class).add("name", name).toString(); } + + DeleteShareRequestPb toPb() { + DeleteShareRequestPb pb = new DeleteShareRequestPb(); + pb.setName(name); + + return pb; + } + + static DeleteShareRequest fromPb(DeleteShareRequestPb pb) { + DeleteShareRequest model = new DeleteShareRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class DeleteShareRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteShareRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteShareRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteShareRequestDeserializer extends JsonDeserializer { + @Override + public DeleteShareRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteShareRequestPb pb = mapper.readValue(p, DeleteShareRequestPb.class); + return DeleteShareRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequestPb.java new file mode 100755 index 000000000..f54d33a9b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a share */ +@Generated +class DeleteShareRequestPb { + @JsonIgnore private String name; + + public DeleteShareRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteShareRequestPb that = (DeleteShareRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteShareRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java index 93166cac2..691e7c333 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Represents a UC dependency. */ @Generated +@JsonSerialize(using = DeltaSharingDependency.DeltaSharingDependencySerializer.class) +@JsonDeserialize(using = DeltaSharingDependency.DeltaSharingDependencyDeserializer.class) public class DeltaSharingDependency { /** A Function in UC as a dependency. */ - @JsonProperty("function") private DeltaSharingFunctionDependency function; /** A Table in UC as a dependency. 
*/ - @JsonProperty("table") private DeltaSharingTableDependency table; public DeltaSharingDependency setFunction(DeltaSharingFunctionDependency function) { @@ -56,4 +65,43 @@ public String toString() { .add("table", table) .toString(); } + + DeltaSharingDependencyPb toPb() { + DeltaSharingDependencyPb pb = new DeltaSharingDependencyPb(); + pb.setFunction(function); + pb.setTable(table); + + return pb; + } + + static DeltaSharingDependency fromPb(DeltaSharingDependencyPb pb) { + DeltaSharingDependency model = new DeltaSharingDependency(); + model.setFunction(pb.getFunction()); + model.setTable(pb.getTable()); + + return model; + } + + public static class DeltaSharingDependencySerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaSharingDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSharingDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSharingDependencyDeserializer + extends JsonDeserializer { + @Override + public DeltaSharingDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSharingDependencyPb pb = mapper.readValue(p, DeltaSharingDependencyPb.class); + return DeltaSharingDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java index b4a4a0b23..c7665722a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java @@ -4,15 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Represents a list of dependencies. */ @Generated +@JsonSerialize(using = DeltaSharingDependencyList.DeltaSharingDependencyListSerializer.class) +@JsonDeserialize(using = DeltaSharingDependencyList.DeltaSharingDependencyListDeserializer.class) public class DeltaSharingDependencyList { /** An array of Dependency. 
*/ - @JsonProperty("dependencies") private Collection dependencies; public DeltaSharingDependencyList setDependencies( @@ -44,4 +54,41 @@ public String toString() { .add("dependencies", dependencies) .toString(); } + + DeltaSharingDependencyListPb toPb() { + DeltaSharingDependencyListPb pb = new DeltaSharingDependencyListPb(); + pb.setDependencies(dependencies); + + return pb; + } + + static DeltaSharingDependencyList fromPb(DeltaSharingDependencyListPb pb) { + DeltaSharingDependencyList model = new DeltaSharingDependencyList(); + model.setDependencies(pb.getDependencies()); + + return model; + } + + public static class DeltaSharingDependencyListSerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaSharingDependencyList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSharingDependencyListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSharingDependencyListDeserializer + extends JsonDeserializer { + @Override + public DeltaSharingDependencyList deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSharingDependencyListPb pb = mapper.readValue(p, DeltaSharingDependencyListPb.class); + return DeltaSharingDependencyList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyListPb.java new file mode 100755 index 000000000..24ac6693a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyListPb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Represents a list of dependencies. */ +@Generated +class DeltaSharingDependencyListPb { + @JsonProperty("dependencies") + private Collection dependencies; + + public DeltaSharingDependencyListPb setDependencies( + Collection dependencies) { + this.dependencies = dependencies; + return this; + } + + public Collection getDependencies() { + return dependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingDependencyListPb that = (DeltaSharingDependencyListPb) o; + return Objects.equals(dependencies, that.dependencies); + } + + @Override + public int hashCode() { + return Objects.hash(dependencies); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingDependencyListPb.class) + .add("dependencies", dependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyPb.java new file mode 100755 index 000000000..f926c1361 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a UC dependency. 
*/ +@Generated +class DeltaSharingDependencyPb { + @JsonProperty("function") + private DeltaSharingFunctionDependency function; + + @JsonProperty("table") + private DeltaSharingTableDependency table; + + public DeltaSharingDependencyPb setFunction(DeltaSharingFunctionDependency function) { + this.function = function; + return this; + } + + public DeltaSharingFunctionDependency getFunction() { + return function; + } + + public DeltaSharingDependencyPb setTable(DeltaSharingTableDependency table) { + this.table = table; + return this; + } + + public DeltaSharingTableDependency getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingDependencyPb that = (DeltaSharingDependencyPb) o; + return Objects.equals(function, that.function) && Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(function, table); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingDependencyPb.class) + .add("function", function) + .add("table", table) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java index 6f59535c9..83db72367 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java @@ -4,74 +4,69 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DeltaSharingFunction.DeltaSharingFunctionSerializer.class) +@JsonDeserialize(using = DeltaSharingFunction.DeltaSharingFunctionDeserializer.class) public class DeltaSharingFunction { /** The aliass of registered model. */ - @JsonProperty("aliases") private Collection aliases; /** The comment of the function. */ - @JsonProperty("comment") private String comment; /** The data type of the function. */ - @JsonProperty("data_type") private ColumnTypeName dataType; /** The dependency list of the function. */ - @JsonProperty("dependency_list") private DeltaSharingDependencyList dependencyList; /** The full data type of the function. */ - @JsonProperty("full_data_type") private String fullDataType; /** The id of the function. */ - @JsonProperty("id") private String id; /** The function parameter information. */ - @JsonProperty("input_params") private FunctionParameterInfos inputParams; /** The name of the function. */ - @JsonProperty("name") private String name; /** The properties of the function. */ - @JsonProperty("properties") private String properties; /** The routine definition of the function. */ - @JsonProperty("routine_definition") private String routineDefinition; /** The name of the schema that the function belongs to. */ - @JsonProperty("schema") private String schema; /** The securable kind of the function. */ - @JsonProperty("securable_kind") private SharedSecurableKind securableKind; /** The name of the share that the function belongs to. */ - @JsonProperty("share") private String share; /** The id of the share that the function belongs to. 
*/ - @JsonProperty("share_id") private String shareId; /** The storage location of the function. */ - @JsonProperty("storage_location") private String storageLocation; /** The tags of the function. */ - @JsonProperty("tags") private Collection tags; public DeltaSharingFunction setAliases(Collection aliases) { @@ -284,4 +279,70 @@ public String toString() { .add("tags", tags) .toString(); } + + DeltaSharingFunctionPb toPb() { + DeltaSharingFunctionPb pb = new DeltaSharingFunctionPb(); + pb.setAliases(aliases); + pb.setComment(comment); + pb.setDataType(dataType); + pb.setDependencyList(dependencyList); + pb.setFullDataType(fullDataType); + pb.setId(id); + pb.setInputParams(inputParams); + pb.setName(name); + pb.setProperties(properties); + pb.setRoutineDefinition(routineDefinition); + pb.setSchema(schema); + pb.setSecurableKind(securableKind); + pb.setShare(share); + pb.setShareId(shareId); + pb.setStorageLocation(storageLocation); + pb.setTags(tags); + + return pb; + } + + static DeltaSharingFunction fromPb(DeltaSharingFunctionPb pb) { + DeltaSharingFunction model = new DeltaSharingFunction(); + model.setAliases(pb.getAliases()); + model.setComment(pb.getComment()); + model.setDataType(pb.getDataType()); + model.setDependencyList(pb.getDependencyList()); + model.setFullDataType(pb.getFullDataType()); + model.setId(pb.getId()); + model.setInputParams(pb.getInputParams()); + model.setName(pb.getName()); + model.setProperties(pb.getProperties()); + model.setRoutineDefinition(pb.getRoutineDefinition()); + model.setSchema(pb.getSchema()); + model.setSecurableKind(pb.getSecurableKind()); + model.setShare(pb.getShare()); + model.setShareId(pb.getShareId()); + model.setStorageLocation(pb.getStorageLocation()); + model.setTags(pb.getTags()); + + return model; + } + + public static class DeltaSharingFunctionSerializer extends JsonSerializer { + @Override + public void serialize( + DeltaSharingFunction value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + DeltaSharingFunctionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSharingFunctionDeserializer + extends JsonDeserializer { + @Override + public DeltaSharingFunction deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSharingFunctionPb pb = mapper.readValue(p, DeltaSharingFunctionPb.class); + return DeltaSharingFunction.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java index 33c02ebb8..4fa084ba2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A Function in UC as a dependency. 
*/ @Generated +@JsonSerialize( + using = DeltaSharingFunctionDependency.DeltaSharingFunctionDependencySerializer.class) +@JsonDeserialize( + using = DeltaSharingFunctionDependency.DeltaSharingFunctionDependencyDeserializer.class) public class DeltaSharingFunctionDependency { /** */ - @JsonProperty("function_name") private String functionName; /** */ - @JsonProperty("schema_name") private String schemaName; public DeltaSharingFunctionDependency setFunctionName(String functionName) { @@ -57,4 +68,44 @@ public String toString() { .add("schemaName", schemaName) .toString(); } + + DeltaSharingFunctionDependencyPb toPb() { + DeltaSharingFunctionDependencyPb pb = new DeltaSharingFunctionDependencyPb(); + pb.setFunctionName(functionName); + pb.setSchemaName(schemaName); + + return pb; + } + + static DeltaSharingFunctionDependency fromPb(DeltaSharingFunctionDependencyPb pb) { + DeltaSharingFunctionDependency model = new DeltaSharingFunctionDependency(); + model.setFunctionName(pb.getFunctionName()); + model.setSchemaName(pb.getSchemaName()); + + return model; + } + + public static class DeltaSharingFunctionDependencySerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaSharingFunctionDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSharingFunctionDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSharingFunctionDependencyDeserializer + extends JsonDeserializer { + @Override + public DeltaSharingFunctionDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSharingFunctionDependencyPb pb = + mapper.readValue(p, DeltaSharingFunctionDependencyPb.class); + return DeltaSharingFunctionDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependencyPb.java new file mode 100755 index 000000000..0f02bc434 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependencyPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A Function in UC as a dependency. */ +@Generated +class DeltaSharingFunctionDependencyPb { + @JsonProperty("function_name") + private String functionName; + + @JsonProperty("schema_name") + private String schemaName; + + public DeltaSharingFunctionDependencyPb setFunctionName(String functionName) { + this.functionName = functionName; + return this; + } + + public String getFunctionName() { + return functionName; + } + + public DeltaSharingFunctionDependencyPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingFunctionDependencyPb that = (DeltaSharingFunctionDependencyPb) o; + return Objects.equals(functionName, that.functionName) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(functionName, schemaName); + } + + @Override 
+ public String toString() { + return new ToStringer(DeltaSharingFunctionDependencyPb.class) + .add("functionName", functionName) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionPb.java new file mode 100755 index 000000000..bea37e5cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionPb.java @@ -0,0 +1,271 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DeltaSharingFunctionPb { + @JsonProperty("aliases") + private Collection aliases; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("data_type") + private ColumnTypeName dataType; + + @JsonProperty("dependency_list") + private DeltaSharingDependencyList dependencyList; + + @JsonProperty("full_data_type") + private String fullDataType; + + @JsonProperty("id") + private String id; + + @JsonProperty("input_params") + private FunctionParameterInfos inputParams; + + @JsonProperty("name") + private String name; + + @JsonProperty("properties") + private String properties; + + @JsonProperty("routine_definition") + private String routineDefinition; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("securable_kind") + private SharedSecurableKind securableKind; + + @JsonProperty("share") + private String share; + + @JsonProperty("share_id") + private String shareId; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("tags") + private Collection tags; + + public 
DeltaSharingFunctionPb setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public DeltaSharingFunctionPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public DeltaSharingFunctionPb setDataType(ColumnTypeName dataType) { + this.dataType = dataType; + return this; + } + + public ColumnTypeName getDataType() { + return dataType; + } + + public DeltaSharingFunctionPb setDependencyList(DeltaSharingDependencyList dependencyList) { + this.dependencyList = dependencyList; + return this; + } + + public DeltaSharingDependencyList getDependencyList() { + return dependencyList; + } + + public DeltaSharingFunctionPb setFullDataType(String fullDataType) { + this.fullDataType = fullDataType; + return this; + } + + public String getFullDataType() { + return fullDataType; + } + + public DeltaSharingFunctionPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public DeltaSharingFunctionPb setInputParams(FunctionParameterInfos inputParams) { + this.inputParams = inputParams; + return this; + } + + public FunctionParameterInfos getInputParams() { + return inputParams; + } + + public DeltaSharingFunctionPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DeltaSharingFunctionPb setProperties(String properties) { + this.properties = properties; + return this; + } + + public String getProperties() { + return properties; + } + + public DeltaSharingFunctionPb setRoutineDefinition(String routineDefinition) { + this.routineDefinition = routineDefinition; + return this; + } + + public String getRoutineDefinition() { + return routineDefinition; + } + + public DeltaSharingFunctionPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; 
+ } + + public DeltaSharingFunctionPb setSecurableKind(SharedSecurableKind securableKind) { + this.securableKind = securableKind; + return this; + } + + public SharedSecurableKind getSecurableKind() { + return securableKind; + } + + public DeltaSharingFunctionPb setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public DeltaSharingFunctionPb setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public DeltaSharingFunctionPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public DeltaSharingFunctionPb setTags( + Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingFunctionPb that = (DeltaSharingFunctionPb) o; + return Objects.equals(aliases, that.aliases) + && Objects.equals(comment, that.comment) + && Objects.equals(dataType, that.dataType) + && Objects.equals(dependencyList, that.dependencyList) + && Objects.equals(fullDataType, that.fullDataType) + && Objects.equals(id, that.id) + && Objects.equals(inputParams, that.inputParams) + && Objects.equals(name, that.name) + && Objects.equals(properties, that.properties) + && Objects.equals(routineDefinition, that.routineDefinition) + && Objects.equals(schema, that.schema) + && Objects.equals(securableKind, that.securableKind) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + aliases, + comment, + dataType, + dependencyList, + fullDataType, 
+ id, + inputParams, + name, + properties, + routineDefinition, + schema, + securableKind, + share, + shareId, + storageLocation, + tags); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingFunctionPb.class) + .add("aliases", aliases) + .add("comment", comment) + .add("dataType", dataType) + .add("dependencyList", dependencyList) + .add("fullDataType", fullDataType) + .add("id", id) + .add("inputParams", inputParams) + .add("name", name) + .add("properties", properties) + .add("routineDefinition", routineDefinition) + .add("schema", schema) + .add("securableKind", securableKind) + .add("share", share) + .add("shareId", shareId) + .add("storageLocation", storageLocation) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java index 96bc1c4b3..9ca467d58 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A Table in UC as a dependency. 
*/ @Generated +@JsonSerialize(using = DeltaSharingTableDependency.DeltaSharingTableDependencySerializer.class) +@JsonDeserialize(using = DeltaSharingTableDependency.DeltaSharingTableDependencyDeserializer.class) public class DeltaSharingTableDependency { /** */ - @JsonProperty("schema_name") private String schemaName; /** */ - @JsonProperty("table_name") private String tableName; public DeltaSharingTableDependency setSchemaName(String schemaName) { @@ -56,4 +65,43 @@ public String toString() { .add("tableName", tableName) .toString(); } + + DeltaSharingTableDependencyPb toPb() { + DeltaSharingTableDependencyPb pb = new DeltaSharingTableDependencyPb(); + pb.setSchemaName(schemaName); + pb.setTableName(tableName); + + return pb; + } + + static DeltaSharingTableDependency fromPb(DeltaSharingTableDependencyPb pb) { + DeltaSharingTableDependency model = new DeltaSharingTableDependency(); + model.setSchemaName(pb.getSchemaName()); + model.setTableName(pb.getTableName()); + + return model; + } + + public static class DeltaSharingTableDependencySerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaSharingTableDependency value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSharingTableDependencyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSharingTableDependencyDeserializer + extends JsonDeserializer { + @Override + public DeltaSharingTableDependency deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSharingTableDependencyPb pb = mapper.readValue(p, DeltaSharingTableDependencyPb.class); + return DeltaSharingTableDependency.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependencyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependencyPb.java new file mode 100755 index 000000000..f0d658da6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependencyPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A Table in UC as a dependency. */ +@Generated +class DeltaSharingTableDependencyPb { + @JsonProperty("schema_name") + private String schemaName; + + @JsonProperty("table_name") + private String tableName; + + public DeltaSharingTableDependencyPb setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public DeltaSharingTableDependencyPb setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingTableDependencyPb that = (DeltaSharingTableDependencyPb) o; + return Objects.equals(schemaName, that.schemaName) && Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(schemaName, tableName); + } + + @Override + public String toString() { + return new 
ToStringer(DeltaSharingTableDependencyPb.class) + .add("schemaName", schemaName) + .add("tableName", tableName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java index b8f41b300..9f5bd8bea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java @@ -4,36 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = FederationPolicy.FederationPolicySerializer.class) +@JsonDeserialize(using = FederationPolicy.FederationPolicyDeserializer.class) public class FederationPolicy { /** Description of the policy. This is a user-provided description. */ - @JsonProperty("comment") private String comment; /** System-generated timestamp indicating when the policy was created. */ - @JsonProperty("create_time") private String createTime; /** Unique, immutable system-generated identifier for the federation policy. */ - @JsonProperty("id") private String id; /** * Name of the federation policy. A recipient can have multiple policies with different names. 
The * name must contain only lowercase alphanumeric characters, numbers, and hyphens. */ - @JsonProperty("name") private String name; /** Specifies the policy to use for validating OIDC claims in the federated tokens. */ - @JsonProperty("oidc_policy") private OidcFederationPolicy oidcPolicy; /** System-generated timestamp indicating when the policy was last updated. */ - @JsonProperty("update_time") private String updateTime; public FederationPolicy setComment(String comment) { @@ -119,4 +124,48 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + FederationPolicyPb toPb() { + FederationPolicyPb pb = new FederationPolicyPb(); + pb.setComment(comment); + pb.setCreateTime(createTime); + pb.setId(id); + pb.setName(name); + pb.setOidcPolicy(oidcPolicy); + pb.setUpdateTime(updateTime); + + return pb; + } + + static FederationPolicy fromPb(FederationPolicyPb pb) { + FederationPolicy model = new FederationPolicy(); + model.setComment(pb.getComment()); + model.setCreateTime(pb.getCreateTime()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setOidcPolicy(pb.getOidcPolicy()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class FederationPolicySerializer extends JsonSerializer { + @Override + public void serialize(FederationPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FederationPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FederationPolicyDeserializer extends JsonDeserializer { + @Override + public FederationPolicy deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FederationPolicyPb pb = mapper.readValue(p, FederationPolicyPb.class); + return FederationPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicyPb.java new file mode 100755 index 000000000..7bc205ac1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicyPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class FederationPolicyPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("oidc_policy") + private OidcFederationPolicy oidcPolicy; + + @JsonProperty("update_time") + private String updateTime; + + public FederationPolicyPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public FederationPolicyPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public FederationPolicyPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public FederationPolicyPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FederationPolicyPb setOidcPolicy(OidcFederationPolicy oidcPolicy) { + this.oidcPolicy = oidcPolicy; + return this; + } + + public 
OidcFederationPolicy getOidcPolicy() { + return oidcPolicy; + } + + public FederationPolicyPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FederationPolicyPb that = (FederationPolicyPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createTime, that.createTime) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(oidcPolicy, that.oidcPolicy) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash(comment, createTime, id, name, oidcPolicy, updateTime); + } + + @Override + public String toString() { + return new ToStringer(FederationPolicyPb.class) + .add("comment", comment) + .add("createTime", createTime) + .add("id", id) + .add("name", name) + .add("oidcPolicy", oidcPolicy) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java index 81b11e045..c771d8055 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java @@ -4,60 +4,59 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** * Represents a parameter of a function. The same message is used for both input and output columns. */ @Generated +@JsonSerialize(using = FunctionParameterInfo.FunctionParameterInfoSerializer.class) +@JsonDeserialize(using = FunctionParameterInfo.FunctionParameterInfoDeserializer.class) public class FunctionParameterInfo { /** The comment of the parameter. */ - @JsonProperty("comment") private String comment; /** The name of the parameter. */ - @JsonProperty("name") private String name; /** The default value of the parameter. */ - @JsonProperty("parameter_default") private String parameterDefault; /** The mode of the function parameter. */ - @JsonProperty("parameter_mode") private FunctionParameterMode parameterMode; /** The type of the function parameter. */ - @JsonProperty("parameter_type") private FunctionParameterType parameterType; /** The position of the parameter. */ - @JsonProperty("position") private Long position; /** The interval type of the parameter type. */ - @JsonProperty("type_interval_type") private String typeIntervalType; /** The type of the parameter in JSON format. */ - @JsonProperty("type_json") private String typeJson; /** The type of the parameter in Enum format. */ - @JsonProperty("type_name") private ColumnTypeName typeName; /** The precision of the parameter type. */ - @JsonProperty("type_precision") private Long typePrecision; /** The scale of the parameter type. */ - @JsonProperty("type_scale") private Long typeScale; /** The type of the parameter in text format. 
*/ - @JsonProperty("type_text") private String typeText; public FunctionParameterInfo setComment(String comment) { @@ -221,4 +220,63 @@ public String toString() { .add("typeText", typeText) .toString(); } + + FunctionParameterInfoPb toPb() { + FunctionParameterInfoPb pb = new FunctionParameterInfoPb(); + pb.setComment(comment); + pb.setName(name); + pb.setParameterDefault(parameterDefault); + pb.setParameterMode(parameterMode); + pb.setParameterType(parameterType); + pb.setPosition(position); + pb.setTypeIntervalType(typeIntervalType); + pb.setTypeJson(typeJson); + pb.setTypeName(typeName); + pb.setTypePrecision(typePrecision); + pb.setTypeScale(typeScale); + pb.setTypeText(typeText); + + return pb; + } + + static FunctionParameterInfo fromPb(FunctionParameterInfoPb pb) { + FunctionParameterInfo model = new FunctionParameterInfo(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setParameterDefault(pb.getParameterDefault()); + model.setParameterMode(pb.getParameterMode()); + model.setParameterType(pb.getParameterType()); + model.setPosition(pb.getPosition()); + model.setTypeIntervalType(pb.getTypeIntervalType()); + model.setTypeJson(pb.getTypeJson()); + model.setTypeName(pb.getTypeName()); + model.setTypePrecision(pb.getTypePrecision()); + model.setTypeScale(pb.getTypeScale()); + model.setTypeText(pb.getTypeText()); + + return model; + } + + public static class FunctionParameterInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + FunctionParameterInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionParameterInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionParameterInfoDeserializer + extends JsonDeserializer { + @Override + public FunctionParameterInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionParameterInfoPb pb = mapper.readValue(p, FunctionParameterInfoPb.class); + return FunctionParameterInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfoPb.java new file mode 100755 index 000000000..7e7321352 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfoPb.java @@ -0,0 +1,212 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Represents a parameter of a function. The same message is used for both input and output columns. + */ +@Generated +class FunctionParameterInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("name") + private String name; + + @JsonProperty("parameter_default") + private String parameterDefault; + + @JsonProperty("parameter_mode") + private FunctionParameterMode parameterMode; + + @JsonProperty("parameter_type") + private FunctionParameterType parameterType; + + @JsonProperty("position") + private Long position; + + @JsonProperty("type_interval_type") + private String typeIntervalType; + + @JsonProperty("type_json") + private String typeJson; + + @JsonProperty("type_name") + private ColumnTypeName typeName; + + @JsonProperty("type_precision") + private Long typePrecision; + + @JsonProperty("type_scale") + private Long typeScale; + + @JsonProperty("type_text") + private String typeText; + + public FunctionParameterInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public 
FunctionParameterInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FunctionParameterInfoPb setParameterDefault(String parameterDefault) { + this.parameterDefault = parameterDefault; + return this; + } + + public String getParameterDefault() { + return parameterDefault; + } + + public FunctionParameterInfoPb setParameterMode(FunctionParameterMode parameterMode) { + this.parameterMode = parameterMode; + return this; + } + + public FunctionParameterMode getParameterMode() { + return parameterMode; + } + + public FunctionParameterInfoPb setParameterType(FunctionParameterType parameterType) { + this.parameterType = parameterType; + return this; + } + + public FunctionParameterType getParameterType() { + return parameterType; + } + + public FunctionParameterInfoPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public FunctionParameterInfoPb setTypeIntervalType(String typeIntervalType) { + this.typeIntervalType = typeIntervalType; + return this; + } + + public String getTypeIntervalType() { + return typeIntervalType; + } + + public FunctionParameterInfoPb setTypeJson(String typeJson) { + this.typeJson = typeJson; + return this; + } + + public String getTypeJson() { + return typeJson; + } + + public FunctionParameterInfoPb setTypeName(ColumnTypeName typeName) { + this.typeName = typeName; + return this; + } + + public ColumnTypeName getTypeName() { + return typeName; + } + + public FunctionParameterInfoPb setTypePrecision(Long typePrecision) { + this.typePrecision = typePrecision; + return this; + } + + public Long getTypePrecision() { + return typePrecision; + } + + public FunctionParameterInfoPb setTypeScale(Long typeScale) { + this.typeScale = typeScale; + return this; + } + + public Long getTypeScale() { + return typeScale; + } + + public FunctionParameterInfoPb setTypeText(String typeText) { + 
this.typeText = typeText; + return this; + } + + public String getTypeText() { + return typeText; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfoPb that = (FunctionParameterInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(parameterDefault, that.parameterDefault) + && Objects.equals(parameterMode, that.parameterMode) + && Objects.equals(parameterType, that.parameterType) + && Objects.equals(position, that.position) + && Objects.equals(typeIntervalType, that.typeIntervalType) + && Objects.equals(typeJson, that.typeJson) + && Objects.equals(typeName, that.typeName) + && Objects.equals(typePrecision, that.typePrecision) + && Objects.equals(typeScale, that.typeScale) + && Objects.equals(typeText, that.typeText); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + name, + parameterDefault, + parameterMode, + parameterType, + position, + typeIntervalType, + typeJson, + typeName, + typePrecision, + typeScale, + typeText); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfoPb.class) + .add("comment", comment) + .add("name", name) + .add("parameterDefault", parameterDefault) + .add("parameterMode", parameterMode) + .add("parameterType", parameterType) + .add("position", position) + .add("typeIntervalType", typeIntervalType) + .add("typeJson", typeJson) + .add("typeName", typeName) + .add("typePrecision", typePrecision) + .add("typeScale", typeScale) + .add("typeText", typeText) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java index 3f1217b3e..48feb1625 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = FunctionParameterInfos.FunctionParameterInfosSerializer.class) +@JsonDeserialize(using = FunctionParameterInfos.FunctionParameterInfosDeserializer.class) public class FunctionParameterInfos { /** The list of parameters of the function. 
*/ - @JsonProperty("parameters") private Collection parameters; public FunctionParameterInfos setParameters(Collection parameters) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(FunctionParameterInfos.class).add("parameters", parameters).toString(); } + + FunctionParameterInfosPb toPb() { + FunctionParameterInfosPb pb = new FunctionParameterInfosPb(); + pb.setParameters(parameters); + + return pb; + } + + static FunctionParameterInfos fromPb(FunctionParameterInfosPb pb) { + FunctionParameterInfos model = new FunctionParameterInfos(); + model.setParameters(pb.getParameters()); + + return model; + } + + public static class FunctionParameterInfosSerializer + extends JsonSerializer { + @Override + public void serialize( + FunctionParameterInfos value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + FunctionParameterInfosPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class FunctionParameterInfosDeserializer + extends JsonDeserializer { + @Override + public FunctionParameterInfos deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + FunctionParameterInfosPb pb = mapper.readValue(p, FunctionParameterInfosPb.class); + return FunctionParameterInfos.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfosPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfosPb.java new file mode 100755 index 000000000..ad9c49d5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfosPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class FunctionParameterInfosPb { + @JsonProperty("parameters") + private Collection parameters; + + public FunctionParameterInfosPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfosPb that = (FunctionParameterInfosPb) o; + return Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(parameters); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfosPb.class).add("parameters", parameters).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java index d98db7184..1b811b4e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a share activation URL */ @Generated +@JsonSerialize(using = GetActivationUrlInfoRequest.GetActivationUrlInfoRequestSerializer.class) +@JsonDeserialize(using = GetActivationUrlInfoRequest.GetActivationUrlInfoRequestDeserializer.class) public class GetActivationUrlInfoRequest { /** The one time activation url. It also accepts activation token. */ - @JsonIgnore private String activationUrl; + private String activationUrl; public GetActivationUrlInfoRequest setActivationUrl(String activationUrl) { this.activationUrl = activationUrl; @@ -41,4 +52,41 @@ public String toString() { .add("activationUrl", activationUrl) .toString(); } + + GetActivationUrlInfoRequestPb toPb() { + GetActivationUrlInfoRequestPb pb = new GetActivationUrlInfoRequestPb(); + pb.setActivationUrl(activationUrl); + + return pb; + } + + static GetActivationUrlInfoRequest fromPb(GetActivationUrlInfoRequestPb pb) { + GetActivationUrlInfoRequest model = new GetActivationUrlInfoRequest(); + model.setActivationUrl(pb.getActivationUrl()); + + return model; + } + + public static class GetActivationUrlInfoRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetActivationUrlInfoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetActivationUrlInfoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetActivationUrlInfoRequestDeserializer + extends JsonDeserializer { + @Override + public GetActivationUrlInfoRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetActivationUrlInfoRequestPb pb = mapper.readValue(p, GetActivationUrlInfoRequestPb.class); + return GetActivationUrlInfoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequestPb.java new file mode 100755 index 000000000..9f9c3f663 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a share activation URL */ +@Generated +class GetActivationUrlInfoRequestPb { + @JsonIgnore private String activationUrl; + + public GetActivationUrlInfoRequestPb setActivationUrl(String activationUrl) { + this.activationUrl = activationUrl; + return this; + } + + public String getActivationUrl() { + return activationUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetActivationUrlInfoRequestPb that = (GetActivationUrlInfoRequestPb) o; + return Objects.equals(activationUrl, that.activationUrl); + } + + @Override + public int hashCode() { + return Objects.hash(activationUrl); + } + + @Override + public String toString() { + return new ToStringer(GetActivationUrlInfoRequestPb.class) + .add("activationUrl", activationUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java index 3b88fa15e..6220a1a9b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java @@ -4,9 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetActivationUrlInfoResponse.GetActivationUrlInfoResponseSerializer.class) +@JsonDeserialize( + using = GetActivationUrlInfoResponse.GetActivationUrlInfoResponseDeserializer.class) public class GetActivationUrlInfoResponse { @Override @@ -25,4 +38,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetActivationUrlInfoResponse.class).toString(); } + + GetActivationUrlInfoResponsePb toPb() { + GetActivationUrlInfoResponsePb pb = new GetActivationUrlInfoResponsePb(); + + return pb; + } + + static GetActivationUrlInfoResponse fromPb(GetActivationUrlInfoResponsePb pb) { + GetActivationUrlInfoResponse model = new GetActivationUrlInfoResponse(); + + return model; + } + + public static class GetActivationUrlInfoResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetActivationUrlInfoResponse value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + GetActivationUrlInfoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetActivationUrlInfoResponseDeserializer + extends JsonDeserializer { + @Override + public GetActivationUrlInfoResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetActivationUrlInfoResponsePb pb = mapper.readValue(p, GetActivationUrlInfoResponsePb.class); + return GetActivationUrlInfoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponsePb.java new file mode 100755 index 000000000..74d1f58fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class GetActivationUrlInfoResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(GetActivationUrlInfoResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java index 88320e5e1..41a05357e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java @@ -4,20 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get recipient federation policy */ @Generated +@JsonSerialize(using = GetFederationPolicyRequest.GetFederationPolicyRequestSerializer.class) +@JsonDeserialize(using = 
GetFederationPolicyRequest.GetFederationPolicyRequestDeserializer.class) public class GetFederationPolicyRequest { /** Name of the policy. This is the name of the policy to be retrieved. */ - @JsonIgnore private String name; + private String name; /** * Name of the recipient. This is the name of the recipient for which the policy is being * retrieved. */ - @JsonIgnore private String recipientName; + private String recipientName; public GetFederationPolicyRequest setName(String name) { this.name = name; @@ -57,4 +68,43 @@ public String toString() { .add("recipientName", recipientName) .toString(); } + + GetFederationPolicyRequestPb toPb() { + GetFederationPolicyRequestPb pb = new GetFederationPolicyRequestPb(); + pb.setName(name); + pb.setRecipientName(recipientName); + + return pb; + } + + static GetFederationPolicyRequest fromPb(GetFederationPolicyRequestPb pb) { + GetFederationPolicyRequest model = new GetFederationPolicyRequest(); + model.setName(pb.getName()); + model.setRecipientName(pb.getRecipientName()); + + return model; + } + + public static class GetFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public GetFederationPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetFederationPolicyRequestPb pb = mapper.readValue(p, GetFederationPolicyRequestPb.class); + return GetFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequestPb.java new file mode 100755 index 000000000..e3feb97df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get recipient federation policy */ +@Generated +class GetFederationPolicyRequestPb { + @JsonIgnore private String name; + + @JsonIgnore private String recipientName; + + public GetFederationPolicyRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetFederationPolicyRequestPb setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFederationPolicyRequestPb that = (GetFederationPolicyRequestPb) o; + return Objects.equals(name, that.name) && Objects.equals(recipientName, that.recipientName); + } + + @Override + public int hashCode() { + return Objects.hash(name, recipientName); + } + + @Override + public String toString() { + return new ToStringer(GetFederationPolicyRequestPb.class) + .add("name", name) + .add("recipientName", 
recipientName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java index 7f27c8335..628e313e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a provider */ @Generated +@JsonSerialize(using = GetProviderRequest.GetProviderRequestSerializer.class) +@JsonDeserialize(using = GetProviderRequest.GetProviderRequestDeserializer.class) public class GetProviderRequest { /** Name of the provider. 
*/ - @JsonIgnore private String name; + private String name; public GetProviderRequest setName(String name) { this.name = name; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetProviderRequest.class).add("name", name).toString(); } + + GetProviderRequestPb toPb() { + GetProviderRequestPb pb = new GetProviderRequestPb(); + pb.setName(name); + + return pb; + } + + static GetProviderRequest fromPb(GetProviderRequestPb pb) { + GetProviderRequest model = new GetProviderRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetProviderRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetProviderRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetProviderRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetProviderRequestDeserializer extends JsonDeserializer { + @Override + public GetProviderRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetProviderRequestPb pb = mapper.readValue(p, GetProviderRequestPb.class); + return GetProviderRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequestPb.java new file mode 100755 index 000000000..bd16a5360 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a provider */ +@Generated +class GetProviderRequestPb { + @JsonIgnore private String name; + + public GetProviderRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProviderRequestPb that = (GetProviderRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetProviderRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java index 1fafaab1a..7ba3d5455 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a share recipient */ @Generated +@JsonSerialize(using = GetRecipientRequest.GetRecipientRequestSerializer.class) +@JsonDeserialize(using = GetRecipientRequest.GetRecipientRequestDeserializer.class) public class GetRecipientRequest { /** Name of the recipient. */ - @JsonIgnore private String name; + private String name; public GetRecipientRequest setName(String name) { this.name = name; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetRecipientRequest.class).add("name", name).toString(); } + + GetRecipientRequestPb toPb() { + GetRecipientRequestPb pb = new GetRecipientRequestPb(); + pb.setName(name); + + return pb; + } + + static GetRecipientRequest fromPb(GetRecipientRequestPb pb) { + GetRecipientRequest model = new GetRecipientRequest(); + model.setName(pb.getName()); + + return model; + } + + public static class GetRecipientRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetRecipientRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRecipientRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRecipientRequestDeserializer + extends JsonDeserializer { + @Override + public GetRecipientRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRecipientRequestPb pb = mapper.readValue(p, GetRecipientRequestPb.class); + return GetRecipientRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequestPb.java new file mode 100755 index 000000000..2464cc17e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a share recipient */ +@Generated +class GetRecipientRequestPb { + @JsonIgnore private String name; + + public GetRecipientRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRecipientRequestPb that = (GetRecipientRequestPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetRecipientRequestPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java index 929f88b04..4b704fba8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java @@ -4,21 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetRecipientSharePermissionsResponse.GetRecipientSharePermissionsResponseSerializer.class) +@JsonDeserialize( + using = + GetRecipientSharePermissionsResponse.GetRecipientSharePermissionsResponseDeserializer.class) public class GetRecipientSharePermissionsResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of data share permissions for a recipient. 
*/ - @JsonProperty("permissions_out") private Collection permissionsOut; public GetRecipientSharePermissionsResponse setNextPageToken(String nextPageToken) { @@ -61,4 +74,44 @@ public String toString() { .add("permissionsOut", permissionsOut) .toString(); } + + GetRecipientSharePermissionsResponsePb toPb() { + GetRecipientSharePermissionsResponsePb pb = new GetRecipientSharePermissionsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPermissionsOut(permissionsOut); + + return pb; + } + + static GetRecipientSharePermissionsResponse fromPb(GetRecipientSharePermissionsResponsePb pb) { + GetRecipientSharePermissionsResponse model = new GetRecipientSharePermissionsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPermissionsOut(pb.getPermissionsOut()); + + return model; + } + + public static class GetRecipientSharePermissionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRecipientSharePermissionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRecipientSharePermissionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRecipientSharePermissionsResponseDeserializer + extends JsonDeserializer { + @Override + public GetRecipientSharePermissionsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRecipientSharePermissionsResponsePb pb = + mapper.readValue(p, GetRecipientSharePermissionsResponsePb.class); + return GetRecipientSharePermissionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponsePb.java new file mode 100755 index 000000000..c5f4805d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetRecipientSharePermissionsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("permissions_out") + private Collection permissionsOut; + + public GetRecipientSharePermissionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetRecipientSharePermissionsResponsePb setPermissionsOut( + Collection permissionsOut) { + this.permissionsOut = permissionsOut; + return this; + } + + public Collection getPermissionsOut() { + return permissionsOut; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRecipientSharePermissionsResponsePb that = (GetRecipientSharePermissionsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(permissionsOut, 
that.permissionsOut); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, permissionsOut); + } + + @Override + public String toString() { + return new ToStringer(GetRecipientSharePermissionsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("permissionsOut", permissionsOut) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java index d635d5575..a61f86c35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetSharePermissionsResponse.GetSharePermissionsResponseSerializer.class) +@JsonDeserialize(using = GetSharePermissionsResponse.GetSharePermissionsResponseDeserializer.class) public class GetSharePermissionsResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. 
* __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** The privileges assigned to each principal */ - @JsonProperty("privilege_assignments") private Collection privilegeAssignments; public GetSharePermissionsResponse setNextPageToken(String nextPageToken) { @@ -61,4 +70,43 @@ public String toString() { .add("privilegeAssignments", privilegeAssignments) .toString(); } + + GetSharePermissionsResponsePb toPb() { + GetSharePermissionsResponsePb pb = new GetSharePermissionsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPrivilegeAssignments(privilegeAssignments); + + return pb; + } + + static GetSharePermissionsResponse fromPb(GetSharePermissionsResponsePb pb) { + GetSharePermissionsResponse model = new GetSharePermissionsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPrivilegeAssignments(pb.getPrivilegeAssignments()); + + return model; + } + + public static class GetSharePermissionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetSharePermissionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSharePermissionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSharePermissionsResponseDeserializer + extends JsonDeserializer { + @Override + public GetSharePermissionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSharePermissionsResponsePb pb = mapper.readValue(p, GetSharePermissionsResponsePb.class); + return GetSharePermissionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponsePb.java new file mode 100755 index 000000000..3f2430f85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponsePb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetSharePermissionsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public GetSharePermissionsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetSharePermissionsResponsePb setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSharePermissionsResponsePb that = (GetSharePermissionsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int 
hashCode() { + return Objects.hash(nextPageToken, privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(GetSharePermissionsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java index 1cd3ac93f..5fc6b079b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a share */ @Generated +@JsonSerialize(using = GetShareRequest.GetShareRequestSerializer.class) +@JsonDeserialize(using = GetShareRequest.GetShareRequestDeserializer.class) public class GetShareRequest { /** Query for data to include in the share. */ - @JsonIgnore - @QueryParam("include_shared_data") private Boolean includeSharedData; /** The name of the share. 
*/ - @JsonIgnore private String name; + private String name; public GetShareRequest setIncludeSharedData(Boolean includeSharedData) { this.includeSharedData = includeSharedData; @@ -58,4 +66,40 @@ public String toString() { .add("name", name) .toString(); } + + GetShareRequestPb toPb() { + GetShareRequestPb pb = new GetShareRequestPb(); + pb.setIncludeSharedData(includeSharedData); + pb.setName(name); + + return pb; + } + + static GetShareRequest fromPb(GetShareRequestPb pb) { + GetShareRequest model = new GetShareRequest(); + model.setIncludeSharedData(pb.getIncludeSharedData()); + model.setName(pb.getName()); + + return model; + } + + public static class GetShareRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetShareRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetShareRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetShareRequestDeserializer extends JsonDeserializer { + @Override + public GetShareRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetShareRequestPb pb = mapper.readValue(p, GetShareRequestPb.class); + return GetShareRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequestPb.java new file mode 100755 index 000000000..ec196af20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a share */ +@Generated +class GetShareRequestPb { + @JsonIgnore + @QueryParam("include_shared_data") + private Boolean includeSharedData; + + @JsonIgnore private String name; + + public GetShareRequestPb setIncludeSharedData(Boolean includeSharedData) { + this.includeSharedData = includeSharedData; + return this; + } + + public Boolean getIncludeSharedData() { + return includeSharedData; + } + + public GetShareRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetShareRequestPb that = (GetShareRequestPb) o; + return Objects.equals(includeSharedData, that.includeSharedData) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(includeSharedData, name); + } + + @Override + public String toString() { + return new ToStringer(GetShareRequestPb.class) + .add("includeSharedData", includeSharedData) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java index 656c48451..8ed69ea55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = IpAccessList.IpAccessListSerializer.class) +@JsonDeserialize(using = IpAccessList.IpAccessListDeserializer.class) public class IpAccessList { /** Allowed IP Addresses in CIDR notation. Limit of 100. */ - @JsonProperty("allowed_ip_addresses") private Collection allowedIpAddresses; public IpAccessList setAllowedIpAddresses(Collection allowedIpAddresses) { @@ -42,4 +52,37 @@ public String toString() { .add("allowedIpAddresses", allowedIpAddresses) .toString(); } + + IpAccessListPb toPb() { + IpAccessListPb pb = new IpAccessListPb(); + pb.setAllowedIpAddresses(allowedIpAddresses); + + return pb; + } + + static IpAccessList fromPb(IpAccessListPb pb) { + IpAccessList model = new IpAccessList(); + model.setAllowedIpAddresses(pb.getAllowedIpAddresses()); + + return model; + } + + public static class IpAccessListSerializer extends JsonSerializer { + @Override + public void serialize(IpAccessList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + IpAccessListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class IpAccessListDeserializer extends JsonDeserializer { + @Override + public IpAccessList deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + IpAccessListPb pb = mapper.readValue(p, IpAccessListPb.class); + return IpAccessList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessListPb.java new file mode 100755 index 000000000..c4a1268c8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessListPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class IpAccessListPb { + @JsonProperty("allowed_ip_addresses") + private Collection allowedIpAddresses; + + public IpAccessListPb setAllowedIpAddresses(Collection allowedIpAddresses) { + this.allowedIpAddresses = allowedIpAddresses; + return this; + } + + public Collection getAllowedIpAddresses() { + return allowedIpAddresses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IpAccessListPb that = (IpAccessListPb) o; + return Objects.equals(allowedIpAddresses, that.allowedIpAddresses); + } + + @Override + public int hashCode() { + return Objects.hash(allowedIpAddresses); + } + + @Override + public String toString() { + return new ToStringer(IpAccessListPb.class) + .add("allowedIpAddresses", allowedIpAddresses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java index dcd317716..49e77d163 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java @@ -3,29 +3,36 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List recipient federation policies */ @Generated +@JsonSerialize(using = ListFederationPoliciesRequest.ListFederationPoliciesRequestSerializer.class) +@JsonDeserialize( + using = ListFederationPoliciesRequest.ListFederationPoliciesRequestDeserializer.class) public class ListFederationPoliciesRequest { /** */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; /** * Name of the recipient. This is the name of the recipient for which the policies are being * listed. 
*/ - @JsonIgnore private String recipientName; + private String recipientName; public ListFederationPoliciesRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; @@ -77,4 +84,46 @@ public String toString() { .add("recipientName", recipientName) .toString(); } + + ListFederationPoliciesRequestPb toPb() { + ListFederationPoliciesRequestPb pb = new ListFederationPoliciesRequestPb(); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + pb.setRecipientName(recipientName); + + return pb; + } + + static ListFederationPoliciesRequest fromPb(ListFederationPoliciesRequestPb pb) { + ListFederationPoliciesRequest model = new ListFederationPoliciesRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + model.setRecipientName(pb.getRecipientName()); + + return model; + } + + public static class ListFederationPoliciesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFederationPoliciesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFederationPoliciesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFederationPoliciesRequestDeserializer + extends JsonDeserializer { + @Override + public ListFederationPoliciesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFederationPoliciesRequestPb pb = + mapper.readValue(p, ListFederationPoliciesRequestPb.class); + return ListFederationPoliciesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequestPb.java new file mode 100755 index 000000000..32a522672 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List recipient federation policies */ +@Generated +class ListFederationPoliciesRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + @JsonIgnore private String recipientName; + + public ListFederationPoliciesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListFederationPoliciesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListFederationPoliciesRequestPb setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ListFederationPoliciesRequestPb that = (ListFederationPoliciesRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(recipientName, that.recipientName); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, recipientName); + } + + @Override + public String toString() { + return new ToStringer(ListFederationPoliciesRequestPb.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("recipientName", recipientName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java index e91353bb2..ec0153854 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListFederationPoliciesResponse.ListFederationPoliciesResponseSerializer.class) +@JsonDeserialize( + using = 
ListFederationPoliciesResponse.ListFederationPoliciesResponseDeserializer.class) public class ListFederationPoliciesResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("policies") private Collection policies; public ListFederationPoliciesResponse setNextPageToken(String nextPageToken) { @@ -57,4 +68,44 @@ public String toString() { .add("policies", policies) .toString(); } + + ListFederationPoliciesResponsePb toPb() { + ListFederationPoliciesResponsePb pb = new ListFederationPoliciesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setPolicies(policies); + + return pb; + } + + static ListFederationPoliciesResponse fromPb(ListFederationPoliciesResponsePb pb) { + ListFederationPoliciesResponse model = new ListFederationPoliciesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPolicies(pb.getPolicies()); + + return model; + } + + public static class ListFederationPoliciesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListFederationPoliciesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListFederationPoliciesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListFederationPoliciesResponseDeserializer + extends JsonDeserializer { + @Override + public ListFederationPoliciesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListFederationPoliciesResponsePb pb = + mapper.readValue(p, ListFederationPoliciesResponsePb.class); + return ListFederationPoliciesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponsePb.java new file mode 100755 index 000000000..fffedc33a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListFederationPoliciesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("policies") + private Collection policies; + + public ListFederationPoliciesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListFederationPoliciesResponsePb setPolicies(Collection policies) { + this.policies = policies; + return this; + } + + public Collection getPolicies() { + return policies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFederationPoliciesResponsePb that = (ListFederationPoliciesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(policies, that.policies); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, policies); + } + + @Override + public 
String toString() { + return new ToStringer(ListFederationPoliciesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("policies", policies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java index fea546dc1..7311ee5bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java @@ -3,38 +3,42 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List assets by provider share */ @Generated +@JsonSerialize( + using = ListProviderShareAssetsRequest.ListProviderShareAssetsRequestSerializer.class) +@JsonDeserialize( + using = ListProviderShareAssetsRequest.ListProviderShareAssetsRequestDeserializer.class) public class ListProviderShareAssetsRequest { /** Maximum number of functions to return. */ - @JsonIgnore - @QueryParam("function_max_results") private Long functionMaxResults; /** Maximum number of notebooks to return. 
*/ - @JsonIgnore - @QueryParam("notebook_max_results") private Long notebookMaxResults; /** The name of the provider who owns the share. */ - @JsonIgnore private String providerName; + private String providerName; /** The name of the share. */ - @JsonIgnore private String shareName; + private String shareName; /** Maximum number of tables to return. */ - @JsonIgnore - @QueryParam("table_max_results") private Long tableMaxResults; /** Maximum number of volumes to return. */ - @JsonIgnore - @QueryParam("volume_max_results") private Long volumeMaxResults; public ListProviderShareAssetsRequest setFunctionMaxResults(Long functionMaxResults) { @@ -126,4 +130,52 @@ public String toString() { .add("volumeMaxResults", volumeMaxResults) .toString(); } + + ListProviderShareAssetsRequestPb toPb() { + ListProviderShareAssetsRequestPb pb = new ListProviderShareAssetsRequestPb(); + pb.setFunctionMaxResults(functionMaxResults); + pb.setNotebookMaxResults(notebookMaxResults); + pb.setProviderName(providerName); + pb.setShareName(shareName); + pb.setTableMaxResults(tableMaxResults); + pb.setVolumeMaxResults(volumeMaxResults); + + return pb; + } + + static ListProviderShareAssetsRequest fromPb(ListProviderShareAssetsRequestPb pb) { + ListProviderShareAssetsRequest model = new ListProviderShareAssetsRequest(); + model.setFunctionMaxResults(pb.getFunctionMaxResults()); + model.setNotebookMaxResults(pb.getNotebookMaxResults()); + model.setProviderName(pb.getProviderName()); + model.setShareName(pb.getShareName()); + model.setTableMaxResults(pb.getTableMaxResults()); + model.setVolumeMaxResults(pb.getVolumeMaxResults()); + + return model; + } + + public static class ListProviderShareAssetsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProviderShareAssetsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProviderShareAssetsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class ListProviderShareAssetsRequestDeserializer + extends JsonDeserializer { + @Override + public ListProviderShareAssetsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProviderShareAssetsRequestPb pb = + mapper.readValue(p, ListProviderShareAssetsRequestPb.class); + return ListProviderShareAssetsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequestPb.java new file mode 100755 index 000000000..c54ec3475 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequestPb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List assets by provider share */ +@Generated +class ListProviderShareAssetsRequestPb { + @JsonIgnore + @QueryParam("function_max_results") + private Long functionMaxResults; + + @JsonIgnore + @QueryParam("notebook_max_results") + private Long notebookMaxResults; + + @JsonIgnore private String providerName; + + @JsonIgnore private String shareName; + + @JsonIgnore + @QueryParam("table_max_results") + private Long tableMaxResults; + + @JsonIgnore + @QueryParam("volume_max_results") + private Long volumeMaxResults; + + public ListProviderShareAssetsRequestPb setFunctionMaxResults(Long functionMaxResults) { + this.functionMaxResults = functionMaxResults; + return this; + } + + public Long getFunctionMaxResults() { + return functionMaxResults; + } + + public ListProviderShareAssetsRequestPb setNotebookMaxResults(Long notebookMaxResults) { + this.notebookMaxResults = notebookMaxResults; + return this; + } + + public Long getNotebookMaxResults() { + return notebookMaxResults; + } + + public ListProviderShareAssetsRequestPb setProviderName(String providerName) { + this.providerName = providerName; + return this; + } + + public String getProviderName() { + return providerName; + } + + public ListProviderShareAssetsRequestPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public ListProviderShareAssetsRequestPb setTableMaxResults(Long tableMaxResults) { + this.tableMaxResults = tableMaxResults; + return this; + } + + public Long getTableMaxResults() { + return tableMaxResults; + } + + public ListProviderShareAssetsRequestPb setVolumeMaxResults(Long volumeMaxResults) { + this.volumeMaxResults = 
volumeMaxResults; + return this; + } + + public Long getVolumeMaxResults() { + return volumeMaxResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderShareAssetsRequestPb that = (ListProviderShareAssetsRequestPb) o; + return Objects.equals(functionMaxResults, that.functionMaxResults) + && Objects.equals(notebookMaxResults, that.notebookMaxResults) + && Objects.equals(providerName, that.providerName) + && Objects.equals(shareName, that.shareName) + && Objects.equals(tableMaxResults, that.tableMaxResults) + && Objects.equals(volumeMaxResults, that.volumeMaxResults); + } + + @Override + public int hashCode() { + return Objects.hash( + functionMaxResults, + notebookMaxResults, + providerName, + shareName, + tableMaxResults, + volumeMaxResults); + } + + @Override + public String toString() { + return new ToStringer(ListProviderShareAssetsRequestPb.class) + .add("functionMaxResults", functionMaxResults) + .add("notebookMaxResults", notebookMaxResults) + .add("providerName", providerName) + .add("shareName", shareName) + .add("tableMaxResults", tableMaxResults) + .add("volumeMaxResults", volumeMaxResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java index aca64ba2d..feeb55b67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java @@ -4,27 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Response to ListProviderShareAssets, which contains the list of assets of a share. */ @Generated +@JsonSerialize( + using = ListProviderShareAssetsResponse.ListProviderShareAssetsResponseSerializer.class) +@JsonDeserialize( + using = ListProviderShareAssetsResponse.ListProviderShareAssetsResponseDeserializer.class) public class ListProviderShareAssetsResponse { /** The list of functions in the share. */ - @JsonProperty("functions") private Collection functions; /** The list of notebooks in the share. */ - @JsonProperty("notebooks") private Collection notebooks; /** The list of tables in the share. */ - @JsonProperty("tables") private Collection tables; /** The list of volumes in the share. 
*/ - @JsonProperty("volumes") private Collection volumes; public ListProviderShareAssetsResponse setFunctions(Collection functions) { @@ -88,4 +97,48 @@ public String toString() { .add("volumes", volumes) .toString(); } + + ListProviderShareAssetsResponsePb toPb() { + ListProviderShareAssetsResponsePb pb = new ListProviderShareAssetsResponsePb(); + pb.setFunctions(functions); + pb.setNotebooks(notebooks); + pb.setTables(tables); + pb.setVolumes(volumes); + + return pb; + } + + static ListProviderShareAssetsResponse fromPb(ListProviderShareAssetsResponsePb pb) { + ListProviderShareAssetsResponse model = new ListProviderShareAssetsResponse(); + model.setFunctions(pb.getFunctions()); + model.setNotebooks(pb.getNotebooks()); + model.setTables(pb.getTables()); + model.setVolumes(pb.getVolumes()); + + return model; + } + + public static class ListProviderShareAssetsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProviderShareAssetsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProviderShareAssetsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProviderShareAssetsResponseDeserializer + extends JsonDeserializer { + @Override + public ListProviderShareAssetsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProviderShareAssetsResponsePb pb = + mapper.readValue(p, ListProviderShareAssetsResponsePb.class); + return ListProviderShareAssetsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponsePb.java new file mode 100755 index 000000000..60fc86368 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponsePb.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response to ListProviderShareAssets, which contains the list of assets of a share. */ +@Generated +class ListProviderShareAssetsResponsePb { + @JsonProperty("functions") + private Collection functions; + + @JsonProperty("notebooks") + private Collection notebooks; + + @JsonProperty("tables") + private Collection
tables; + + @JsonProperty("volumes") + private Collection volumes; + + public ListProviderShareAssetsResponsePb setFunctions( + Collection functions) { + this.functions = functions; + return this; + } + + public Collection getFunctions() { + return functions; + } + + public ListProviderShareAssetsResponsePb setNotebooks(Collection notebooks) { + this.notebooks = notebooks; + return this; + } + + public Collection getNotebooks() { + return notebooks; + } + + public ListProviderShareAssetsResponsePb setTables(Collection
tables) { + this.tables = tables; + return this; + } + + public Collection
getTables() { + return tables; + } + + public ListProviderShareAssetsResponsePb setVolumes(Collection volumes) { + this.volumes = volumes; + return this; + } + + public Collection getVolumes() { + return volumes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderShareAssetsResponsePb that = (ListProviderShareAssetsResponsePb) o; + return Objects.equals(functions, that.functions) + && Objects.equals(notebooks, that.notebooks) + && Objects.equals(tables, that.tables) + && Objects.equals(volumes, that.volumes); + } + + @Override + public int hashCode() { + return Objects.hash(functions, notebooks, tables, volumes); + } + + @Override + public String toString() { + return new ToStringer(ListProviderShareAssetsResponsePb.class) + .add("functions", functions) + .add("notebooks", notebooks) + .add("tables", tables) + .add("volumes", volumes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java index cb8bdfcde..6deaef24b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListProviderSharesResponse.ListProviderSharesResponseSerializer.class) +@JsonDeserialize(using = ListProviderSharesResponse.ListProviderSharesResponseDeserializer.class) public class ListProviderSharesResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of provider shares. */ - @JsonProperty("shares") private Collection shares; public ListProviderSharesResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,43 @@ public String toString() { .add("shares", shares) .toString(); } + + ListProviderSharesResponsePb toPb() { + ListProviderSharesResponsePb pb = new ListProviderSharesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setShares(shares); + + return pb; + } + + static ListProviderSharesResponse fromPb(ListProviderSharesResponsePb pb) { + ListProviderSharesResponse model = new ListProviderSharesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setShares(pb.getShares()); + + return model; + } + + public static class ListProviderSharesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProviderSharesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProviderSharesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProviderSharesResponseDeserializer + extends JsonDeserializer { + @Override + public ListProviderSharesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the 
SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProviderSharesResponsePb pb = mapper.readValue(p, ListProviderSharesResponsePb.class); + return ListProviderSharesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponsePb.java new file mode 100755 index 000000000..dfe509d61 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListProviderSharesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("shares") + private Collection shares; + + public ListProviderSharesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListProviderSharesResponsePb setShares(Collection shares) { + this.shares = shares; + return this; + } + + public Collection getShares() { + return shares; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderSharesResponsePb that = (ListProviderSharesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(shares, that.shares); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, shares); + } + + @Override + public String toString() { + 
return new ToStringer(ListProviderSharesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("shares", shares) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java index e33e8ca7b..9758562db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List providers */ @Generated +@JsonSerialize(using = ListProvidersRequest.ListProvidersRequestSerializer.class) +@JsonDeserialize(using = ListProvidersRequest.ListProvidersRequestDeserializer.class) public class ListProvidersRequest { /** * If not provided, all providers will be returned. If no providers exist with this ID, no results * will be returned. */ - @JsonIgnore - @QueryParam("data_provider_global_metastore_id") private String dataProviderGlobalMetastoreId; /** @@ -28,13 +36,9 @@ public class ListProvidersRequest { * max_results size, even zero. 
The only definitive indication that no further providers can be * fetched is when the next_page_token is unset from the response. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListProvidersRequest setDataProviderGlobalMetastoreId( @@ -88,4 +92,44 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListProvidersRequestPb toPb() { + ListProvidersRequestPb pb = new ListProvidersRequestPb(); + pb.setDataProviderGlobalMetastoreId(dataProviderGlobalMetastoreId); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListProvidersRequest fromPb(ListProvidersRequestPb pb) { + ListProvidersRequest model = new ListProvidersRequest(); + model.setDataProviderGlobalMetastoreId(pb.getDataProviderGlobalMetastoreId()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListProvidersRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListProvidersRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProvidersRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProvidersRequestDeserializer + extends JsonDeserializer { + @Override + public ListProvidersRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProvidersRequestPb pb = mapper.readValue(p, ListProvidersRequestPb.class); + return ListProvidersRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequestPb.java new file mode 100755 index 000000000..285624f0f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequestPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List providers */ +@Generated +class ListProvidersRequestPb { + @JsonIgnore + @QueryParam("data_provider_global_metastore_id") + private String dataProviderGlobalMetastoreId; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListProvidersRequestPb setDataProviderGlobalMetastoreId( + String dataProviderGlobalMetastoreId) { + this.dataProviderGlobalMetastoreId = dataProviderGlobalMetastoreId; + return this; + } + + public String getDataProviderGlobalMetastoreId() { + return dataProviderGlobalMetastoreId; + } + + public ListProvidersRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListProvidersRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null 
|| getClass() != o.getClass()) return false; + ListProvidersRequestPb that = (ListProvidersRequestPb) o; + return Objects.equals(dataProviderGlobalMetastoreId, that.dataProviderGlobalMetastoreId) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(dataProviderGlobalMetastoreId, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListProvidersRequestPb.class) + .add("dataProviderGlobalMetastoreId", dataProviderGlobalMetastoreId) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java index 5c1692ebb..e2897d537 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListProvidersResponse.ListProvidersResponseSerializer.class) 
+@JsonDeserialize(using = ListProvidersResponse.ListProvidersResponseDeserializer.class) public class ListProvidersResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of provider information objects. */ - @JsonProperty("providers") private Collection providers; public ListProvidersResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("providers", providers) .toString(); } + + ListProvidersResponsePb toPb() { + ListProvidersResponsePb pb = new ListProvidersResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setProviders(providers); + + return pb; + } + + static ListProvidersResponse fromPb(ListProvidersResponsePb pb) { + ListProvidersResponse model = new ListProvidersResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setProviders(pb.getProviders()); + + return model; + } + + public static class ListProvidersResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListProvidersResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListProvidersResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListProvidersResponseDeserializer + extends JsonDeserializer { + @Override + public ListProvidersResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListProvidersResponsePb pb = mapper.readValue(p, ListProvidersResponsePb.class); + return ListProvidersResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponsePb.java new file mode 100755 index 000000000..242f2edcd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListProvidersResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("providers") + private Collection providers; + + public ListProvidersResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListProvidersResponsePb setProviders(Collection providers) { + this.providers = providers; + return this; + } + + public Collection getProviders() { + return providers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProvidersResponsePb that = (ListProvidersResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(providers, that.providers); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, providers); + } + + @Override + public String toString() { + return new ToStringer(ListProvidersResponsePb.class) + 
.add("nextPageToken", nextPageToken) + .add("providers", providers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequest.java index 1064eede3..6270e262a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List share recipients */ @Generated +@JsonSerialize(using = ListRecipientsRequest.ListRecipientsRequestSerializer.class) +@JsonDeserialize(using = ListRecipientsRequest.ListRecipientsRequestDeserializer.class) public class ListRecipientsRequest { /** * If not provided, all recipients will be returned. If no recipients exist with this ID, no * results will be returned. */ - @JsonIgnore - @QueryParam("data_recipient_global_metastore_id") private String dataRecipientGlobalMetastoreId; /** @@ -28,13 +36,9 @@ public class ListRecipientsRequest { * max_results size, even zero. 
The only definitive indication that no further recipients can be * fetched is when the next_page_token is unset from the response. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListRecipientsRequest setDataRecipientGlobalMetastoreId( @@ -88,4 +92,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListRecipientsRequestPb toPb() { + ListRecipientsRequestPb pb = new ListRecipientsRequestPb(); + pb.setDataRecipientGlobalMetastoreId(dataRecipientGlobalMetastoreId); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListRecipientsRequest fromPb(ListRecipientsRequestPb pb) { + ListRecipientsRequest model = new ListRecipientsRequest(); + model.setDataRecipientGlobalMetastoreId(pb.getDataRecipientGlobalMetastoreId()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListRecipientsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListRecipientsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRecipientsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRecipientsRequestDeserializer + extends JsonDeserializer { + @Override + public ListRecipientsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRecipientsRequestPb pb = mapper.readValue(p, ListRecipientsRequestPb.class); + return ListRecipientsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequestPb.java new file mode 100755 index 000000000..a66d193c6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsRequestPb.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List share recipients */ +@Generated +class ListRecipientsRequestPb { + @JsonIgnore + @QueryParam("data_recipient_global_metastore_id") + private String dataRecipientGlobalMetastoreId; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListRecipientsRequestPb setDataRecipientGlobalMetastoreId( + String dataRecipientGlobalMetastoreId) { + this.dataRecipientGlobalMetastoreId = dataRecipientGlobalMetastoreId; + return this; + } + + public String getDataRecipientGlobalMetastoreId() { + return dataRecipientGlobalMetastoreId; + } + + public ListRecipientsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListRecipientsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) 
return true; + if (o == null || getClass() != o.getClass()) return false; + ListRecipientsRequestPb that = (ListRecipientsRequestPb) o; + return Objects.equals(dataRecipientGlobalMetastoreId, that.dataRecipientGlobalMetastoreId) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(dataRecipientGlobalMetastoreId, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListRecipientsRequestPb.class) + .add("dataRecipientGlobalMetastoreId", dataRecipientGlobalMetastoreId) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponse.java index 2d0ece97f..07933742c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
ListRecipientsResponse.ListRecipientsResponseSerializer.class) +@JsonDeserialize(using = ListRecipientsResponse.ListRecipientsResponseDeserializer.class) public class ListRecipientsResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). */ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of recipient information objects. */ - @JsonProperty("recipients") private Collection recipients; public ListRecipientsResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("recipients", recipients) .toString(); } + + ListRecipientsResponsePb toPb() { + ListRecipientsResponsePb pb = new ListRecipientsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRecipients(recipients); + + return pb; + } + + static ListRecipientsResponse fromPb(ListRecipientsResponsePb pb) { + ListRecipientsResponse model = new ListRecipientsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRecipients(pb.getRecipients()); + + return model; + } + + public static class ListRecipientsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListRecipientsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListRecipientsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListRecipientsResponseDeserializer + extends JsonDeserializer { + @Override + public ListRecipientsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListRecipientsResponsePb pb = mapper.readValue(p, ListRecipientsResponsePb.class); + return ListRecipientsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponsePb.java new file mode 100755 index 000000000..7e633e229 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListRecipientsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("recipients") + private Collection recipients; + + public ListRecipientsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListRecipientsResponsePb setRecipients(Collection recipients) { + this.recipients = recipients; + return this; + } + + public Collection getRecipients() { + return recipients; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRecipientsResponsePb that = (ListRecipientsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(recipients, that.recipients); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, recipients); + } + + @Override + public String toString() { + return new 
ToStringer(ListRecipientsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("recipients", recipients) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java index 61d09cbad..77e002831 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List shares by Provider */ @Generated +@JsonSerialize(using = ListSharesRequest.ListSharesRequestSerializer.class) +@JsonDeserialize(using = ListSharesRequest.ListSharesRequestDeserializer.class) public class ListSharesRequest { /** * Maximum number of shares to return. - when set to 0, the page length is set to a server @@ -20,16 +30,12 @@ public class ListSharesRequest { * max_results size, even zero. The only definitive indication that no further shares can be * fetched is when the next_page_token is unset from the response. 
*/ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** Name of the provider in which to list shares. */ - @JsonIgnore private String name; + private String name; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListSharesRequest setMaxResults(Long maxResults) { @@ -82,4 +88,42 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListSharesRequestPb toPb() { + ListSharesRequestPb pb = new ListSharesRequestPb(); + pb.setMaxResults(maxResults); + pb.setName(name); + pb.setPageToken(pageToken); + + return pb; + } + + static ListSharesRequest fromPb(ListSharesRequestPb pb) { + ListSharesRequest model = new ListSharesRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setName(pb.getName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListSharesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListSharesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSharesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSharesRequestDeserializer extends JsonDeserializer { + @Override + public ListSharesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSharesRequestPb pb = mapper.readValue(p, ListSharesRequestPb.class); + return ListSharesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequestPb.java new file mode 100755 index 000000000..2021c86e5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List shares by Provider */ +@Generated +class ListSharesRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore private String name; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListSharesRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListSharesRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ListSharesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSharesRequestPb that = (ListSharesRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(name, that.name) + && Objects.equals(pageToken, that.pageToken); + } + + 
@Override + public int hashCode() { + return Objects.hash(maxResults, name, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListSharesRequestPb.class) + .add("maxResults", maxResults) + .add("name", name) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java index a7e7f47d0..155ced4d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListSharesResponse.ListSharesResponseSerializer.class) +@JsonDeserialize(using = ListSharesResponse.ListSharesResponseDeserializer.class) public class ListSharesResponse { /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. * __page_token__ should be set to this value for the next request (for the next page of results). 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** An array of data share information objects. */ - @JsonProperty("shares") private Collection shares; public ListSharesResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,40 @@ public String toString() { .add("shares", shares) .toString(); } + + ListSharesResponsePb toPb() { + ListSharesResponsePb pb = new ListSharesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setShares(shares); + + return pb; + } + + static ListSharesResponse fromPb(ListSharesResponsePb pb) { + ListSharesResponse model = new ListSharesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setShares(pb.getShares()); + + return model; + } + + public static class ListSharesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListSharesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSharesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSharesResponseDeserializer extends JsonDeserializer { + @Override + public ListSharesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSharesResponsePb pb = mapper.readValue(p, ListSharesResponsePb.class); + return ListSharesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponsePb.java new file mode 100755 index 000000000..cce37c2b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSharesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("shares") + private Collection shares; + + public ListSharesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSharesResponsePb setShares(Collection shares) { + this.shares = shares; + return this; + } + + public Collection getShares() { + return shares; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSharesResponsePb that = (ListSharesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(shares, that.shares); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, shares); + } + + @Override + public String toString() { + return new ToStringer(ListSharesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("shares", shares) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java index f2194dbed..ca95cff1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = NotebookFile.NotebookFileSerializer.class) +@JsonDeserialize(using = NotebookFile.NotebookFileDeserializer.class) public class NotebookFile { /** The comment of the notebook file. */ - @JsonProperty("comment") private String comment; /** The id of the notebook file. */ - @JsonProperty("id") private String id; /** Name of the notebook file. */ - @JsonProperty("name") private String name; /** The name of the share that the notebook file belongs to. */ - @JsonProperty("share") private String share; /** The id of the share that the notebook file belongs to. */ - @JsonProperty("share_id") private String shareId; /** The tags of the notebook file. 
*/ - @JsonProperty("tags") private Collection tags; public NotebookFile setComment(String comment) { @@ -117,4 +122,47 @@ public String toString() { .add("tags", tags) .toString(); } + + NotebookFilePb toPb() { + NotebookFilePb pb = new NotebookFilePb(); + pb.setComment(comment); + pb.setId(id); + pb.setName(name); + pb.setShare(share); + pb.setShareId(shareId); + pb.setTags(tags); + + return pb; + } + + static NotebookFile fromPb(NotebookFilePb pb) { + NotebookFile model = new NotebookFile(); + model.setComment(pb.getComment()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setShare(pb.getShare()); + model.setShareId(pb.getShareId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class NotebookFileSerializer extends JsonSerializer { + @Override + public void serialize(NotebookFile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NotebookFilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NotebookFileDeserializer extends JsonDeserializer { + @Override + public NotebookFile deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NotebookFilePb pb = mapper.readValue(p, NotebookFilePb.class); + return NotebookFile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFilePb.java new file mode 100755 index 000000000..91171c830 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFilePb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class NotebookFilePb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("share") + private String share; + + @JsonProperty("share_id") + private String shareId; + + @JsonProperty("tags") + private Collection tags; + + public NotebookFilePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public NotebookFilePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public NotebookFilePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public NotebookFilePb setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public NotebookFilePb setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public NotebookFilePb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotebookFilePb that = (NotebookFilePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(comment, id, name, share, shareId, tags); + } + + 
@Override + public String toString() { + return new ToStringer(NotebookFilePb.class) + .add("comment", comment) + .add("id", id) + .add("name", name) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java index 71cc1fffd..f6c061bee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -14,17 +23,17 @@ * for more details. */ @Generated +@JsonSerialize(using = OidcFederationPolicy.OidcFederationPolicySerializer.class) +@JsonDeserialize(using = OidcFederationPolicy.OidcFederationPolicyDeserializer.class) public class OidcFederationPolicy { /** * The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience * identifier is intended to represent the recipient of the token. Can be any non-empty string * value. 
As long as the audience in the token matches at least one audience in the policy, */ - @JsonProperty("audiences") private Collection audiences; /** The required token issuer, as specified in the 'iss' claim of federated tokens. */ - @JsonProperty("issuer") private String issuer; /** @@ -35,7 +44,6 @@ public class OidcFederationPolicy { * be the Object ID of the user in Entra ID. - M2M flow (OAuth App access): If the subject claim * is `azp`, this must be the client ID of the OAuth app registered in Entra ID. */ - @JsonProperty("subject") private String subject; /** @@ -48,7 +56,6 @@ public class OidcFederationPolicy { * the OAuth app. - `groups`: Object ID of the group. - `sub`: Subject identifier for other use * cases. */ - @JsonProperty("subject_claim") private String subjectClaim; public OidcFederationPolicy setAudiences(Collection audiences) { @@ -112,4 +119,46 @@ public String toString() { .add("subjectClaim", subjectClaim) .toString(); } + + OidcFederationPolicyPb toPb() { + OidcFederationPolicyPb pb = new OidcFederationPolicyPb(); + pb.setAudiences(audiences); + pb.setIssuer(issuer); + pb.setSubject(subject); + pb.setSubjectClaim(subjectClaim); + + return pb; + } + + static OidcFederationPolicy fromPb(OidcFederationPolicyPb pb) { + OidcFederationPolicy model = new OidcFederationPolicy(); + model.setAudiences(pb.getAudiences()); + model.setIssuer(pb.getIssuer()); + model.setSubject(pb.getSubject()); + model.setSubjectClaim(pb.getSubjectClaim()); + + return model; + } + + public static class OidcFederationPolicySerializer extends JsonSerializer { + @Override + public void serialize( + OidcFederationPolicy value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OidcFederationPolicyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OidcFederationPolicyDeserializer + extends JsonDeserializer { + @Override + public OidcFederationPolicy deserialize(JsonParser p, DeserializationContext ctxt) + 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OidcFederationPolicyPb pb = mapper.readValue(p, OidcFederationPolicyPb.class); + return OidcFederationPolicy.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicyPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicyPb.java new file mode 100755 index 000000000..e7c5fbfd5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicyPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Specifies the policy to use for validating OIDC claims in your federated tokens from Delta + * Sharing Clients. Refer to https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed + * for more details. 
+ */ +@Generated +class OidcFederationPolicyPb { + @JsonProperty("audiences") + private Collection audiences; + + @JsonProperty("issuer") + private String issuer; + + @JsonProperty("subject") + private String subject; + + @JsonProperty("subject_claim") + private String subjectClaim; + + public OidcFederationPolicyPb setAudiences(Collection audiences) { + this.audiences = audiences; + return this; + } + + public Collection getAudiences() { + return audiences; + } + + public OidcFederationPolicyPb setIssuer(String issuer) { + this.issuer = issuer; + return this; + } + + public String getIssuer() { + return issuer; + } + + public OidcFederationPolicyPb setSubject(String subject) { + this.subject = subject; + return this; + } + + public String getSubject() { + return subject; + } + + public OidcFederationPolicyPb setSubjectClaim(String subjectClaim) { + this.subjectClaim = subjectClaim; + return this; + } + + public String getSubjectClaim() { + return subjectClaim; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OidcFederationPolicyPb that = (OidcFederationPolicyPb) o; + return Objects.equals(audiences, that.audiences) + && Objects.equals(issuer, that.issuer) + && Objects.equals(subject, that.subject) + && Objects.equals(subjectClaim, that.subjectClaim); + } + + @Override + public int hashCode() { + return Objects.hash(audiences, issuer, subject, subjectClaim); + } + + @Override + public String toString() { + return new ToStringer(OidcFederationPolicyPb.class) + .add("audiences", audiences) + .add("issuer", issuer) + .add("subject", subject) + .add("subjectClaim", subjectClaim) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java index 20815ecc7..44045c98d 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Partition.PartitionSerializer.class) +@JsonDeserialize(using = Partition.PartitionDeserializer.class) public class Partition { /** An array of partition values. 
*/ - @JsonProperty("values") private Collection values; public Partition setValues(Collection values) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(Partition.class).add("values", values).toString(); } + + PartitionPb toPb() { + PartitionPb pb = new PartitionPb(); + pb.setValues(values); + + return pb; + } + + static Partition fromPb(PartitionPb pb) { + Partition model = new Partition(); + model.setValues(pb.getValues()); + + return model; + } + + public static class PartitionSerializer extends JsonSerializer { + @Override + public void serialize(Partition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PartitionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PartitionDeserializer extends JsonDeserializer { + @Override + public Partition deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PartitionPb pb = mapper.readValue(p, PartitionPb.class); + return Partition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionPb.java new file mode 100755 index 000000000..8b9c9f08c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PartitionPb { + @JsonProperty("values") + private Collection values; + + public PartitionPb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PartitionPb that = (PartitionPb) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return new ToStringer(PartitionPb.class).add("values", values).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java index ee7da25d1..7793c0db3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java @@ -4,31 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PartitionValue.PartitionValueSerializer.class) +@JsonDeserialize(using = PartitionValue.PartitionValueDeserializer.class) public class PartitionValue { /** The name of the partition column. */ - @JsonProperty("name") private String name; /** The operator to apply for the value. */ - @JsonProperty("op") private PartitionValueOp op; /** * The key of a Delta Sharing recipient's property. For example "databricks-account-id". When this * field is set, field `value` can not be set. */ - @JsonProperty("recipient_property_key") private String recipientPropertyKey; /** * The value of the partition column. When this value is not set, it means `null` value. When this * field is set, field `recipient_property_key` can not be set. */ - @JsonProperty("value") private String value; public PartitionValue setName(String name) { @@ -92,4 +99,44 @@ public String toString() { .add("value", value) .toString(); } + + PartitionValuePb toPb() { + PartitionValuePb pb = new PartitionValuePb(); + pb.setName(name); + pb.setOp(op); + pb.setRecipientPropertyKey(recipientPropertyKey); + pb.setValue(value); + + return pb; + } + + static PartitionValue fromPb(PartitionValuePb pb) { + PartitionValue model = new PartitionValue(); + model.setName(pb.getName()); + model.setOp(pb.getOp()); + model.setRecipientPropertyKey(pb.getRecipientPropertyKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class PartitionValueSerializer extends JsonSerializer { + @Override + public void serialize(PartitionValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PartitionValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PartitionValueDeserializer extends JsonDeserializer { + @Override + public PartitionValue deserialize(JsonParser p, DeserializationContext ctxt) 
+ throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PartitionValuePb pb = mapper.readValue(p, PartitionValuePb.class); + return PartitionValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValuePb.java new file mode 100755 index 000000000..0cff5cf4c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValuePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PartitionValuePb { + @JsonProperty("name") + private String name; + + @JsonProperty("op") + private PartitionValueOp op; + + @JsonProperty("recipient_property_key") + private String recipientPropertyKey; + + @JsonProperty("value") + private String value; + + public PartitionValuePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PartitionValuePb setOp(PartitionValueOp op) { + this.op = op; + return this; + } + + public PartitionValueOp getOp() { + return op; + } + + public PartitionValuePb setRecipientPropertyKey(String recipientPropertyKey) { + this.recipientPropertyKey = recipientPropertyKey; + return this; + } + + public String getRecipientPropertyKey() { + return recipientPropertyKey; + } + + public PartitionValuePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() 
!= o.getClass()) return false; + PartitionValuePb that = (PartitionValuePb) o; + return Objects.equals(name, that.name) + && Objects.equals(op, that.op) + && Objects.equals(recipientPropertyKey, that.recipientPropertyKey) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(name, op, recipientPropertyKey, value); + } + + @Override + public String toString() { + return new ToStringer(PartitionValuePb.class) + .add("name", name) + .add("op", op) + .add("recipientPropertyKey", recipientPropertyKey) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java index d6e35637a..dd1fb6e6e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PermissionsChange.PermissionsChangeSerializer.class) +@JsonDeserialize(using = PermissionsChange.PermissionsChangeDeserializer.class) public class PermissionsChange { /** 
The set of privileges to add. */ - @JsonProperty("add") private Collection add; /** The principal whose privileges we are changing. */ - @JsonProperty("principal") private String principal; /** The set of privileges to remove. */ - @JsonProperty("remove") private Collection remove; public PermissionsChange setAdd(Collection add) { @@ -72,4 +80,42 @@ public String toString() { .add("remove", remove) .toString(); } + + PermissionsChangePb toPb() { + PermissionsChangePb pb = new PermissionsChangePb(); + pb.setAdd(add); + pb.setPrincipal(principal); + pb.setRemove(remove); + + return pb; + } + + static PermissionsChange fromPb(PermissionsChangePb pb) { + PermissionsChange model = new PermissionsChange(); + model.setAdd(pb.getAdd()); + model.setPrincipal(pb.getPrincipal()); + model.setRemove(pb.getRemove()); + + return model; + } + + public static class PermissionsChangeSerializer extends JsonSerializer { + @Override + public void serialize(PermissionsChange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PermissionsChangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PermissionsChangeDeserializer extends JsonDeserializer { + @Override + public PermissionsChange deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PermissionsChangePb pb = mapper.readValue(p, PermissionsChangePb.class); + return PermissionsChange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChangePb.java new file mode 100755 index 000000000..48fec84cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChangePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PermissionsChangePb { + @JsonProperty("add") + private Collection add; + + @JsonProperty("principal") + private String principal; + + @JsonProperty("remove") + private Collection remove; + + public PermissionsChangePb setAdd(Collection add) { + this.add = add; + return this; + } + + public Collection getAdd() { + return add; + } + + public PermissionsChangePb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PermissionsChangePb setRemove(Collection remove) { + this.remove = remove; + return this; + } + + public Collection getRemove() { + return remove; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionsChangePb that = (PermissionsChangePb) o; + return Objects.equals(add, that.add) + && Objects.equals(principal, that.principal) + && Objects.equals(remove, that.remove); + } + + @Override + public int hashCode() { + return Objects.hash(add, principal, remove); + } + 
+ @Override + public String toString() { + return new ToStringer(PermissionsChangePb.class) + .add("add", add) + .add("principal", principal) + .add("remove", remove) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java index e6cd7395b..7bbd04b52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = PrivilegeAssignment.PrivilegeAssignmentSerializer.class) +@JsonDeserialize(using = PrivilegeAssignment.PrivilegeAssignmentDeserializer.class) public class PrivilegeAssignment { /** The principal (user email address or group name). */ - @JsonProperty("principal") private String principal; /** The privileges assigned to the principal. 
*/ - @JsonProperty("privileges") private Collection privileges; public PrivilegeAssignment setPrincipal(String principal) { @@ -56,4 +65,41 @@ public String toString() { .add("privileges", privileges) .toString(); } + + PrivilegeAssignmentPb toPb() { + PrivilegeAssignmentPb pb = new PrivilegeAssignmentPb(); + pb.setPrincipal(principal); + pb.setPrivileges(privileges); + + return pb; + } + + static PrivilegeAssignment fromPb(PrivilegeAssignmentPb pb) { + PrivilegeAssignment model = new PrivilegeAssignment(); + model.setPrincipal(pb.getPrincipal()); + model.setPrivileges(pb.getPrivileges()); + + return model; + } + + public static class PrivilegeAssignmentSerializer extends JsonSerializer { + @Override + public void serialize(PrivilegeAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PrivilegeAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PrivilegeAssignmentDeserializer + extends JsonDeserializer { + @Override + public PrivilegeAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PrivilegeAssignmentPb pb = mapper.readValue(p, PrivilegeAssignmentPb.class); + return PrivilegeAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignmentPb.java new file mode 100755 index 000000000..c8445773d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignmentPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class PrivilegeAssignmentPb { + @JsonProperty("principal") + private String principal; + + @JsonProperty("privileges") + private Collection privileges; + + public PrivilegeAssignmentPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PrivilegeAssignmentPb setPrivileges(Collection privileges) { + this.privileges = privileges; + return this; + } + + public Collection getPrivileges() { + return privileges; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PrivilegeAssignmentPb that = (PrivilegeAssignmentPb) o; + return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges); + } + + @Override + public int hashCode() { + return Objects.hash(principal, privileges); + } + + @Override + public String toString() { + return new ToStringer(PrivilegeAssignmentPb.class) + .add("principal", principal) + .add("privileges", privileges) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java index 4b5999d5f..7f96f1630 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ProviderInfo.ProviderInfoSerializer.class) +@JsonDeserialize(using = ProviderInfo.ProviderInfoDeserializer.class) public class ProviderInfo { /** The delta sharing authentication type. */ - @JsonProperty("authentication_type") private AuthenticationType authenticationType; /** * Cloud vendor of the provider's UC metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("cloud") private String cloud; /** Description about the provider. */ - @JsonProperty("comment") private String comment; /** Time at which this Provider was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of Provider creator. */ - @JsonProperty("created_by") private String createdBy; /** @@ -37,51 +43,42 @@ public class ProviderInfo { * __authentication_type__ is **DATABRICKS**. The identifier is of format * __cloud__:__region__:__metastore-uuid__. */ - @JsonProperty("data_provider_global_metastore_id") private String dataProviderGlobalMetastoreId; /** * UUID of the provider's UC metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("metastore_id") private String metastoreId; /** The name of the Provider. */ - @JsonProperty("name") private String name; /** Username of Provider owner. */ - @JsonProperty("owner") private String owner; /** * The recipient profile. 
This field is only present when the authentication_type is `TOKEN` or * `OAUTH_CLIENT_CREDENTIALS`. */ - @JsonProperty("recipient_profile") private RecipientProfile recipientProfile; /** * This field is required when the __authentication_type__ is **TOKEN**, * **OAUTH_CLIENT_CREDENTIALS** or not provided. */ - @JsonProperty("recipient_profile_str") private String recipientProfileStr; /** * Cloud region of the provider's UC metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("region") private String region; /** Time at which this Provider was created, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of user who last modified Provider. */ - @JsonProperty("updated_by") private String updatedBy; public ProviderInfo setAuthenticationType(AuthenticationType authenticationType) { @@ -269,4 +266,63 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + ProviderInfoPb toPb() { + ProviderInfoPb pb = new ProviderInfoPb(); + pb.setAuthenticationType(authenticationType); + pb.setCloud(cloud); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDataProviderGlobalMetastoreId(dataProviderGlobalMetastoreId); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setRecipientProfile(recipientProfile); + pb.setRecipientProfileStr(recipientProfileStr); + pb.setRegion(region); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static ProviderInfo fromPb(ProviderInfoPb pb) { + ProviderInfo model = new ProviderInfo(); + model.setAuthenticationType(pb.getAuthenticationType()); + model.setCloud(pb.getCloud()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDataProviderGlobalMetastoreId(pb.getDataProviderGlobalMetastoreId()); + model.setMetastoreId(pb.getMetastoreId()); + 
model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setRecipientProfile(pb.getRecipientProfile()); + model.setRecipientProfileStr(pb.getRecipientProfileStr()); + model.setRegion(pb.getRegion()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class ProviderInfoSerializer extends JsonSerializer { + @Override + public void serialize(ProviderInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProviderInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProviderInfoDeserializer extends JsonDeserializer { + @Override + public ProviderInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProviderInfoPb pb = mapper.readValue(p, ProviderInfoPb.class); + return ProviderInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfoPb.java new file mode 100755 index 000000000..2d783ea96 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfoPb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ProviderInfoPb { + @JsonProperty("authentication_type") + private AuthenticationType authenticationType; + + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("data_provider_global_metastore_id") + private String dataProviderGlobalMetastoreId; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("recipient_profile") + private RecipientProfile recipientProfile; + + @JsonProperty("recipient_profile_str") + private String recipientProfileStr; + + @JsonProperty("region") + private String region; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public ProviderInfoPb setAuthenticationType(AuthenticationType authenticationType) { + this.authenticationType = authenticationType; + return this; + } + + public AuthenticationType getAuthenticationType() { + return authenticationType; + } + + public ProviderInfoPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public ProviderInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ProviderInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ProviderInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + 
public String getCreatedBy() { + return createdBy; + } + + public ProviderInfoPb setDataProviderGlobalMetastoreId(String dataProviderGlobalMetastoreId) { + this.dataProviderGlobalMetastoreId = dataProviderGlobalMetastoreId; + return this; + } + + public String getDataProviderGlobalMetastoreId() { + return dataProviderGlobalMetastoreId; + } + + public ProviderInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ProviderInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ProviderInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public ProviderInfoPb setRecipientProfile(RecipientProfile recipientProfile) { + this.recipientProfile = recipientProfile; + return this; + } + + public RecipientProfile getRecipientProfile() { + return recipientProfile; + } + + public ProviderInfoPb setRecipientProfileStr(String recipientProfileStr) { + this.recipientProfileStr = recipientProfileStr; + return this; + } + + public String getRecipientProfileStr() { + return recipientProfileStr; + } + + public ProviderInfoPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public ProviderInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ProviderInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProviderInfoPb that = (ProviderInfoPb) o; + return Objects.equals(authenticationType, that.authenticationType) + && 
Objects.equals(cloud, that.cloud) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(dataProviderGlobalMetastoreId, that.dataProviderGlobalMetastoreId) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(recipientProfile, that.recipientProfile) + && Objects.equals(recipientProfileStr, that.recipientProfileStr) + && Objects.equals(region, that.region) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + authenticationType, + cloud, + comment, + createdAt, + createdBy, + dataProviderGlobalMetastoreId, + metastoreId, + name, + owner, + recipientProfile, + recipientProfileStr, + region, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(ProviderInfoPb.class) + .add("authenticationType", authenticationType) + .add("cloud", cloud) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("dataProviderGlobalMetastoreId", dataProviderGlobalMetastoreId) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("recipientProfile", recipientProfile) + .add("recipientProfileStr", recipientProfileStr) + .add("region", region) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderShare.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderShare.java index 554bcf390..d6ee1d6cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderShare.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderShare.java @@ -4,13 +4,23 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ProviderShare.ProviderShareSerializer.class) +@JsonDeserialize(using = ProviderShare.ProviderShareDeserializer.class) public class ProviderShare { /** The name of the Provider Share. */ - @JsonProperty("name") private String name; public ProviderShare setName(String name) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(ProviderShare.class).add("name", name).toString(); } + + ProviderSharePb toPb() { + ProviderSharePb pb = new ProviderSharePb(); + pb.setName(name); + + return pb; + } + + static ProviderShare fromPb(ProviderSharePb pb) { + ProviderShare model = new ProviderShare(); + model.setName(pb.getName()); + + return model; + } + + public static class ProviderShareSerializer extends JsonSerializer { + @Override + public void serialize(ProviderShare value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ProviderSharePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ProviderShareDeserializer extends JsonDeserializer { + @Override + public ProviderShare deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ProviderSharePb pb = mapper.readValue(p, ProviderSharePb.class); + return ProviderShare.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderSharePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderSharePb.java new file mode 100755 index 000000000..b623437e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderSharePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ProviderSharePb { + @JsonProperty("name") + private String name; + + public ProviderSharePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProviderSharePb that = (ProviderSharePb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(ProviderSharePb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java index 273efebdb..70bfe4b76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java @@ -21,7 +21,7 @@ public ProviderInfo create(CreateProvider request) { String path = 
"/api/2.1/unity-catalog/providers"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ProviderInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteProviderRequest request) { String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public ProviderInfo get(GetProviderRequest request) { String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ProviderInfo.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListProvidersResponse list(ListProvidersRequest request) { String path = "/api/2.1/unity-catalog/providers"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListProvidersResponse.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public ListProviderShareAssetsResponse listProviderShareAssets( request.getProviderName(), request.getShareName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListProviderShareAssetsResponse.class); } catch (IOException e) { @@ -90,7 +90,7 @@ public 
ListProviderSharesResponse listShares(ListSharesRequest request) { String path = String.format("/api/2.1/unity-catalog/providers/%s/shares", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListProviderSharesResponse.class); } catch (IOException e) { @@ -103,7 +103,7 @@ public ProviderInfo update(UpdateProvider request) { String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ProviderInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java index 59ca4edbf..0fe7468d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java @@ -24,7 +24,7 @@ public void getActivationUrlInfo(GetActivationUrlInfoRequest request) { request.getActivationUrl()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, GetActivationUrlInfoResponse.class); } catch (IOException e) { @@ -39,7 +39,7 @@ public RetrieveTokenResponse retrieveToken(RetrieveTokenRequest request) { "/api/2.1/unity-catalog/public/data_sharing_activation/%s", request.getActivationUrl()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RetrieveTokenResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java index 03b5136cf..7bb47ee99 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java @@ -23,7 +23,7 @@ public FederationPolicy create(CreateFederationPolicyRequest request) { "/api/2.0/data-sharing/recipients/%s/federation-policies", request.getRecipientName()); try { Request req = new Request("POST", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FederationPolicy.class); @@ -40,7 +40,7 @@ public void delete(DeleteFederationPolicyRequest request) { request.getRecipientName(), request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -56,7 +56,7 @@ public FederationPolicy getFederationPolicy(GetFederationPolicyRequest request) request.getRecipientName(), request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, FederationPolicy.class); } catch (IOException e) { @@ -71,7 +71,7 @@ public ListFederationPoliciesResponse 
list(ListFederationPoliciesRequest request "/api/2.0/data-sharing/recipients/%s/federation-policies", request.getRecipientName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListFederationPoliciesResponse.class); } catch (IOException e) { @@ -87,7 +87,7 @@ public FederationPolicy update(UpdateFederationPolicyRequest request) { request.getRecipientName(), request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, FederationPolicy.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java index 12cba1572..5d313e566 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java @@ -4,47 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RecipientInfo.RecipientInfoSerializer.class) +@JsonDeserialize(using = RecipientInfo.RecipientInfoDeserializer.class) public class RecipientInfo { /** * A boolean status field showing whether the Recipient's activation URL has been exercised or * not. */ - @JsonProperty("activated") private Boolean activated; /** * Full activation url to retrieve the access token. It will be empty if the token is already * retrieved. */ - @JsonProperty("activation_url") private String activationUrl; /** The delta sharing authentication type. */ - @JsonProperty("authentication_type") private AuthenticationType authenticationType; /** * Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("cloud") private String cloud; /** Description about the recipient. */ - @JsonProperty("comment") private String comment; /** Time at which this recipient was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of recipient creator. */ - @JsonProperty("created_by") private String createdBy; /** @@ -52,30 +56,24 @@ public class RecipientInfo { * present when the __authentication_type__ is **DATABRICKS**. The identifier is of format * __cloud__:__region__:__metastore-uuid__. */ - @JsonProperty("data_recipient_global_metastore_id") private String dataRecipientGlobalMetastoreId; /** Expiration timestamp of the token, in epoch milliseconds. */ - @JsonProperty("expiration_time") private Long expirationTime; /** IP Access List */ - @JsonProperty("ip_access_list") private IpAccessList ipAccessList; /** * Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("metastore_id") private String metastoreId; /** Name of Recipient. 
*/ - @JsonProperty("name") private String name; /** Username of the recipient owner. */ - @JsonProperty("owner") private String owner; /** @@ -83,33 +81,27 @@ public class RecipientInfo { * specified properties will override the existing properties. To add and remove properties, one * would need to perform a read-modify-write. */ - @JsonProperty("properties_kvpairs") private SecurablePropertiesKvPairs propertiesKvpairs; /** * Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("region") private String region; /** * The one-time sharing code provided by the data recipient. This field is only present when the * __authentication_type__ is **DATABRICKS**. */ - @JsonProperty("sharing_code") private String sharingCode; /** This field is only present when the __authentication_type__ is **TOKEN**. */ - @JsonProperty("tokens") private Collection tokens; /** Time at which the recipient was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of recipient updater. 
*/ - @JsonProperty("updated_by") private String updatedBy; public RecipientInfo setActivated(Boolean activated) { @@ -357,4 +349,73 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + RecipientInfoPb toPb() { + RecipientInfoPb pb = new RecipientInfoPb(); + pb.setActivated(activated); + pb.setActivationUrl(activationUrl); + pb.setAuthenticationType(authenticationType); + pb.setCloud(cloud); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setDataRecipientGlobalMetastoreId(dataRecipientGlobalMetastoreId); + pb.setExpirationTime(expirationTime); + pb.setIpAccessList(ipAccessList); + pb.setMetastoreId(metastoreId); + pb.setName(name); + pb.setOwner(owner); + pb.setPropertiesKvpairs(propertiesKvpairs); + pb.setRegion(region); + pb.setSharingCode(sharingCode); + pb.setTokens(tokens); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static RecipientInfo fromPb(RecipientInfoPb pb) { + RecipientInfo model = new RecipientInfo(); + model.setActivated(pb.getActivated()); + model.setActivationUrl(pb.getActivationUrl()); + model.setAuthenticationType(pb.getAuthenticationType()); + model.setCloud(pb.getCloud()); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setDataRecipientGlobalMetastoreId(pb.getDataRecipientGlobalMetastoreId()); + model.setExpirationTime(pb.getExpirationTime()); + model.setIpAccessList(pb.getIpAccessList()); + model.setMetastoreId(pb.getMetastoreId()); + model.setName(pb.getName()); + model.setOwner(pb.getOwner()); + model.setPropertiesKvpairs(pb.getPropertiesKvpairs()); + model.setRegion(pb.getRegion()); + model.setSharingCode(pb.getSharingCode()); + model.setTokens(pb.getTokens()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class RecipientInfoSerializer extends JsonSerializer { + @Override + public 
void serialize(RecipientInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RecipientInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RecipientInfoDeserializer extends JsonDeserializer { + @Override + public RecipientInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RecipientInfoPb pb = mapper.readValue(p, RecipientInfoPb.class); + return RecipientInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfoPb.java new file mode 100755 index 000000000..0e1d6382a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfoPb.java @@ -0,0 +1,315 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RecipientInfoPb { + @JsonProperty("activated") + private Boolean activated; + + @JsonProperty("activation_url") + private String activationUrl; + + @JsonProperty("authentication_type") + private AuthenticationType authenticationType; + + @JsonProperty("cloud") + private String cloud; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("data_recipient_global_metastore_id") + private String dataRecipientGlobalMetastoreId; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("ip_access_list") + private IpAccessList ipAccessList; + + @JsonProperty("metastore_id") + private String metastoreId; + + @JsonProperty("name") + private String name; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties_kvpairs") + private SecurablePropertiesKvPairs propertiesKvpairs; + + @JsonProperty("region") + private String region; + + @JsonProperty("sharing_code") + private String sharingCode; + + @JsonProperty("tokens") + private Collection tokens; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public RecipientInfoPb setActivated(Boolean activated) { + this.activated = activated; + return this; + } + + public Boolean getActivated() { + return activated; + } + + public RecipientInfoPb setActivationUrl(String activationUrl) { + this.activationUrl = activationUrl; + return this; + } + + public String getActivationUrl() { + return activationUrl; + } + + public RecipientInfoPb setAuthenticationType(AuthenticationType authenticationType) { + 
this.authenticationType = authenticationType; + return this; + } + + public AuthenticationType getAuthenticationType() { + return authenticationType; + } + + public RecipientInfoPb setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public RecipientInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public RecipientInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public RecipientInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public RecipientInfoPb setDataRecipientGlobalMetastoreId(String dataRecipientGlobalMetastoreId) { + this.dataRecipientGlobalMetastoreId = dataRecipientGlobalMetastoreId; + return this; + } + + public String getDataRecipientGlobalMetastoreId() { + return dataRecipientGlobalMetastoreId; + } + + public RecipientInfoPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public RecipientInfoPb setIpAccessList(IpAccessList ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessList getIpAccessList() { + return ipAccessList; + } + + public RecipientInfoPb setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public RecipientInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public RecipientInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public RecipientInfoPb 
setPropertiesKvpairs(SecurablePropertiesKvPairs propertiesKvpairs) { + this.propertiesKvpairs = propertiesKvpairs; + return this; + } + + public SecurablePropertiesKvPairs getPropertiesKvpairs() { + return propertiesKvpairs; + } + + public RecipientInfoPb setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public RecipientInfoPb setSharingCode(String sharingCode) { + this.sharingCode = sharingCode; + return this; + } + + public String getSharingCode() { + return sharingCode; + } + + public RecipientInfoPb setTokens(Collection tokens) { + this.tokens = tokens; + return this; + } + + public Collection getTokens() { + return tokens; + } + + public RecipientInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public RecipientInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RecipientInfoPb that = (RecipientInfoPb) o; + return Objects.equals(activated, that.activated) + && Objects.equals(activationUrl, that.activationUrl) + && Objects.equals(authenticationType, that.authenticationType) + && Objects.equals(cloud, that.cloud) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(dataRecipientGlobalMetastoreId, that.dataRecipientGlobalMetastoreId) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(ipAccessList, that.ipAccessList) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(propertiesKvpairs, that.propertiesKvpairs) + && Objects.equals(region, 
that.region) + && Objects.equals(sharingCode, that.sharingCode) + && Objects.equals(tokens, that.tokens) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + activated, + activationUrl, + authenticationType, + cloud, + comment, + createdAt, + createdBy, + dataRecipientGlobalMetastoreId, + expirationTime, + ipAccessList, + metastoreId, + name, + owner, + propertiesKvpairs, + region, + sharingCode, + tokens, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(RecipientInfoPb.class) + .add("activated", activated) + .add("activationUrl", activationUrl) + .add("authenticationType", authenticationType) + .add("cloud", cloud) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("dataRecipientGlobalMetastoreId", dataRecipientGlobalMetastoreId) + .add("expirationTime", expirationTime) + .add("ipAccessList", ipAccessList) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("propertiesKvpairs", propertiesKvpairs) + .add("region", region) + .add("sharingCode", sharingCode) + .add("tokens", tokens) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfile.java index da8b0267c..a5e8a3108 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfile.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RecipientProfile.RecipientProfileSerializer.class) +@JsonDeserialize(using = RecipientProfile.RecipientProfileDeserializer.class) public class RecipientProfile { /** The token used to authorize the recipient. */ - @JsonProperty("bearer_token") private String bearerToken; /** The endpoint for the share to be used by the recipient. */ - @JsonProperty("endpoint") private String endpoint; /** The version number of the recipient's credentials on a share. 
*/ - @JsonProperty("share_credentials_version") private Long shareCredentialsVersion; public RecipientProfile setBearerToken(String bearerToken) { @@ -71,4 +79,42 @@ public String toString() { .add("shareCredentialsVersion", shareCredentialsVersion) .toString(); } + + RecipientProfilePb toPb() { + RecipientProfilePb pb = new RecipientProfilePb(); + pb.setBearerToken(bearerToken); + pb.setEndpoint(endpoint); + pb.setShareCredentialsVersion(shareCredentialsVersion); + + return pb; + } + + static RecipientProfile fromPb(RecipientProfilePb pb) { + RecipientProfile model = new RecipientProfile(); + model.setBearerToken(pb.getBearerToken()); + model.setEndpoint(pb.getEndpoint()); + model.setShareCredentialsVersion(pb.getShareCredentialsVersion()); + + return model; + } + + public static class RecipientProfileSerializer extends JsonSerializer { + @Override + public void serialize(RecipientProfile value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RecipientProfilePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RecipientProfileDeserializer extends JsonDeserializer { + @Override + public RecipientProfile deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RecipientProfilePb pb = mapper.readValue(p, RecipientProfilePb.class); + return RecipientProfile.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfilePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfilePb.java new file mode 100755 index 000000000..bab27d1c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfilePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RecipientProfilePb { + @JsonProperty("bearer_token") + private String bearerToken; + + @JsonProperty("endpoint") + private String endpoint; + + @JsonProperty("share_credentials_version") + private Long shareCredentialsVersion; + + public RecipientProfilePb setBearerToken(String bearerToken) { + this.bearerToken = bearerToken; + return this; + } + + public String getBearerToken() { + return bearerToken; + } + + public RecipientProfilePb setEndpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } + + public String getEndpoint() { + return endpoint; + } + + public RecipientProfilePb setShareCredentialsVersion(Long shareCredentialsVersion) { + this.shareCredentialsVersion = shareCredentialsVersion; + return this; + } + + public Long getShareCredentialsVersion() { + return shareCredentialsVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RecipientProfilePb that = (RecipientProfilePb) o; + return Objects.equals(bearerToken, that.bearerToken) + && Objects.equals(endpoint, that.endpoint) + && Objects.equals(shareCredentialsVersion, that.shareCredentialsVersion); + } + + @Override + public int hashCode() { + return Objects.hash(bearerToken, endpoint, shareCredentialsVersion); + } + + @Override + public String toString() { + return new ToStringer(RecipientProfilePb.class) + .add("bearerToken", bearerToken) + .add("endpoint", endpoint) + .add("shareCredentialsVersion", shareCredentialsVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java 
index 63f57697a..bdb969d94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java @@ -4,40 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RecipientTokenInfo.RecipientTokenInfoSerializer.class) +@JsonDeserialize(using = RecipientTokenInfo.RecipientTokenInfoDeserializer.class) public class RecipientTokenInfo { /** * Full activation URL to retrieve the access token. It will be empty if the token is already * retrieved. */ - @JsonProperty("activation_url") private String activationUrl; /** Time at which this recipient token was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of recipient token creator. */ - @JsonProperty("created_by") private String createdBy; /** Expiration timestamp of the token in epoch milliseconds. */ - @JsonProperty("expiration_time") private Long expirationTime; /** Unique ID of the recipient token. */ - @JsonProperty("id") private String id; /** Time at which this recipient token was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of recipient token updater. 
*/ - @JsonProperty("updated_by") private String updatedBy; public RecipientTokenInfo setActivationUrl(String activationUrl) { @@ -135,4 +139,50 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + RecipientTokenInfoPb toPb() { + RecipientTokenInfoPb pb = new RecipientTokenInfoPb(); + pb.setActivationUrl(activationUrl); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setExpirationTime(expirationTime); + pb.setId(id); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static RecipientTokenInfo fromPb(RecipientTokenInfoPb pb) { + RecipientTokenInfo model = new RecipientTokenInfo(); + model.setActivationUrl(pb.getActivationUrl()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setExpirationTime(pb.getExpirationTime()); + model.setId(pb.getId()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class RecipientTokenInfoSerializer extends JsonSerializer { + @Override + public void serialize(RecipientTokenInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RecipientTokenInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RecipientTokenInfoDeserializer extends JsonDeserializer { + @Override + public RecipientTokenInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RecipientTokenInfoPb pb = mapper.readValue(p, RecipientTokenInfoPb.class); + return RecipientTokenInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfoPb.java new file mode 100755 index 000000000..4174232df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfoPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RecipientTokenInfoPb { + @JsonProperty("activation_url") + private String activationUrl; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("id") + private String id; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public RecipientTokenInfoPb setActivationUrl(String activationUrl) { + this.activationUrl = activationUrl; + return this; + } + + public String getActivationUrl() { + return activationUrl; + } + + public RecipientTokenInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public RecipientTokenInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public RecipientTokenInfoPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } 
+ + public Long getExpirationTime() { + return expirationTime; + } + + public RecipientTokenInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public RecipientTokenInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public RecipientTokenInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RecipientTokenInfoPb that = (RecipientTokenInfoPb) o; + return Objects.equals(activationUrl, that.activationUrl) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(id, that.id) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + activationUrl, createdAt, createdBy, expirationTime, id, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(RecipientTokenInfoPb.class) + .add("activationUrl", activationUrl) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("expirationTime", expirationTime) + .add("id", id) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java index afd12e0eb..73e3a74f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java @@ -21,7 +21,7 
@@ public RecipientInfo create(CreateRecipient request) { String path = "/api/2.1/unity-catalog/recipients"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RecipientInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteRecipientRequest request) { String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public RecipientInfo get(GetRecipientRequest request) { String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RecipientInfo.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListRecipientsResponse list(ListRecipientsRequest request) { String path = "/api/2.1/unity-catalog/recipients"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListRecipientsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public RecipientInfo rotateToken(RotateRecipientToken request) { String.format("/api/2.1/unity-catalog/recipients/%s/rotate-token", request.getName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", 
"application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RecipientInfo.class); @@ -89,7 +89,7 @@ public GetRecipientSharePermissionsResponse sharePermissions(SharePermissionsReq String.format("/api/2.1/unity-catalog/recipients/%s/share-permissions", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetRecipientSharePermissionsResponse.class); } catch (IOException e) { @@ -102,7 +102,7 @@ public RecipientInfo update(UpdateRecipient request) { String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RecipientInfo.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java index a39bd7459..2ede4de7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RegisteredModelAlias.RegisteredModelAliasSerializer.class) +@JsonDeserialize(using = RegisteredModelAlias.RegisteredModelAliasDeserializer.class) public class RegisteredModelAlias { /** Name of the alias. */ - @JsonProperty("alias_name") private String aliasName; /** Numeric model version that alias will reference. */ - @JsonProperty("version_num") private Long versionNum; public RegisteredModelAlias setAliasName(String aliasName) { @@ -55,4 +64,42 @@ public String toString() { .add("versionNum", versionNum) .toString(); } + + RegisteredModelAliasPb toPb() { + RegisteredModelAliasPb pb = new RegisteredModelAliasPb(); + pb.setAliasName(aliasName); + pb.setVersionNum(versionNum); + + return pb; + } + + static RegisteredModelAlias fromPb(RegisteredModelAliasPb pb) { + RegisteredModelAlias model = new RegisteredModelAlias(); + model.setAliasName(pb.getAliasName()); + model.setVersionNum(pb.getVersionNum()); + + return model; + } + + public static class RegisteredModelAliasSerializer extends JsonSerializer { + @Override + public void serialize( + RegisteredModelAlias value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RegisteredModelAliasPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RegisteredModelAliasDeserializer + extends JsonDeserializer { + @Override + public RegisteredModelAlias deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RegisteredModelAliasPb pb = mapper.readValue(p, RegisteredModelAliasPb.class); + return RegisteredModelAlias.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAliasPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAliasPb.java new file mode 100755 index 000000000..74a845ab3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAliasPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RegisteredModelAliasPb { + @JsonProperty("alias_name") + private String aliasName; + + @JsonProperty("version_num") + private Long versionNum; + + public RegisteredModelAliasPb setAliasName(String aliasName) { + this.aliasName = aliasName; + return this; + } + + public String getAliasName() { + return aliasName; + } + + public RegisteredModelAliasPb setVersionNum(Long versionNum) { + this.versionNum = versionNum; + return this; + } + + public Long getVersionNum() { + return versionNum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelAliasPb that = (RegisteredModelAliasPb) o; + return Objects.equals(aliasName, that.aliasName) && Objects.equals(versionNum, that.versionNum); + } + + @Override + public int hashCode() { + return Objects.hash(aliasName, versionNum); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelAliasPb.class) + .add("aliasName", aliasName) + .add("versionNum", versionNum) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequest.java index e359a49a1..395a78663 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an access token */ @Generated +@JsonSerialize(using = RetrieveTokenRequest.RetrieveTokenRequestSerializer.class) +@JsonDeserialize(using = RetrieveTokenRequest.RetrieveTokenRequestDeserializer.class) public class RetrieveTokenRequest { /** The one time activation url. It also accepts activation token. 
*/ - @JsonIgnore private String activationUrl; + private String activationUrl; public RetrieveTokenRequest setActivationUrl(String activationUrl) { this.activationUrl = activationUrl; @@ -41,4 +52,40 @@ public String toString() { .add("activationUrl", activationUrl) .toString(); } + + RetrieveTokenRequestPb toPb() { + RetrieveTokenRequestPb pb = new RetrieveTokenRequestPb(); + pb.setActivationUrl(activationUrl); + + return pb; + } + + static RetrieveTokenRequest fromPb(RetrieveTokenRequestPb pb) { + RetrieveTokenRequest model = new RetrieveTokenRequest(); + model.setActivationUrl(pb.getActivationUrl()); + + return model; + } + + public static class RetrieveTokenRequestSerializer extends JsonSerializer { + @Override + public void serialize( + RetrieveTokenRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RetrieveTokenRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RetrieveTokenRequestDeserializer + extends JsonDeserializer { + @Override + public RetrieveTokenRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RetrieveTokenRequestPb pb = mapper.readValue(p, RetrieveTokenRequestPb.class); + return RetrieveTokenRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequestPb.java new file mode 100755 index 000000000..fc09655d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an access token */ +@Generated +class RetrieveTokenRequestPb { + @JsonIgnore private String activationUrl; + + public RetrieveTokenRequestPb setActivationUrl(String activationUrl) { + this.activationUrl = activationUrl; + return this; + } + + public String getActivationUrl() { + return activationUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RetrieveTokenRequestPb that = (RetrieveTokenRequestPb) o; + return Objects.equals(activationUrl, that.activationUrl); + } + + @Override + public int hashCode() { + return Objects.hash(activationUrl); + } + + @Override + public String toString() { + return new ToStringer(RetrieveTokenRequestPb.class) + .add("activationUrl", activationUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponse.java index fdd7b0ce3..6de3f2881 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponse.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RetrieveTokenResponse.RetrieveTokenResponseSerializer.class) +@JsonDeserialize(using = RetrieveTokenResponse.RetrieveTokenResponseDeserializer.class) public class RetrieveTokenResponse { /** The token used to authorize the recipient. */ - @JsonProperty("bearerToken") private String bearerToken; /** The endpoint for the share to be used by the recipient. */ - @JsonProperty("endpoint") private String endpoint; /** Expiration timestamp of the token in epoch milliseconds. */ - @JsonProperty("expirationTime") private String expirationTime; /** These field names must follow the delta sharing protocol. */ - @JsonProperty("shareCredentialsVersion") private Long shareCredentialsVersion; public RetrieveTokenResponse setBearerToken(String bearerToken) { @@ -86,4 +93,47 @@ public String toString() { .add("shareCredentialsVersion", shareCredentialsVersion) .toString(); } + + RetrieveTokenResponsePb toPb() { + RetrieveTokenResponsePb pb = new RetrieveTokenResponsePb(); + pb.setBearerToken(bearerToken); + pb.setEndpoint(endpoint); + pb.setExpirationTime(expirationTime); + pb.setShareCredentialsVersion(shareCredentialsVersion); + + return pb; + } + + static RetrieveTokenResponse fromPb(RetrieveTokenResponsePb pb) { + RetrieveTokenResponse model = new RetrieveTokenResponse(); + model.setBearerToken(pb.getBearerToken()); + model.setEndpoint(pb.getEndpoint()); + model.setExpirationTime(pb.getExpirationTime()); + model.setShareCredentialsVersion(pb.getShareCredentialsVersion()); + + return model; + } + + public static class RetrieveTokenResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RetrieveTokenResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException 
{ + RetrieveTokenResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RetrieveTokenResponseDeserializer + extends JsonDeserializer { + @Override + public RetrieveTokenResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RetrieveTokenResponsePb pb = mapper.readValue(p, RetrieveTokenResponsePb.class); + return RetrieveTokenResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponsePb.java new file mode 100755 index 000000000..ff4dd8ad3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponsePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RetrieveTokenResponsePb { + @JsonProperty("bearerToken") + private String bearerToken; + + @JsonProperty("endpoint") + private String endpoint; + + @JsonProperty("expirationTime") + private String expirationTime; + + @JsonProperty("shareCredentialsVersion") + private Long shareCredentialsVersion; + + public RetrieveTokenResponsePb setBearerToken(String bearerToken) { + this.bearerToken = bearerToken; + return this; + } + + public String getBearerToken() { + return bearerToken; + } + + public RetrieveTokenResponsePb setEndpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } + + public String getEndpoint() { + return endpoint; + } + + public RetrieveTokenResponsePb setExpirationTime(String expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public String getExpirationTime() { + return expirationTime; + } + + public RetrieveTokenResponsePb setShareCredentialsVersion(Long shareCredentialsVersion) { + this.shareCredentialsVersion = shareCredentialsVersion; + return this; + } + + public Long getShareCredentialsVersion() { + return shareCredentialsVersion; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RetrieveTokenResponsePb that = (RetrieveTokenResponsePb) o; + return Objects.equals(bearerToken, that.bearerToken) + && Objects.equals(endpoint, that.endpoint) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(shareCredentialsVersion, that.shareCredentialsVersion); + } + + @Override + public int hashCode() { + return Objects.hash(bearerToken, endpoint, expirationTime, shareCredentialsVersion); + } + + @Override + public String toString() { + return new 
ToStringer(RetrieveTokenResponsePb.class) + .add("bearerToken", bearerToken) + .add("endpoint", endpoint) + .add("expirationTime", expirationTime) + .add("shareCredentialsVersion", shareCredentialsVersion) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java index 07b63c93c..ceeee6b23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RotateRecipientToken.RotateRecipientTokenSerializer.class) +@JsonDeserialize(using = RotateRecipientToken.RotateRecipientTokenDeserializer.class) public class RotateRecipientToken { /** * The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time * of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to * expire the existing token immediately, negative number will return an error. 
*/ - @JsonProperty("existing_token_expire_in_seconds") private Long existingTokenExpireInSeconds; /** The name of the Recipient. */ - @JsonIgnore private String name; + private String name; public RotateRecipientToken setExistingTokenExpireInSeconds(Long existingTokenExpireInSeconds) { this.existingTokenExpireInSeconds = existingTokenExpireInSeconds; @@ -60,4 +69,42 @@ public String toString() { .add("name", name) .toString(); } + + RotateRecipientTokenPb toPb() { + RotateRecipientTokenPb pb = new RotateRecipientTokenPb(); + pb.setExistingTokenExpireInSeconds(existingTokenExpireInSeconds); + pb.setName(name); + + return pb; + } + + static RotateRecipientToken fromPb(RotateRecipientTokenPb pb) { + RotateRecipientToken model = new RotateRecipientToken(); + model.setExistingTokenExpireInSeconds(pb.getExistingTokenExpireInSeconds()); + model.setName(pb.getName()); + + return model; + } + + public static class RotateRecipientTokenSerializer extends JsonSerializer { + @Override + public void serialize( + RotateRecipientToken value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RotateRecipientTokenPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RotateRecipientTokenDeserializer + extends JsonDeserializer { + @Override + public RotateRecipientToken deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RotateRecipientTokenPb pb = mapper.readValue(p, RotateRecipientTokenPb.class); + return RotateRecipientToken.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientTokenPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientTokenPb.java new file mode 100755 index 000000000..d71d290e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientTokenPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RotateRecipientTokenPb { + @JsonProperty("existing_token_expire_in_seconds") + private Long existingTokenExpireInSeconds; + + @JsonIgnore private String name; + + public RotateRecipientTokenPb setExistingTokenExpireInSeconds(Long existingTokenExpireInSeconds) { + this.existingTokenExpireInSeconds = existingTokenExpireInSeconds; + return this; + } + + public Long getExistingTokenExpireInSeconds() { + return existingTokenExpireInSeconds; + } + + public RotateRecipientTokenPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RotateRecipientTokenPb that = (RotateRecipientTokenPb) o; + return Objects.equals(existingTokenExpireInSeconds, that.existingTokenExpireInSeconds) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(existingTokenExpireInSeconds, name); + } + + @Override + 
public String toString() { + return new ToStringer(RotateRecipientTokenPb.class) + .add("existingTokenExpireInSeconds", existingTokenExpireInSeconds) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairs.java index fb317ac29..0f7879c5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairs.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @@ -12,9 +21,10 @@ * An object with __properties__ containing map of key-value properties attached to the securable. */ @Generated +@JsonSerialize(using = SecurablePropertiesKvPairs.SecurablePropertiesKvPairsSerializer.class) +@JsonDeserialize(using = SecurablePropertiesKvPairs.SecurablePropertiesKvPairsDeserializer.class) public class SecurablePropertiesKvPairs { /** A map of key-value properties attached to the securable. 
*/ - @JsonProperty("properties") private Map properties; public SecurablePropertiesKvPairs setProperties(Map properties) { @@ -45,4 +55,41 @@ public String toString() { .add("properties", properties) .toString(); } + + SecurablePropertiesKvPairsPb toPb() { + SecurablePropertiesKvPairsPb pb = new SecurablePropertiesKvPairsPb(); + pb.setProperties(properties); + + return pb; + } + + static SecurablePropertiesKvPairs fromPb(SecurablePropertiesKvPairsPb pb) { + SecurablePropertiesKvPairs model = new SecurablePropertiesKvPairs(); + model.setProperties(pb.getProperties()); + + return model; + } + + public static class SecurablePropertiesKvPairsSerializer + extends JsonSerializer { + @Override + public void serialize( + SecurablePropertiesKvPairs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SecurablePropertiesKvPairsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SecurablePropertiesKvPairsDeserializer + extends JsonDeserializer { + @Override + public SecurablePropertiesKvPairs deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SecurablePropertiesKvPairsPb pb = mapper.readValue(p, SecurablePropertiesKvPairsPb.class); + return SecurablePropertiesKvPairs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairsPb.java new file mode 100755 index 000000000..43767bb77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SecurablePropertiesKvPairsPb.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** + * An object with __properties__ containing map of key-value properties attached to the securable. + */ +@Generated +class SecurablePropertiesKvPairsPb { + @JsonProperty("properties") + private Map properties; + + public SecurablePropertiesKvPairsPb setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SecurablePropertiesKvPairsPb that = (SecurablePropertiesKvPairsPb) o; + return Objects.equals(properties, that.properties); + } + + @Override + public int hashCode() { + return Objects.hash(properties); + } + + @Override + public String toString() { + return new ToStringer(SecurablePropertiesKvPairsPb.class) + .add("properties", properties) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java index 36deca691..5c678b8f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java @@ -4,50 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ShareInfo.ShareInfoSerializer.class) +@JsonDeserialize(using = ShareInfo.ShareInfoDeserializer.class) public class ShareInfo { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** Time at which this share was created, in epoch milliseconds. */ - @JsonProperty("created_at") private Long createdAt; /** Username of share creator. */ - @JsonProperty("created_by") private String createdBy; /** Name of the share. */ - @JsonProperty("name") private String name; /** A list of shared data objects within the share. */ - @JsonProperty("objects") private Collection objects; /** Username of current owner of share. */ - @JsonProperty("owner") private String owner; /** Storage Location URL (full path) for the share. */ - @JsonProperty("storage_location") private String storageLocation; /** Storage root URL for the share. */ - @JsonProperty("storage_root") private String storageRoot; /** Time at which this share was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") private Long updatedAt; /** Username of share updater. 
*/ - @JsonProperty("updated_by") private String updatedBy; public ShareInfo setComment(String comment) { @@ -187,4 +188,55 @@ public String toString() { .add("updatedBy", updatedBy) .toString(); } + + ShareInfoPb toPb() { + ShareInfoPb pb = new ShareInfoPb(); + pb.setComment(comment); + pb.setCreatedAt(createdAt); + pb.setCreatedBy(createdBy); + pb.setName(name); + pb.setObjects(objects); + pb.setOwner(owner); + pb.setStorageLocation(storageLocation); + pb.setStorageRoot(storageRoot); + pb.setUpdatedAt(updatedAt); + pb.setUpdatedBy(updatedBy); + + return pb; + } + + static ShareInfo fromPb(ShareInfoPb pb) { + ShareInfo model = new ShareInfo(); + model.setComment(pb.getComment()); + model.setCreatedAt(pb.getCreatedAt()); + model.setCreatedBy(pb.getCreatedBy()); + model.setName(pb.getName()); + model.setObjects(pb.getObjects()); + model.setOwner(pb.getOwner()); + model.setStorageLocation(pb.getStorageLocation()); + model.setStorageRoot(pb.getStorageRoot()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUpdatedBy(pb.getUpdatedBy()); + + return model; + } + + public static class ShareInfoSerializer extends JsonSerializer { + @Override + public void serialize(ShareInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ShareInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ShareInfoDeserializer extends JsonDeserializer { + @Override + public ShareInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ShareInfoPb pb = mapper.readValue(p, ShareInfoPb.class); + return ShareInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfoPb.java new file mode 100755 index 000000000..b5ea85ac7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfoPb.java @@ -0,0 +1,180 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ShareInfoPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("created_by") + private String createdBy; + + @JsonProperty("name") + private String name; + + @JsonProperty("objects") + private Collection objects; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("storage_root") + private String storageRoot; + + @JsonProperty("updated_at") + private Long updatedAt; + + @JsonProperty("updated_by") + private String updatedBy; + + public ShareInfoPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public ShareInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ShareInfoPb setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ShareInfoPb setName(String name) { + this.name 
= name; + return this; + } + + public String getName() { + return name; + } + + public ShareInfoPb setObjects(Collection objects) { + this.objects = objects; + return this; + } + + public Collection getObjects() { + return objects; + } + + public ShareInfoPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public ShareInfoPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public ShareInfoPb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public ShareInfoPb setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ShareInfoPb setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShareInfoPb that = (ShareInfoPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(name, that.name) + && Objects.equals(objects, that.objects) + && Objects.equals(owner, that.owner) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(storageRoot, that.storageRoot) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + createdAt, + createdBy, + name, + objects, + owner, + storageLocation, + storageRoot, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new 
ToStringer(ShareInfoPb.class) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("name", name) + .add("objects", objects) + .add("owner", owner) + .add("storageLocation", storageLocation) + .add("storageRoot", storageRoot) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequest.java index 0ea7e83c5..6aceb185d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get recipient share permissions */ @Generated +@JsonSerialize(using = SharePermissionsRequest.SharePermissionsRequestSerializer.class) +@JsonDeserialize(using = SharePermissionsRequest.SharePermissionsRequestDeserializer.class) public class SharePermissionsRequest { /** * Maximum number of permissions to return. 
- when set to 0, the page length is set to a server @@ -20,16 +30,12 @@ public class SharePermissionsRequest { * max_results size, even zero. The only definitive indication that no further permissions can be * fetched is when the next_page_token is unset from the response. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** The name of the Recipient. */ - @JsonIgnore private String name; + private String name; /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public SharePermissionsRequest setMaxResults(Long maxResults) { @@ -82,4 +88,45 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + SharePermissionsRequestPb toPb() { + SharePermissionsRequestPb pb = new SharePermissionsRequestPb(); + pb.setMaxResults(maxResults); + pb.setName(name); + pb.setPageToken(pageToken); + + return pb; + } + + static SharePermissionsRequest fromPb(SharePermissionsRequestPb pb) { + SharePermissionsRequest model = new SharePermissionsRequest(); + model.setMaxResults(pb.getMaxResults()); + model.setName(pb.getName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class SharePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SharePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SharePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SharePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public SharePermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SharePermissionsRequestPb pb = mapper.readValue(p, SharePermissionsRequestPb.class); + return SharePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequestPb.java new file mode 100755 index 000000000..ac8ec84fb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get recipient share permissions */ +@Generated +class SharePermissionsRequestPb { + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore private String name; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public SharePermissionsRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SharePermissionsRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SharePermissionsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharePermissionsRequestPb that = (SharePermissionsRequestPb) o; + return Objects.equals(maxResults, that.maxResults) + && 
Objects.equals(name, that.name) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, name, pageToken); + } + + @Override + public String toString() { + return new ToStringer(SharePermissionsRequestPb.class) + .add("maxResults", maxResults) + .add("name", name) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignment.java index a810447a2..b2d3999af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignment.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ShareToPrivilegeAssignment.ShareToPrivilegeAssignmentSerializer.class) +@JsonDeserialize(using = ShareToPrivilegeAssignment.ShareToPrivilegeAssignmentDeserializer.class) public class ShareToPrivilegeAssignment { /** The privileges assigned to the principal. 
*/ - @JsonProperty("privilege_assignments") private Collection privilegeAssignments; /** The share name. */ - @JsonProperty("share_name") private String shareName; public ShareToPrivilegeAssignment setPrivilegeAssignments( @@ -58,4 +67,43 @@ public String toString() { .add("shareName", shareName) .toString(); } + + ShareToPrivilegeAssignmentPb toPb() { + ShareToPrivilegeAssignmentPb pb = new ShareToPrivilegeAssignmentPb(); + pb.setPrivilegeAssignments(privilegeAssignments); + pb.setShareName(shareName); + + return pb; + } + + static ShareToPrivilegeAssignment fromPb(ShareToPrivilegeAssignmentPb pb) { + ShareToPrivilegeAssignment model = new ShareToPrivilegeAssignment(); + model.setPrivilegeAssignments(pb.getPrivilegeAssignments()); + model.setShareName(pb.getShareName()); + + return model; + } + + public static class ShareToPrivilegeAssignmentSerializer + extends JsonSerializer { + @Override + public void serialize( + ShareToPrivilegeAssignment value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ShareToPrivilegeAssignmentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ShareToPrivilegeAssignmentDeserializer + extends JsonDeserializer { + @Override + public ShareToPrivilegeAssignment deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ShareToPrivilegeAssignmentPb pb = mapper.readValue(p, ShareToPrivilegeAssignmentPb.class); + return ShareToPrivilegeAssignment.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignmentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignmentPb.java new file mode 100755 index 000000000..37b45787c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignmentPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ShareToPrivilegeAssignmentPb { + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + @JsonProperty("share_name") + private String shareName; + + public ShareToPrivilegeAssignmentPb setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + public ShareToPrivilegeAssignmentPb setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShareToPrivilegeAssignmentPb that = (ShareToPrivilegeAssignmentPb) o; + return Objects.equals(privilegeAssignments, that.privilegeAssignments) + && Objects.equals(shareName, that.shareName); + } + + @Override + public int hashCode() { + return 
Objects.hash(privilegeAssignments, shareName); + } + + @Override + public String toString() { + return new ToStringer(ShareToPrivilegeAssignmentPb.class) + .add("privilegeAssignments", privilegeAssignments) + .add("shareName", shareName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java index f6e090ef5..23f2213c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SharedDataObject.SharedDataObjectSerializer.class) +@JsonDeserialize(using = SharedDataObject.SharedDataObjectDeserializer.class) public class SharedDataObject { /** The time when this data object is added to the share, in epoch milliseconds. */ - @JsonProperty("added_at") private Long addedAt; /** Username of the sharer. */ - @JsonProperty("added_by") private String addedBy; /** Whether to enable cdf or indicate if cdf is enabled on the shared object. 
*/ - @JsonProperty("cdf_enabled") private Boolean cdfEnabled; /** A user-provided comment when adding the data object to the share. */ - @JsonProperty("comment") private String comment; /** @@ -31,18 +38,15 @@ public class SharedDataObject { * base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other * types. */ - @JsonProperty("content") private String content; /** The type of the data object. */ - @JsonProperty("data_object_type") private SharedDataObjectDataObjectType dataObjectType; /** * Whether to enable or disable sharing of data history. If not specified, the default is * **DISABLED**. */ - @JsonProperty("history_data_sharing_status") private SharedDataObjectHistoryDataSharingStatus historyDataSharingStatus; /** @@ -50,11 +54,9 @@ public class SharedDataObject { * a table's fully qualified name is in the format of * `..
`, */ - @JsonProperty("name") private String name; /** Array of partitions for the shared data. */ - @JsonProperty("partitions") private Collection partitions; /** @@ -63,7 +65,6 @@ public class SharedDataObject { * `shared_as` name. The `shared_as` name must be unique within a share. For * tables, the new name must follow the format of `.
`. */ - @JsonProperty("shared_as") private String sharedAs; /** @@ -74,11 +75,9 @@ public class SharedDataObject { * *

NOTE: The start_version should be <= the `current` version of the object. */ - @JsonProperty("start_version") private Long startVersion; /** One of: **ACTIVE**, **PERMISSION_DENIED**. */ - @JsonProperty("status") private SharedDataObjectStatus status; /** @@ -87,7 +86,6 @@ public class SharedDataObject { * `string_shared_as` name must be unique for objects of the same type within a Share. For * notebooks, the new name should be the new notebook file name. */ - @JsonProperty("string_shared_as") private String stringSharedAs; public SharedDataObject setAddedAt(Long addedAt) { @@ -264,4 +262,62 @@ public String toString() { .add("stringSharedAs", stringSharedAs) .toString(); } + + SharedDataObjectPb toPb() { + SharedDataObjectPb pb = new SharedDataObjectPb(); + pb.setAddedAt(addedAt); + pb.setAddedBy(addedBy); + pb.setCdfEnabled(cdfEnabled); + pb.setComment(comment); + pb.setContent(content); + pb.setDataObjectType(dataObjectType); + pb.setHistoryDataSharingStatus(historyDataSharingStatus); + pb.setName(name); + pb.setPartitions(partitions); + pb.setSharedAs(sharedAs); + pb.setStartVersion(startVersion); + pb.setStatus(status); + pb.setStringSharedAs(stringSharedAs); + + return pb; + } + + static SharedDataObject fromPb(SharedDataObjectPb pb) { + SharedDataObject model = new SharedDataObject(); + model.setAddedAt(pb.getAddedAt()); + model.setAddedBy(pb.getAddedBy()); + model.setCdfEnabled(pb.getCdfEnabled()); + model.setComment(pb.getComment()); + model.setContent(pb.getContent()); + model.setDataObjectType(pb.getDataObjectType()); + model.setHistoryDataSharingStatus(pb.getHistoryDataSharingStatus()); + model.setName(pb.getName()); + model.setPartitions(pb.getPartitions()); + model.setSharedAs(pb.getSharedAs()); + model.setStartVersion(pb.getStartVersion()); + model.setStatus(pb.getStatus()); + model.setStringSharedAs(pb.getStringSharedAs()); + + return model; + } + + public static class SharedDataObjectSerializer extends JsonSerializer { + @Override + public void 
serialize(SharedDataObject value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SharedDataObjectPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SharedDataObjectDeserializer extends JsonDeserializer { + @Override + public SharedDataObject deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SharedDataObjectPb pb = mapper.readValue(p, SharedDataObjectPb.class); + return SharedDataObject.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectPb.java new file mode 100755 index 000000000..05e07b057 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectPb.java @@ -0,0 +1,226 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SharedDataObjectPb { + @JsonProperty("added_at") + private Long addedAt; + + @JsonProperty("added_by") + private String addedBy; + + @JsonProperty("cdf_enabled") + private Boolean cdfEnabled; + + @JsonProperty("comment") + private String comment; + + @JsonProperty("content") + private String content; + + @JsonProperty("data_object_type") + private SharedDataObjectDataObjectType dataObjectType; + + @JsonProperty("history_data_sharing_status") + private SharedDataObjectHistoryDataSharingStatus historyDataSharingStatus; + + @JsonProperty("name") + private String name; + + @JsonProperty("partitions") + private Collection partitions; + + @JsonProperty("shared_as") + private String sharedAs; + + @JsonProperty("start_version") + private Long startVersion; + + @JsonProperty("status") + private SharedDataObjectStatus status; + + @JsonProperty("string_shared_as") + private String stringSharedAs; + + public SharedDataObjectPb setAddedAt(Long addedAt) { + this.addedAt = addedAt; + return this; + } + + public Long getAddedAt() { + return addedAt; + } + + public SharedDataObjectPb setAddedBy(String addedBy) { + this.addedBy = addedBy; + return this; + } + + public String getAddedBy() { + return addedBy; + } + + public SharedDataObjectPb setCdfEnabled(Boolean cdfEnabled) { + this.cdfEnabled = cdfEnabled; + return this; + } + + public Boolean getCdfEnabled() { + return cdfEnabled; + } + + public SharedDataObjectPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public SharedDataObjectPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public 
SharedDataObjectPb setDataObjectType(SharedDataObjectDataObjectType dataObjectType) { + this.dataObjectType = dataObjectType; + return this; + } + + public SharedDataObjectDataObjectType getDataObjectType() { + return dataObjectType; + } + + public SharedDataObjectPb setHistoryDataSharingStatus( + SharedDataObjectHistoryDataSharingStatus historyDataSharingStatus) { + this.historyDataSharingStatus = historyDataSharingStatus; + return this; + } + + public SharedDataObjectHistoryDataSharingStatus getHistoryDataSharingStatus() { + return historyDataSharingStatus; + } + + public SharedDataObjectPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SharedDataObjectPb setPartitions(Collection partitions) { + this.partitions = partitions; + return this; + } + + public Collection getPartitions() { + return partitions; + } + + public SharedDataObjectPb setSharedAs(String sharedAs) { + this.sharedAs = sharedAs; + return this; + } + + public String getSharedAs() { + return sharedAs; + } + + public SharedDataObjectPb setStartVersion(Long startVersion) { + this.startVersion = startVersion; + return this; + } + + public Long getStartVersion() { + return startVersion; + } + + public SharedDataObjectPb setStatus(SharedDataObjectStatus status) { + this.status = status; + return this; + } + + public SharedDataObjectStatus getStatus() { + return status; + } + + public SharedDataObjectPb setStringSharedAs(String stringSharedAs) { + this.stringSharedAs = stringSharedAs; + return this; + } + + public String getStringSharedAs() { + return stringSharedAs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharedDataObjectPb that = (SharedDataObjectPb) o; + return Objects.equals(addedAt, that.addedAt) + && Objects.equals(addedBy, that.addedBy) + && Objects.equals(cdfEnabled, that.cdfEnabled) + && Objects.equals(comment, 
that.comment) + && Objects.equals(content, that.content) + && Objects.equals(dataObjectType, that.dataObjectType) + && Objects.equals(historyDataSharingStatus, that.historyDataSharingStatus) + && Objects.equals(name, that.name) + && Objects.equals(partitions, that.partitions) + && Objects.equals(sharedAs, that.sharedAs) + && Objects.equals(startVersion, that.startVersion) + && Objects.equals(status, that.status) + && Objects.equals(stringSharedAs, that.stringSharedAs); + } + + @Override + public int hashCode() { + return Objects.hash( + addedAt, + addedBy, + cdfEnabled, + comment, + content, + dataObjectType, + historyDataSharingStatus, + name, + partitions, + sharedAs, + startVersion, + status, + stringSharedAs); + } + + @Override + public String toString() { + return new ToStringer(SharedDataObjectPb.class) + .add("addedAt", addedAt) + .add("addedBy", addedBy) + .add("cdfEnabled", cdfEnabled) + .add("comment", comment) + .add("content", content) + .add("dataObjectType", dataObjectType) + .add("historyDataSharingStatus", historyDataSharingStatus) + .add("name", name) + .add("partitions", partitions) + .add("sharedAs", sharedAs) + .add("startVersion", startVersion) + .add("status", status) + .add("stringSharedAs", stringSharedAs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java index 567d0d2a6..5a79b8978 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SharedDataObjectUpdate.SharedDataObjectUpdateSerializer.class) +@JsonDeserialize(using = SharedDataObjectUpdate.SharedDataObjectUpdateDeserializer.class) public class SharedDataObjectUpdate { /** One of: **ADD**, **REMOVE**, **UPDATE**. */ - @JsonProperty("action") private SharedDataObjectUpdateAction action; /** The data object that is being added, removed, or updated. */ - @JsonProperty("data_object") private SharedDataObject dataObject; public SharedDataObjectUpdate setAction(SharedDataObjectUpdateAction action) { @@ -55,4 +64,43 @@ public String toString() { .add("dataObject", dataObject) .toString(); } + + SharedDataObjectUpdatePb toPb() { + SharedDataObjectUpdatePb pb = new SharedDataObjectUpdatePb(); + pb.setAction(action); + pb.setDataObject(dataObject); + + return pb; + } + + static SharedDataObjectUpdate fromPb(SharedDataObjectUpdatePb pb) { + SharedDataObjectUpdate model = new SharedDataObjectUpdate(); + model.setAction(pb.getAction()); + model.setDataObject(pb.getDataObject()); + + return model; + } + + public static class SharedDataObjectUpdateSerializer + extends JsonSerializer { + @Override + public void serialize( + SharedDataObjectUpdate value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SharedDataObjectUpdatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SharedDataObjectUpdateDeserializer + extends JsonDeserializer { + @Override + public SharedDataObjectUpdate 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SharedDataObjectUpdatePb pb = mapper.readValue(p, SharedDataObjectUpdatePb.class); + return SharedDataObjectUpdate.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdatePb.java new file mode 100755 index 000000000..94dabbdff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdatePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SharedDataObjectUpdatePb { + @JsonProperty("action") + private SharedDataObjectUpdateAction action; + + @JsonProperty("data_object") + private SharedDataObject dataObject; + + public SharedDataObjectUpdatePb setAction(SharedDataObjectUpdateAction action) { + this.action = action; + return this; + } + + public SharedDataObjectUpdateAction getAction() { + return action; + } + + public SharedDataObjectUpdatePb setDataObject(SharedDataObject dataObject) { + this.dataObject = dataObject; + return this; + } + + public SharedDataObject getDataObject() { + return dataObject; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharedDataObjectUpdatePb that = (SharedDataObjectUpdatePb) o; + return Objects.equals(action, that.action) && Objects.equals(dataObject, that.dataObject); + } + + @Override + public int hashCode() { + return 
Objects.hash(action, dataObject); + } + + @Override + public String toString() { + return new ToStringer(SharedDataObjectUpdatePb.class) + .add("action", action) + .add("dataObject", dataObject) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java index df926ca13..30f2d181b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java @@ -21,7 +21,7 @@ public ShareInfo create(CreateShare request) { String path = "/api/2.1/unity-catalog/shares"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ShareInfo.class); @@ -35,7 +35,7 @@ public void delete(DeleteShareRequest request) { String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -47,7 +47,7 @@ public ShareInfo get(GetShareRequest request) { String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ShareInfo.class); } catch (IOException e) { @@ -60,7 +60,7 @@ public ListSharesResponse list(ListSharesRequest request) { String path = "/api/2.1/unity-catalog/shares"; try { Request req = new 
Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSharesResponse.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public GetSharePermissionsResponse sharePermissions(SharePermissionsRequest requ String path = String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetSharePermissionsResponse.class); } catch (IOException e) { @@ -86,7 +86,7 @@ public ShareInfo update(UpdateShare request) { String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ShareInfo.class); @@ -100,7 +100,7 @@ public UpdateSharePermissionsResponse updatePermissions(UpdateSharePermissions r String path = String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateSharePermissionsResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java index 119e8a050..c6d6d78de 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java @@ -4,50 +4,51 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Table.TableSerializer.class) +@JsonDeserialize(using = Table.TableDeserializer.class) public class Table { /** The comment of the table. */ - @JsonProperty("comment") private String comment; /** The id of the table. */ - @JsonProperty("id") private String id; /** Internal information for D2D sharing that should not be disclosed to external users. */ - @JsonProperty("internal_attributes") private TableInternalAttributes internalAttributes; /** The catalog and schema of the materialized table */ - @JsonProperty("materialization_namespace") private String materializationNamespace; /** The name of a materialized table. */ - @JsonProperty("materialized_table_name") private String materializedTableName; /** The name of the table. */ - @JsonProperty("name") private String name; /** The name of the schema that the table belongs to. */ - @JsonProperty("schema") private String schema; /** The name of the share that the table belongs to. */ - @JsonProperty("share") private String share; /** The id of the share that the table belongs to. 
*/ - @JsonProperty("share_id") private String shareId; /** The Tags of the table. */ - @JsonProperty("tags") private Collection tags; public Table setComment(String comment) { @@ -187,4 +188,55 @@ public String toString() { .add("tags", tags) .toString(); } + + TablePb toPb() { + TablePb pb = new TablePb(); + pb.setComment(comment); + pb.setId(id); + pb.setInternalAttributes(internalAttributes); + pb.setMaterializationNamespace(materializationNamespace); + pb.setMaterializedTableName(materializedTableName); + pb.setName(name); + pb.setSchema(schema); + pb.setShare(share); + pb.setShareId(shareId); + pb.setTags(tags); + + return pb; + } + + static Table fromPb(TablePb pb) { + Table model = new Table(); + model.setComment(pb.getComment()); + model.setId(pb.getId()); + model.setInternalAttributes(pb.getInternalAttributes()); + model.setMaterializationNamespace(pb.getMaterializationNamespace()); + model.setMaterializedTableName(pb.getMaterializedTableName()); + model.setName(pb.getName()); + model.setSchema(pb.getSchema()); + model.setShare(pb.getShare()); + model.setShareId(pb.getShareId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class TableSerializer extends JsonSerializer

{ + @Override + public void serialize(Table value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TablePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableDeserializer extends JsonDeserializer
{ + @Override + public Table deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TablePb pb = mapper.readValue(p, TablePb.class); + return Table.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java index b16c3d85a..df8bfdaca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Internal information for D2D sharing that should not be disclosed to external users. 
*/ @Generated +@JsonSerialize(using = TableInternalAttributes.TableInternalAttributesSerializer.class) +@JsonDeserialize(using = TableInternalAttributes.TableInternalAttributesDeserializer.class) public class TableInternalAttributes { /** * Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of @@ -18,19 +29,15 @@ public class TableInternalAttributes { * whitelisted when SEG is enabled on the workspace of the recipient, to allow the recipient users * to query this shared VIEW/FOREIGN_TABLE. */ - @JsonProperty("parent_storage_location") private String parentStorageLocation; /** The cloud storage location of a shard table with DIRECTORY_BASED_TABLE type. */ - @JsonProperty("storage_location") private String storageLocation; /** The type of the shared table. */ - @JsonProperty("type") private TableInternalAttributesSharedTableType typeValue; /** The view definition of a shared view. DEPRECATED. */ - @JsonProperty("view_definition") private String viewDefinition; public TableInternalAttributes setParentStorageLocation(String parentStorageLocation) { @@ -94,4 +101,47 @@ public String toString() { .add("viewDefinition", viewDefinition) .toString(); } + + TableInternalAttributesPb toPb() { + TableInternalAttributesPb pb = new TableInternalAttributesPb(); + pb.setParentStorageLocation(parentStorageLocation); + pb.setStorageLocation(storageLocation); + pb.setType(typeValue); + pb.setViewDefinition(viewDefinition); + + return pb; + } + + static TableInternalAttributes fromPb(TableInternalAttributesPb pb) { + TableInternalAttributes model = new TableInternalAttributes(); + model.setParentStorageLocation(pb.getParentStorageLocation()); + model.setStorageLocation(pb.getStorageLocation()); + model.setType(pb.getType()); + model.setViewDefinition(pb.getViewDefinition()); + + return model; + } + + public static class TableInternalAttributesSerializer + extends JsonSerializer { + @Override + public void serialize( + TableInternalAttributes 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TableInternalAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TableInternalAttributesDeserializer + extends JsonDeserializer { + @Override + public TableInternalAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TableInternalAttributesPb pb = mapper.readValue(p, TableInternalAttributesPb.class); + return TableInternalAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesPb.java new file mode 100755 index 000000000..d61a5e37a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Internal information for D2D sharing that should not be disclosed to external users. 
*/ +@Generated +class TableInternalAttributesPb { + @JsonProperty("parent_storage_location") + private String parentStorageLocation; + + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("type") + private TableInternalAttributesSharedTableType typeValue; + + @JsonProperty("view_definition") + private String viewDefinition; + + public TableInternalAttributesPb setParentStorageLocation(String parentStorageLocation) { + this.parentStorageLocation = parentStorageLocation; + return this; + } + + public String getParentStorageLocation() { + return parentStorageLocation; + } + + public TableInternalAttributesPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public TableInternalAttributesPb setType(TableInternalAttributesSharedTableType typeValue) { + this.typeValue = typeValue; + return this; + } + + public TableInternalAttributesSharedTableType getType() { + return typeValue; + } + + public TableInternalAttributesPb setViewDefinition(String viewDefinition) { + this.viewDefinition = viewDefinition; + return this; + } + + public String getViewDefinition() { + return viewDefinition; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableInternalAttributesPb that = (TableInternalAttributesPb) o; + return Objects.equals(parentStorageLocation, that.parentStorageLocation) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(viewDefinition, that.viewDefinition); + } + + @Override + public int hashCode() { + return Objects.hash(parentStorageLocation, storageLocation, typeValue, viewDefinition); + } + + @Override + public String toString() { + return new ToStringer(TableInternalAttributesPb.class) + .add("parentStorageLocation", 
parentStorageLocation) + .add("storageLocation", storageLocation) + .add("typeValue", typeValue) + .add("viewDefinition", viewDefinition) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TablePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TablePb.java new file mode 100755 index 000000000..1d5321f6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TablePb.java @@ -0,0 +1,180 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TablePb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("id") + private String id; + + @JsonProperty("internal_attributes") + private TableInternalAttributes internalAttributes; + + @JsonProperty("materialization_namespace") + private String materializationNamespace; + + @JsonProperty("materialized_table_name") + private String materializedTableName; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("share") + private String share; + + @JsonProperty("share_id") + private String shareId; + + @JsonProperty("tags") + private Collection tags; + + public TablePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public TablePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public TablePb setInternalAttributes(TableInternalAttributes internalAttributes) { + this.internalAttributes = internalAttributes; + return this; + } + + public TableInternalAttributes getInternalAttributes() { + return 
internalAttributes; + } + + public TablePb setMaterializationNamespace(String materializationNamespace) { + this.materializationNamespace = materializationNamespace; + return this; + } + + public String getMaterializationNamespace() { + return materializationNamespace; + } + + public TablePb setMaterializedTableName(String materializedTableName) { + this.materializedTableName = materializedTableName; + return this; + } + + public String getMaterializedTableName() { + return materializedTableName; + } + + public TablePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public TablePb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public TablePb setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public TablePb setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public TablePb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TablePb that = (TablePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(internalAttributes, that.internalAttributes) + && Objects.equals(materializationNamespace, that.materializationNamespace) + && Objects.equals(materializedTableName, that.materializedTableName) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + id, + internalAttributes, + materializationNamespace, + 
materializedTableName, + name, + schema, + share, + shareId, + tags); + } + + @Override + public String toString() { + return new ToStringer(TablePb.class) + .add("comment", comment) + .add("id", id) + .add("internalAttributes", internalAttributes) + .add("materializationNamespace", materializationNamespace) + .add("materializedTableName", materializedTableName) + .add("name", name) + .add("schema", schema) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java index d318fe872..01b3f9d0b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java @@ -3,26 +3,35 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update recipient federation policy */ @Generated +@JsonSerialize(using = 
UpdateFederationPolicyRequest.UpdateFederationPolicyRequestSerializer.class) +@JsonDeserialize( + using = UpdateFederationPolicyRequest.UpdateFederationPolicyRequestDeserializer.class) public class UpdateFederationPolicyRequest { /** Name of the policy. This is the name of the current name of the policy. */ - @JsonIgnore private String name; + private String name; /** */ - @JsonProperty("policy") private FederationPolicy policy; /** * Name of the recipient. This is the name of the recipient for which the policy is being updated. */ - @JsonIgnore private String recipientName; + private String recipientName; /** * The field mask specifies which fields of the policy to update. To specify multiple fields in @@ -31,8 +40,6 @@ public class UpdateFederationPolicyRequest { * policy provided in the update request will overwrite the corresponding fields in the existing * policy. Example value: 'comment,oidc_policy.audiences'. */ - @JsonIgnore - @QueryParam("update_mask") private String updateMask; public UpdateFederationPolicyRequest setName(String name) { @@ -96,4 +103,48 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateFederationPolicyRequestPb toPb() { + UpdateFederationPolicyRequestPb pb = new UpdateFederationPolicyRequestPb(); + pb.setName(name); + pb.setPolicy(policy); + pb.setRecipientName(recipientName); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateFederationPolicyRequest fromPb(UpdateFederationPolicyRequestPb pb) { + UpdateFederationPolicyRequest model = new UpdateFederationPolicyRequest(); + model.setName(pb.getName()); + model.setPolicy(pb.getPolicy()); + model.setRecipientName(pb.getRecipientName()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateFederationPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateFederationPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + 
UpdateFederationPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateFederationPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateFederationPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateFederationPolicyRequestPb pb = + mapper.readValue(p, UpdateFederationPolicyRequestPb.class); + return UpdateFederationPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequestPb.java new file mode 100755 index 000000000..c2541c901 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update recipient federation policy */ +@Generated +class UpdateFederationPolicyRequestPb { + @JsonIgnore private String name; + + @JsonProperty("policy") + private FederationPolicy policy; + + @JsonIgnore private String recipientName; + + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateFederationPolicyRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateFederationPolicyRequestPb setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public UpdateFederationPolicyRequestPb setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + public UpdateFederationPolicyRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateFederationPolicyRequestPb that = (UpdateFederationPolicyRequestPb) o; + return Objects.equals(name, that.name) + && Objects.equals(policy, that.policy) + && Objects.equals(recipientName, that.recipientName) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(name, policy, recipientName, updateMask); + } + + @Override + public String toString() { + return new 
ToStringer(UpdateFederationPolicyRequestPb.class) + .add("name", name) + .add("policy", policy) + .add("recipientName", recipientName) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java index c6d0621dc..607aaf6d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateProvider.UpdateProviderSerializer.class) +@JsonDeserialize(using = UpdateProvider.UpdateProviderDeserializer.class) public class UpdateProvider { /** Description about the provider. */ - @JsonProperty("comment") private String comment; /** Name of the provider. */ - @JsonIgnore private String name; + private String name; /** New name for the provider. */ - @JsonProperty("new_name") private String newName; /** Username of Provider owner. 
*/ - @JsonProperty("owner") private String owner; /** * This field is required when the __authentication_type__ is **TOKEN**, * **OAUTH_CLIENT_CREDENTIALS** or not provided. */ - @JsonProperty("recipient_profile_str") private String recipientProfileStr; public UpdateProvider setComment(String comment) { @@ -104,4 +110,46 @@ public String toString() { .add("recipientProfileStr", recipientProfileStr) .toString(); } + + UpdateProviderPb toPb() { + UpdateProviderPb pb = new UpdateProviderPb(); + pb.setComment(comment); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setRecipientProfileStr(recipientProfileStr); + + return pb; + } + + static UpdateProvider fromPb(UpdateProviderPb pb) { + UpdateProvider model = new UpdateProvider(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setRecipientProfileStr(pb.getRecipientProfileStr()); + + return model; + } + + public static class UpdateProviderSerializer extends JsonSerializer { + @Override + public void serialize(UpdateProvider value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateProviderPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateProviderDeserializer extends JsonDeserializer { + @Override + public UpdateProvider deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateProviderPb pb = mapper.readValue(p, UpdateProviderPb.class); + return UpdateProvider.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProviderPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProviderPb.java new file mode 100755 index 000000000..f5e87a42a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProviderPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateProviderPb { + @JsonProperty("comment") + private String comment; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("recipient_profile_str") + private String recipientProfileStr; + + public UpdateProviderPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateProviderPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateProviderPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateProviderPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateProviderPb setRecipientProfileStr(String recipientProfileStr) { + this.recipientProfileStr = recipientProfileStr; + return this; + } + + public String 
getRecipientProfileStr() { + return recipientProfileStr; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderPb that = (UpdateProviderPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(recipientProfileStr, that.recipientProfileStr); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, newName, owner, recipientProfileStr); + } + + @Override + public String toString() { + return new ToStringer(UpdateProviderPb.class) + .add("comment", comment) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .add("recipientProfileStr", recipientProfileStr) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java index 3b896d11f..2f26d339d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java @@ -4,33 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRecipient.UpdateRecipientSerializer.class) +@JsonDeserialize(using = UpdateRecipient.UpdateRecipientDeserializer.class) public class UpdateRecipient { /** Description about the recipient. */ - @JsonProperty("comment") private String comment; /** Expiration timestamp of the token, in epoch milliseconds. */ - @JsonProperty("expiration_time") private Long expirationTime; /** IP Access List */ - @JsonProperty("ip_access_list") private IpAccessList ipAccessList; /** Name of the recipient. */ - @JsonIgnore private String name; + private String name; /** New name for the recipient. . */ - @JsonProperty("new_name") private String newName; /** Username of the recipient owner. */ - @JsonProperty("owner") private String owner; /** @@ -38,7 +43,6 @@ public class UpdateRecipient { * specified properties will override the existing properties. To add and remove properties, one * would need to perform a read-modify-write. 
*/ - @JsonProperty("properties_kvpairs") private SecurablePropertiesKvPairs propertiesKvpairs; public UpdateRecipient setComment(String comment) { @@ -136,4 +140,50 @@ public String toString() { .add("propertiesKvpairs", propertiesKvpairs) .toString(); } + + UpdateRecipientPb toPb() { + UpdateRecipientPb pb = new UpdateRecipientPb(); + pb.setComment(comment); + pb.setExpirationTime(expirationTime); + pb.setIpAccessList(ipAccessList); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setPropertiesKvpairs(propertiesKvpairs); + + return pb; + } + + static UpdateRecipient fromPb(UpdateRecipientPb pb) { + UpdateRecipient model = new UpdateRecipient(); + model.setComment(pb.getComment()); + model.setExpirationTime(pb.getExpirationTime()); + model.setIpAccessList(pb.getIpAccessList()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setPropertiesKvpairs(pb.getPropertiesKvpairs()); + + return model; + } + + public static class UpdateRecipientSerializer extends JsonSerializer { + @Override + public void serialize(UpdateRecipient value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRecipientPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRecipientDeserializer extends JsonDeserializer { + @Override + public UpdateRecipient deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRecipientPb pb = mapper.readValue(p, UpdateRecipientPb.class); + return UpdateRecipient.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipientPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipientPb.java new file mode 100755 index 000000000..fe91efc42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipientPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRecipientPb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("expiration_time") + private Long expirationTime; + + @JsonProperty("ip_access_list") + private IpAccessList ipAccessList; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("properties_kvpairs") + private SecurablePropertiesKvPairs propertiesKvpairs; + + public UpdateRecipientPb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateRecipientPb setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + public UpdateRecipientPb setIpAccessList(IpAccessList ipAccessList) { + this.ipAccessList = ipAccessList; + return this; + } + + public IpAccessList getIpAccessList() { + return ipAccessList; + } + + public UpdateRecipientPb setName(String name) { + this.name = 
name; + return this; + } + + public String getName() { + return name; + } + + public UpdateRecipientPb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateRecipientPb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateRecipientPb setPropertiesKvpairs(SecurablePropertiesKvPairs propertiesKvpairs) { + this.propertiesKvpairs = propertiesKvpairs; + return this; + } + + public SecurablePropertiesKvPairs getPropertiesKvpairs() { + return propertiesKvpairs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRecipientPb that = (UpdateRecipientPb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(expirationTime, that.expirationTime) + && Objects.equals(ipAccessList, that.ipAccessList) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(propertiesKvpairs, that.propertiesKvpairs); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, expirationTime, ipAccessList, name, newName, owner, propertiesKvpairs); + } + + @Override + public String toString() { + return new ToStringer(UpdateRecipientPb.class) + .add("comment", comment) + .add("expirationTime", expirationTime) + .add("ipAccessList", ipAccessList) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .add("propertiesKvpairs", propertiesKvpairs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java index 088633347..e4b9f34b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java @@ -4,34 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateShare.UpdateShareSerializer.class) +@JsonDeserialize(using = UpdateShare.UpdateShareDeserializer.class) public class UpdateShare { /** User-provided free-form text description. */ - @JsonProperty("comment") private String comment; /** The name of the share. */ - @JsonIgnore private String name; + private String name; /** New name for the share. */ - @JsonProperty("new_name") private String newName; /** Username of current owner of share. */ - @JsonProperty("owner") private String owner; /** Storage root URL for the share. */ - @JsonProperty("storage_root") private String storageRoot; /** Array of shared data object updates. 
*/ - @JsonProperty("updates") private Collection updates; public UpdateShare setComment(String comment) { @@ -117,4 +122,47 @@ public String toString() { .add("updates", updates) .toString(); } + + UpdateSharePb toPb() { + UpdateSharePb pb = new UpdateSharePb(); + pb.setComment(comment); + pb.setName(name); + pb.setNewName(newName); + pb.setOwner(owner); + pb.setStorageRoot(storageRoot); + pb.setUpdates(updates); + + return pb; + } + + static UpdateShare fromPb(UpdateSharePb pb) { + UpdateShare model = new UpdateShare(); + model.setComment(pb.getComment()); + model.setName(pb.getName()); + model.setNewName(pb.getNewName()); + model.setOwner(pb.getOwner()); + model.setStorageRoot(pb.getStorageRoot()); + model.setUpdates(pb.getUpdates()); + + return model; + } + + public static class UpdateShareSerializer extends JsonSerializer { + @Override + public void serialize(UpdateShare value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateSharePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateShareDeserializer extends JsonDeserializer { + @Override + public UpdateShare deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateSharePb pb = mapper.readValue(p, UpdateSharePb.class); + return UpdateShare.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePb.java new file mode 100755 index 000000000..32444412f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateSharePb { + @JsonProperty("comment") + private String comment; + + @JsonIgnore private String name; + + @JsonProperty("new_name") + private String newName; + + @JsonProperty("owner") + private String owner; + + @JsonProperty("storage_root") + private String storageRoot; + + @JsonProperty("updates") + private Collection updates; + + public UpdateSharePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateSharePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateSharePb setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateSharePb setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateSharePb setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + public UpdateSharePb setUpdates(Collection updates) { + this.updates = updates; + return this; + } + + public Collection getUpdates() { + return updates; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSharePb that = (UpdateSharePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(storageRoot, that.storageRoot) + && 
Objects.equals(updates, that.updates); + } + + @Override + public int hashCode() { + return Objects.hash(comment, name, newName, owner, storageRoot, updates); + } + + @Override + public String toString() { + return new ToStringer(UpdateSharePb.class) + .add("comment", comment) + .add("name", name) + .add("newName", newName) + .add("owner", owner) + .add("storageRoot", storageRoot) + .add("updates", updates) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java index 26288362d..7888f8d00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateSharePermissions.UpdateSharePermissionsSerializer.class) +@JsonDeserialize(using = UpdateSharePermissions.UpdateSharePermissionsDeserializer.class) public class UpdateSharePermissions { /** Array of permission changes. 
*/ - @JsonProperty("changes") private Collection changes; /** The name of the share. */ - @JsonIgnore private String name; + private String name; /** Optional. Whether to return the latest permissions list of the share in the response. */ - @JsonProperty("omit_permissions_list") private Boolean omitPermissionsList; public UpdateSharePermissions setChanges(Collection changes) { @@ -72,4 +80,45 @@ public String toString() { .add("omitPermissionsList", omitPermissionsList) .toString(); } + + UpdateSharePermissionsPb toPb() { + UpdateSharePermissionsPb pb = new UpdateSharePermissionsPb(); + pb.setChanges(changes); + pb.setName(name); + pb.setOmitPermissionsList(omitPermissionsList); + + return pb; + } + + static UpdateSharePermissions fromPb(UpdateSharePermissionsPb pb) { + UpdateSharePermissions model = new UpdateSharePermissions(); + model.setChanges(pb.getChanges()); + model.setName(pb.getName()); + model.setOmitPermissionsList(pb.getOmitPermissionsList()); + + return model; + } + + public static class UpdateSharePermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateSharePermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateSharePermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateSharePermissionsDeserializer + extends JsonDeserializer { + @Override + public UpdateSharePermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateSharePermissionsPb pb = mapper.readValue(p, UpdateSharePermissionsPb.class); + return UpdateSharePermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsPb.java new file mode 100755 index 000000000..541c5181e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateSharePermissionsPb { + @JsonProperty("changes") + private Collection changes; + + @JsonIgnore private String name; + + @JsonProperty("omit_permissions_list") + private Boolean omitPermissionsList; + + public UpdateSharePermissionsPb setChanges(Collection changes) { + this.changes = changes; + return this; + } + + public Collection getChanges() { + return changes; + } + + public UpdateSharePermissionsPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateSharePermissionsPb setOmitPermissionsList(Boolean omitPermissionsList) { + this.omitPermissionsList = omitPermissionsList; + return this; + } + + public Boolean getOmitPermissionsList() { + return omitPermissionsList; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSharePermissionsPb that = (UpdateSharePermissionsPb) o; + return Objects.equals(changes, 
that.changes) + && Objects.equals(name, that.name) + && Objects.equals(omitPermissionsList, that.omitPermissionsList); + } + + @Override + public int hashCode() { + return Objects.hash(changes, name, omitPermissionsList); + } + + @Override + public String toString() { + return new ToStringer(UpdateSharePermissionsPb.class) + .add("changes", changes) + .add("name", name) + .add("omitPermissionsList", omitPermissionsList) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java index f9f789ceb..c0b6b57be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateSharePermissionsResponse.UpdateSharePermissionsResponseSerializer.class) +@JsonDeserialize( + using = UpdateSharePermissionsResponse.UpdateSharePermissionsResponseDeserializer.class) public class UpdateSharePermissionsResponse { /** The privileges assigned to each 
principal */ - @JsonProperty("privilege_assignments") private Collection privilegeAssignments; public UpdateSharePermissionsResponse setPrivilegeAssignments( @@ -43,4 +55,42 @@ public String toString() { .add("privilegeAssignments", privilegeAssignments) .toString(); } + + UpdateSharePermissionsResponsePb toPb() { + UpdateSharePermissionsResponsePb pb = new UpdateSharePermissionsResponsePb(); + pb.setPrivilegeAssignments(privilegeAssignments); + + return pb; + } + + static UpdateSharePermissionsResponse fromPb(UpdateSharePermissionsResponsePb pb) { + UpdateSharePermissionsResponse model = new UpdateSharePermissionsResponse(); + model.setPrivilegeAssignments(pb.getPrivilegeAssignments()); + + return model; + } + + public static class UpdateSharePermissionsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateSharePermissionsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateSharePermissionsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateSharePermissionsResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateSharePermissionsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateSharePermissionsResponsePb pb = + mapper.readValue(p, UpdateSharePermissionsResponsePb.class); + return UpdateSharePermissionsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponsePb.java new file mode 100755 index 000000000..b7d95b6e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateSharePermissionsResponsePb { + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public UpdateSharePermissionsResponsePb setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSharePermissionsResponsePb that = (UpdateSharePermissionsResponsePb) o; + return Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(UpdateSharePermissionsResponsePb.class) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java index 1224f0464..fd225c22a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java @@ -4,45 +4,48 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Volume.VolumeSerializer.class) +@JsonDeserialize(using = Volume.VolumeDeserializer.class) public class Volume { /** The comment of the volume. */ - @JsonProperty("comment") private String comment; /** * This id maps to the shared_volume_id in database Recipient needs shared_volume_id for recon to * check if this volume is already in recipient's DB or not. */ - @JsonProperty("id") private String id; /** Internal attributes for D2D sharing that should not be disclosed to external users. */ - @JsonProperty("internal_attributes") private VolumeInternalAttributes internalAttributes; /** The name of the volume. */ - @JsonProperty("name") private String name; /** The name of the schema that the volume belongs to. */ - @JsonProperty("schema") private String schema; /** The name of the share that the volume belongs to. 
*/ - @JsonProperty("share") private String share; /** / The id of the share that the volume belongs to. */ - @JsonProperty("share_id") private String shareId; /** The tags of the volume. */ - @JsonProperty("tags") private Collection tags; public Volume setComment(String comment) { @@ -150,4 +153,51 @@ public String toString() { .add("tags", tags) .toString(); } + + VolumePb toPb() { + VolumePb pb = new VolumePb(); + pb.setComment(comment); + pb.setId(id); + pb.setInternalAttributes(internalAttributes); + pb.setName(name); + pb.setSchema(schema); + pb.setShare(share); + pb.setShareId(shareId); + pb.setTags(tags); + + return pb; + } + + static Volume fromPb(VolumePb pb) { + Volume model = new Volume(); + model.setComment(pb.getComment()); + model.setId(pb.getId()); + model.setInternalAttributes(pb.getInternalAttributes()); + model.setName(pb.getName()); + model.setSchema(pb.getSchema()); + model.setShare(pb.getShare()); + model.setShareId(pb.getShareId()); + model.setTags(pb.getTags()); + + return model; + } + + public static class VolumeSerializer extends JsonSerializer { + @Override + public void serialize(Volume value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VolumePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VolumeDeserializer extends JsonDeserializer { + @Override + public Volume deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VolumePb pb = mapper.readValue(p, VolumePb.class); + return Volume.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java index c37dbac83..c845eb9c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Internal information for D2D sharing that should not be disclosed to external users. */ @Generated +@JsonSerialize(using = VolumeInternalAttributes.VolumeInternalAttributesSerializer.class) +@JsonDeserialize(using = VolumeInternalAttributes.VolumeInternalAttributesDeserializer.class) public class VolumeInternalAttributes { /** The cloud storage location of the volume */ - @JsonProperty("storage_location") private String storageLocation; /** The type of the shared volume. 
*/ - @JsonProperty("type") private String typeValue; public VolumeInternalAttributes setStorageLocation(String storageLocation) { @@ -57,4 +66,43 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + VolumeInternalAttributesPb toPb() { + VolumeInternalAttributesPb pb = new VolumeInternalAttributesPb(); + pb.setStorageLocation(storageLocation); + pb.setType(typeValue); + + return pb; + } + + static VolumeInternalAttributes fromPb(VolumeInternalAttributesPb pb) { + VolumeInternalAttributes model = new VolumeInternalAttributes(); + model.setStorageLocation(pb.getStorageLocation()); + model.setType(pb.getType()); + + return model; + } + + public static class VolumeInternalAttributesSerializer + extends JsonSerializer { + @Override + public void serialize( + VolumeInternalAttributes value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VolumeInternalAttributesPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VolumeInternalAttributesDeserializer + extends JsonDeserializer { + @Override + public VolumeInternalAttributes deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VolumeInternalAttributesPb pb = mapper.readValue(p, VolumeInternalAttributesPb.class); + return VolumeInternalAttributes.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributesPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributesPb.java new file mode 100755 index 000000000..5fcc494b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributesPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Internal information for D2D sharing that should not be disclosed to external users. */ +@Generated +class VolumeInternalAttributesPb { + @JsonProperty("storage_location") + private String storageLocation; + + @JsonProperty("type") + private String typeValue; + + public VolumeInternalAttributesPb setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public VolumeInternalAttributesPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VolumeInternalAttributesPb that = (VolumeInternalAttributesPb) o; + return Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(storageLocation, typeValue); + } + + @Override + public String toString() { + return new ToStringer(VolumeInternalAttributesPb.class) + .add("storageLocation", storageLocation) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumePb.java new file mode 100755 index 000000000..c3f85b93d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumePb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class VolumePb { + @JsonProperty("comment") + private String comment; + + @JsonProperty("id") + private String id; + + @JsonProperty("internal_attributes") + private VolumeInternalAttributes internalAttributes; + + @JsonProperty("name") + private String name; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("share") + private String share; + + @JsonProperty("share_id") + private String shareId; + + @JsonProperty("tags") + private Collection tags; + + public VolumePb setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public VolumePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public VolumePb setInternalAttributes(VolumeInternalAttributes internalAttributes) { + this.internalAttributes = internalAttributes; + return this; + } + + public VolumeInternalAttributes getInternalAttributes() { + return internalAttributes; + } + + public VolumePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public VolumePb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public VolumePb setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public VolumePb setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public VolumePb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VolumePb that = (VolumePb) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(internalAttributes, that.internalAttributes) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(comment, id, internalAttributes, name, schema, share, shareId, tags); + } + + @Override + public String toString() { + return new ToStringer(VolumePb.class) + .add("comment", comment) + .add("id", id) + .add("internalAttributes", internalAttributes) + .add("name", name) + .add("schema", schema) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java index 7e1cb0289..f182f9c56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AccessControl.AccessControlSerializer.class) +@JsonDeserialize(using = AccessControl.AccessControlDeserializer.class) public class AccessControl { /** */ - @JsonProperty("group_name") private String groupName; /** * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the * query * `CAN_MANAGE`: Can manage the query */ - @JsonProperty("permission_level") private PermissionLevel permissionLevel; /** */ - @JsonProperty("user_name") private String userName; public AccessControl setGroupName(String groupName) { @@ -74,4 +82,41 @@ public String toString() { .add("userName", userName) .toString(); } + + AccessControlPb toPb() { + AccessControlPb pb = new AccessControlPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setUserName(userName); + + return pb; + } + + static AccessControl fromPb(AccessControlPb pb) { + AccessControl model = new AccessControl(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class AccessControlSerializer extends JsonSerializer { + @Override + public void serialize(AccessControl value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AccessControlPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AccessControlDeserializer extends JsonDeserializer { + @Override + public AccessControl deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AccessControlPb pb = mapper.readValue(p, AccessControlPb.class); + return AccessControl.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControlPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControlPb.java new file mode 100755 index 000000000..4e25fcf4b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControlPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AccessControlPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private PermissionLevel permissionLevel; + + @JsonProperty("user_name") + private String userName; + + public AccessControlPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public AccessControlPb setPermissionLevel(PermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public PermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public AccessControlPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccessControlPb that = (AccessControlPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + 
return Objects.hash(groupName, permissionLevel, userName); + } + + @Override + public String toString() { + return new ToStringer(AccessControlPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java index 7f916cf5f..50f240484 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Alert.AlertSerializer.class) +@JsonDeserialize(using = Alert.AlertDeserializer.class) public class Alert { /** Trigger conditions of the alert. */ - @JsonProperty("condition") private AlertCondition condition; /** The timestamp indicating when the alert was created. */ - @JsonProperty("create_time") private String createTime; /** @@ -22,7 +31,6 @@ public class Alert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_body") private String customBody; /** @@ -31,57 +39,45 @@ public class Alert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_subject") private String customSubject; /** The display name of the alert. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying the alert. */ - @JsonProperty("id") private String id; /** The workspace state of the alert. Used for tracking trashed status. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** Whether to notify alert subscribers when alert returns back to normal. */ - @JsonProperty("notify_on_ok") private Boolean notifyOnOk; /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** The workspace path of the folder containing the alert. */ - @JsonProperty("parent_path") private String parentPath; /** UUID of the query attached to the alert. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it * can be triggered again. If 0 or not specified, the alert will not be triggered again. */ - @JsonProperty("seconds_to_retrigger") private Long secondsToRetrigger; /** * Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not * yet been evaluated or ran into an error during the last evaluation. */ - @JsonProperty("state") private AlertState state; /** Timestamp when the alert was last triggered, if the alert has been triggered before. */ - @JsonProperty("trigger_time") private String triggerTime; /** The timestamp indicating when the alert was updated. 
*/ - @JsonProperty("update_time") private String updateTime; public Alert setCondition(AlertCondition condition) { @@ -281,4 +277,65 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + AlertPb toPb() { + AlertPb pb = new AlertPb(); + pb.setCondition(condition); + pb.setCreateTime(createTime); + pb.setCustomBody(customBody); + pb.setCustomSubject(customSubject); + pb.setDisplayName(displayName); + pb.setId(id); + pb.setLifecycleState(lifecycleState); + pb.setNotifyOnOk(notifyOnOk); + pb.setOwnerUserName(ownerUserName); + pb.setParentPath(parentPath); + pb.setQueryId(queryId); + pb.setSecondsToRetrigger(secondsToRetrigger); + pb.setState(state); + pb.setTriggerTime(triggerTime); + pb.setUpdateTime(updateTime); + + return pb; + } + + static Alert fromPb(AlertPb pb) { + Alert model = new Alert(); + model.setCondition(pb.getCondition()); + model.setCreateTime(pb.getCreateTime()); + model.setCustomBody(pb.getCustomBody()); + model.setCustomSubject(pb.getCustomSubject()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + model.setLifecycleState(pb.getLifecycleState()); + model.setNotifyOnOk(pb.getNotifyOnOk()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setParentPath(pb.getParentPath()); + model.setQueryId(pb.getQueryId()); + model.setSecondsToRetrigger(pb.getSecondsToRetrigger()); + model.setState(pb.getState()); + model.setTriggerTime(pb.getTriggerTime()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class AlertSerializer extends JsonSerializer { + @Override + public void serialize(Alert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertDeserializer extends JsonDeserializer { + @Override + public Alert deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and 
it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertPb pb = mapper.readValue(p, AlertPb.class); + return Alert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java index 2d3b45fde..745273615 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertCondition.AlertConditionSerializer.class) +@JsonDeserialize(using = AlertCondition.AlertConditionDeserializer.class) public class AlertCondition { /** Alert state if result is empty. */ - @JsonProperty("empty_result_state") private AlertState emptyResultState; /** Operator used for comparison in alert evaluation. */ - @JsonProperty("op") private AlertOperator op; /** Name of the column from the query result to use for comparison in alert evaluation. */ - @JsonProperty("operand") private AlertConditionOperand operand; /** Threshold value used for comparison in alert evaluation. 
*/ - @JsonProperty("threshold") private AlertConditionThreshold threshold; public AlertCondition setEmptyResultState(AlertState emptyResultState) { @@ -86,4 +93,44 @@ public String toString() { .add("threshold", threshold) .toString(); } + + AlertConditionPb toPb() { + AlertConditionPb pb = new AlertConditionPb(); + pb.setEmptyResultState(emptyResultState); + pb.setOp(op); + pb.setOperand(operand); + pb.setThreshold(threshold); + + return pb; + } + + static AlertCondition fromPb(AlertConditionPb pb) { + AlertCondition model = new AlertCondition(); + model.setEmptyResultState(pb.getEmptyResultState()); + model.setOp(pb.getOp()); + model.setOperand(pb.getOperand()); + model.setThreshold(pb.getThreshold()); + + return model; + } + + public static class AlertConditionSerializer extends JsonSerializer { + @Override + public void serialize(AlertCondition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertConditionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertConditionDeserializer extends JsonDeserializer { + @Override + public AlertCondition deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertConditionPb pb = mapper.readValue(p, AlertConditionPb.class); + return AlertCondition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperand.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperand.java index 904df5af4..7ff64262b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperand.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperand.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertConditionOperand.AlertConditionOperandSerializer.class) +@JsonDeserialize(using = AlertConditionOperand.AlertConditionOperandDeserializer.class) public class AlertConditionOperand { /** */ - @JsonProperty("column") private AlertOperandColumn column; public AlertConditionOperand setColumn(AlertOperandColumn column) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(AlertConditionOperand.class).add("column", column).toString(); } + + AlertConditionOperandPb toPb() { + AlertConditionOperandPb pb = new AlertConditionOperandPb(); + pb.setColumn(column); + + return pb; + } + + static 
AlertConditionOperand fromPb(AlertConditionOperandPb pb) { + AlertConditionOperand model = new AlertConditionOperand(); + model.setColumn(pb.getColumn()); + + return model; + } + + public static class AlertConditionOperandSerializer + extends JsonSerializer { + @Override + public void serialize( + AlertConditionOperand value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertConditionOperandPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertConditionOperandDeserializer + extends JsonDeserializer { + @Override + public AlertConditionOperand deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertConditionOperandPb pb = mapper.readValue(p, AlertConditionOperandPb.class); + return AlertConditionOperand.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperandPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperandPb.java new file mode 100755 index 000000000..d08be3e92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperandPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertConditionOperandPb { + @JsonProperty("column") + private AlertOperandColumn column; + + public AlertConditionOperandPb setColumn(AlertOperandColumn column) { + this.column = column; + return this; + } + + public AlertOperandColumn getColumn() { + return column; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertConditionOperandPb that = (AlertConditionOperandPb) o; + return Objects.equals(column, that.column); + } + + @Override + public int hashCode() { + return Objects.hash(column); + } + + @Override + public String toString() { + return new ToStringer(AlertConditionOperandPb.class).add("column", column).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionPb.java new file mode 100755 index 000000000..704079241 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertConditionPb { + @JsonProperty("empty_result_state") + private AlertState emptyResultState; + + @JsonProperty("op") + private AlertOperator op; + + @JsonProperty("operand") + private AlertConditionOperand operand; + + @JsonProperty("threshold") + private AlertConditionThreshold threshold; + + public AlertConditionPb setEmptyResultState(AlertState emptyResultState) { + this.emptyResultState = emptyResultState; + return this; + } + + public AlertState getEmptyResultState() { + return emptyResultState; + } + + public AlertConditionPb setOp(AlertOperator op) { + this.op = op; + return this; + } + + public AlertOperator getOp() { + return op; + } + + public AlertConditionPb setOperand(AlertConditionOperand operand) { + this.operand = operand; + return this; + } + + public AlertConditionOperand getOperand() { + return operand; + } + + public AlertConditionPb setThreshold(AlertConditionThreshold threshold) { + this.threshold = threshold; + return this; + } + + public AlertConditionThreshold getThreshold() { + return threshold; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertConditionPb that = (AlertConditionPb) o; + return Objects.equals(emptyResultState, that.emptyResultState) + && Objects.equals(op, that.op) + && Objects.equals(operand, that.operand) + && Objects.equals(threshold, that.threshold); + } + + @Override + public int hashCode() { + return Objects.hash(emptyResultState, op, operand, threshold); + } + + @Override + public String toString() { + return new ToStringer(AlertConditionPb.class) + .add("emptyResultState", emptyResultState) + .add("op", op) + .add("operand", operand) + .add("threshold", threshold) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThreshold.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThreshold.java index bd487e3ae..b92e04310 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThreshold.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThreshold.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertConditionThreshold.AlertConditionThresholdSerializer.class) +@JsonDeserialize(using = AlertConditionThreshold.AlertConditionThresholdDeserializer.class) public class AlertConditionThreshold { /** */ - @JsonProperty("value") private AlertOperandValue value; public AlertConditionThreshold setValue(AlertOperandValue value) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(AlertConditionThreshold.class).add("value", value).toString(); } + + AlertConditionThresholdPb toPb() { + AlertConditionThresholdPb pb = new AlertConditionThresholdPb(); + pb.setValue(value); + + return pb; + } + + static AlertConditionThreshold fromPb(AlertConditionThresholdPb pb) { + AlertConditionThreshold model = new AlertConditionThreshold(); + 
model.setValue(pb.getValue()); + + return model; + } + + public static class AlertConditionThresholdSerializer + extends JsonSerializer { + @Override + public void serialize( + AlertConditionThreshold value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertConditionThresholdPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertConditionThresholdDeserializer + extends JsonDeserializer { + @Override + public AlertConditionThreshold deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertConditionThresholdPb pb = mapper.readValue(p, AlertConditionThresholdPb.class); + return AlertConditionThreshold.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThresholdPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThresholdPb.java new file mode 100755 index 000000000..06c12fab4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThresholdPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertConditionThresholdPb { + @JsonProperty("value") + private AlertOperandValue value; + + public AlertConditionThresholdPb setValue(AlertOperandValue value) { + this.value = value; + return this; + } + + public AlertOperandValue getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertConditionThresholdPb that = (AlertConditionThresholdPb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(AlertConditionThresholdPb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumn.java index a39870d04..54f342a75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumn.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertOperandColumn.AlertOperandColumnSerializer.class) +@JsonDeserialize(using = AlertOperandColumn.AlertOperandColumnDeserializer.class) public class AlertOperandColumn { /** */ - @JsonProperty("name") private String name; public AlertOperandColumn setName(String name) { @@ -39,4 +49,38 @@ public int hashCode() { public String toString() { return new ToStringer(AlertOperandColumn.class).add("name", name).toString(); } + + AlertOperandColumnPb toPb() { + AlertOperandColumnPb pb = new AlertOperandColumnPb(); + pb.setName(name); + + return pb; + } + + static AlertOperandColumn fromPb(AlertOperandColumnPb pb) { + AlertOperandColumn model = new AlertOperandColumn(); + model.setName(pb.getName()); + + return model; + } + + public static class AlertOperandColumnSerializer extends JsonSerializer { + @Override + public void serialize(AlertOperandColumn value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertOperandColumnPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertOperandColumnDeserializer extends JsonDeserializer { + @Override + public AlertOperandColumn deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertOperandColumnPb pb = mapper.readValue(p, AlertOperandColumnPb.class); + return AlertOperandColumn.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumnPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumnPb.java new file mode 100755 index 000000000..ff004e752 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumnPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertOperandColumnPb { + @JsonProperty("name") + private String name; + + public AlertOperandColumnPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertOperandColumnPb that = (AlertOperandColumnPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(AlertOperandColumnPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValue.java index 21ed4e435..dd84acdb5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValue.java @@ -4,21 +4,29 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertOperandValue.AlertOperandValueSerializer.class) +@JsonDeserialize(using = AlertOperandValue.AlertOperandValueDeserializer.class) public class AlertOperandValue { /** */ - @JsonProperty("bool_value") private Boolean boolValue; /** */ - @JsonProperty("double_value") private Double doubleValue; /** */ - @JsonProperty("string_value") private String stringValue; public AlertOperandValue setBoolValue(Boolean boolValue) { @@ -71,4 +79,42 @@ public String toString() { .add("stringValue", stringValue) .toString(); } + + AlertOperandValuePb toPb() { + AlertOperandValuePb pb = new AlertOperandValuePb(); + pb.setBoolValue(boolValue); + pb.setDoubleValue(doubleValue); + pb.setStringValue(stringValue); + + return pb; + } + + static AlertOperandValue fromPb(AlertOperandValuePb pb) { + AlertOperandValue model = new AlertOperandValue(); + model.setBoolValue(pb.getBoolValue()); + model.setDoubleValue(pb.getDoubleValue()); + model.setStringValue(pb.getStringValue()); + + return model; + } + + public static class AlertOperandValueSerializer extends JsonSerializer { + @Override + public void serialize(AlertOperandValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertOperandValuePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertOperandValueDeserializer extends JsonDeserializer { + @Override + public AlertOperandValue deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertOperandValuePb pb = mapper.readValue(p, AlertOperandValuePb.class); + return AlertOperandValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValuePb.java new file mode 100755 index 000000000..1aeabf5e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValuePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertOperandValuePb { + @JsonProperty("bool_value") + private Boolean boolValue; + + @JsonProperty("double_value") + private Double doubleValue; + + @JsonProperty("string_value") + private String stringValue; + + public AlertOperandValuePb setBoolValue(Boolean boolValue) { + this.boolValue = boolValue; + return this; + } + + public Boolean getBoolValue() { + return boolValue; + } + + public AlertOperandValuePb setDoubleValue(Double doubleValue) { + this.doubleValue = doubleValue; + return this; + } + + public Double getDoubleValue() { + return doubleValue; + } + + public AlertOperandValuePb setStringValue(String stringValue) { + this.stringValue = stringValue; + return this; + } + + public String getStringValue() { + return stringValue; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertOperandValuePb that = (AlertOperandValuePb) o; + return Objects.equals(boolValue, that.boolValue) + && Objects.equals(doubleValue, that.doubleValue) + && Objects.equals(stringValue, that.stringValue); + } + + @Override + public int hashCode() { + return Objects.hash(boolValue, doubleValue, stringValue); + } + + @Override + public String toString() { + return new ToStringer(AlertOperandValuePb.class) + .add("boolValue", boolValue) + .add("doubleValue", doubleValue) + .add("stringValue", stringValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptions.java index 9ebc23bc5..ed1c35148 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptions.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Alert configuration options. 
*/ @Generated +@JsonSerialize(using = AlertOptions.AlertOptionsSerializer.class) +@JsonDeserialize(using = AlertOptions.AlertOptionsDeserializer.class) public class AlertOptions { /** Name of column in the query result to compare in alert evaluation. */ - @JsonProperty("column") private String column; /** @@ -19,7 +29,6 @@ public class AlertOptions { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_body") private String customBody; /** @@ -28,29 +37,24 @@ public class AlertOptions { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_subject") private String customSubject; /** State that alert evaluates to when query result is empty. */ - @JsonProperty("empty_result_state") private AlertOptionsEmptyResultState emptyResultState; /** * Whether or not the alert is muted. If an alert is muted, it will not notify users and * notification destinations when triggered. */ - @JsonProperty("muted") private Boolean muted; /** Operator used to compare in alert evaluation: `>`, `>=`, `<`, `<=`, `==`, `!=` */ - @JsonProperty("op") private String op; /** * Value used to compare in alert evaluation. Supported types include strings (eg. 'foobar'), * floats (eg. 123.4), and booleans (true). */ - @JsonProperty("value") private Object value; public AlertOptions setColumn(String column) { @@ -147,4 +151,49 @@ public String toString() { .add("value", value) .toString(); } + + AlertOptionsPb toPb() { + AlertOptionsPb pb = new AlertOptionsPb(); + pb.setColumn(column); + pb.setCustomBody(customBody); + pb.setCustomSubject(customSubject); + pb.setEmptyResultState(emptyResultState); + pb.setMuted(muted); + pb.setOp(op); + pb.setValue(value); + + return pb; + } + + static AlertOptions fromPb(AlertOptionsPb pb) { + AlertOptions model = new AlertOptions(); + model.setColumn(pb.getColumn()); + model.setCustomBody(pb.getCustomBody()); + model.setCustomSubject(pb.getCustomSubject()); + model.setEmptyResultState(pb.getEmptyResultState()); + model.setMuted(pb.getMuted()); + model.setOp(pb.getOp()); + model.setValue(pb.getValue()); + + return model; + } + + public static class AlertOptionsSerializer extends JsonSerializer { + @Override + public void serialize(AlertOptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertOptionsDeserializer extends JsonDeserializer { + @Override + public AlertOptions 
deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertOptionsPb pb = mapper.readValue(p, AlertOptionsPb.class); + return AlertOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptionsPb.java new file mode 100755 index 000000000..4b5dd1ae3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptionsPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Alert configuration options. 
*/ +@Generated +class AlertOptionsPb { + @JsonProperty("column") + private String column; + + @JsonProperty("custom_body") + private String customBody; + + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("empty_result_state") + private AlertOptionsEmptyResultState emptyResultState; + + @JsonProperty("muted") + private Boolean muted; + + @JsonProperty("op") + private String op; + + @JsonProperty("value") + private Object value; + + public AlertOptionsPb setColumn(String column) { + this.column = column; + return this; + } + + public String getColumn() { + return column; + } + + public AlertOptionsPb setCustomBody(String customBody) { + this.customBody = customBody; + return this; + } + + public String getCustomBody() { + return customBody; + } + + public AlertOptionsPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public AlertOptionsPb setEmptyResultState(AlertOptionsEmptyResultState emptyResultState) { + this.emptyResultState = emptyResultState; + return this; + } + + public AlertOptionsEmptyResultState getEmptyResultState() { + return emptyResultState; + } + + public AlertOptionsPb setMuted(Boolean muted) { + this.muted = muted; + return this; + } + + public Boolean getMuted() { + return muted; + } + + public AlertOptionsPb setOp(String op) { + this.op = op; + return this; + } + + public String getOp() { + return op; + } + + public AlertOptionsPb setValue(Object value) { + this.value = value; + return this; + } + + public Object getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertOptionsPb that = (AlertOptionsPb) o; + return Objects.equals(column, that.column) + && Objects.equals(customBody, that.customBody) + && Objects.equals(customSubject, that.customSubject) + && 
Objects.equals(emptyResultState, that.emptyResultState) + && Objects.equals(muted, that.muted) + && Objects.equals(op, that.op) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(column, customBody, customSubject, emptyResultState, muted, op, value); + } + + @Override + public String toString() { + return new ToStringer(AlertOptionsPb.class) + .add("column", column) + .add("customBody", customBody) + .add("customSubject", customSubject) + .add("emptyResultState", emptyResultState) + .add("muted", muted) + .add("op", op) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertPb.java new file mode 100755 index 000000000..fd1d56abe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertPb.java @@ -0,0 +1,254 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertPb { + @JsonProperty("condition") + private AlertCondition condition; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("custom_body") + private String customBody; + + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("seconds_to_retrigger") + private Long secondsToRetrigger; + + @JsonProperty("state") + private AlertState state; + + @JsonProperty("trigger_time") + private String triggerTime; + + @JsonProperty("update_time") + private String updateTime; + + public AlertPb setCondition(AlertCondition condition) { + this.condition = condition; + return this; + } + + public AlertCondition getCondition() { + return condition; + } + + public AlertPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public AlertPb setCustomBody(String customBody) { + this.customBody = customBody; + return this; + } + + public String getCustomBody() { + return customBody; + } + + public AlertPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public AlertPb setDisplayName(String displayName) { + this.displayName = displayName; + return 
this; + } + + public String getDisplayName() { + return displayName; + } + + public AlertPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AlertPb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public AlertPb setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + + public AlertPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public AlertPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public AlertPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public AlertPb setSecondsToRetrigger(Long secondsToRetrigger) { + this.secondsToRetrigger = secondsToRetrigger; + return this; + } + + public Long getSecondsToRetrigger() { + return secondsToRetrigger; + } + + public AlertPb setState(AlertState state) { + this.state = state; + return this; + } + + public AlertState getState() { + return state; + } + + public AlertPb setTriggerTime(String triggerTime) { + this.triggerTime = triggerTime; + return this; + } + + public String getTriggerTime() { + return triggerTime; + } + + public AlertPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertPb that = (AlertPb) o; + return Objects.equals(condition, that.condition) + && 
Objects.equals(createTime, that.createTime) + && Objects.equals(customBody, that.customBody) + && Objects.equals(customSubject, that.customSubject) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(notifyOnOk, that.notifyOnOk) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(queryId, that.queryId) + && Objects.equals(secondsToRetrigger, that.secondsToRetrigger) + && Objects.equals(state, that.state) + && Objects.equals(triggerTime, that.triggerTime) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + condition, + createTime, + customBody, + customSubject, + displayName, + id, + lifecycleState, + notifyOnOk, + ownerUserName, + parentPath, + queryId, + secondsToRetrigger, + state, + triggerTime, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(AlertPb.class) + .add("condition", condition) + .add("createTime", createTime) + .add("customBody", customBody) + .add("customSubject", customSubject) + .add("displayName", displayName) + .add("id", id) + .add("lifecycleState", lifecycleState) + .add("notifyOnOk", notifyOnOk) + .add("ownerUserName", ownerUserName) + .add("parentPath", parentPath) + .add("queryId", queryId) + .add("secondsToRetrigger", secondsToRetrigger) + .add("state", state) + .add("triggerTime", triggerTime) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java index f0a02211e..515192dad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java @@ -4,14 +4,24 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AlertQuery.AlertQuerySerializer.class) +@JsonDeserialize(using = AlertQuery.AlertQueryDeserializer.class) public class AlertQuery { /** The timestamp when this query was created. */ - @JsonProperty("created_at") private String createdAt; /** @@ -20,17 +30,14 @@ public class AlertQuery { * *

[Learn more]: https://docs.databricks.com/api/workspace/datasources/list */ - @JsonProperty("data_source_id") private String dataSourceId; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** Query ID. */ - @JsonProperty("id") private String id; /** @@ -38,14 +45,12 @@ public class AlertQuery { * in search results. If this boolean is `true`, the `options` property for this query includes a * `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days. */ - @JsonProperty("is_archived") private Boolean isArchived; /** * Whether the query is a draft. Draft queries only appear in list views for their owners. * Visualizations from draft queries cannot appear on dashboards. */ - @JsonProperty("is_draft") private Boolean isDraft; /** @@ -53,31 +58,24 @@ public class AlertQuery { * Boolean parameter to `true` if a query either does not use any text type parameters or uses a * data source type where text type parameters are handled safely. */ - @JsonProperty("is_safe") private Boolean isSafe; /** The title of this query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("options") private QueryOptions options; /** The text of the query to be run. */ - @JsonProperty("query") private String query; /** */ - @JsonProperty("tags") private Collection tags; /** The timestamp at which this query was last updated. */ - @JsonProperty("updated_at") private String updatedAt; /** The ID of the user who owns the query. 
*/ - @JsonProperty("user_id") private Long userId; public AlertQuery setCreatedAt(String createdAt) { @@ -253,4 +251,61 @@ public String toString() { .add("userId", userId) .toString(); } + + AlertQueryPb toPb() { + AlertQueryPb pb = new AlertQueryPb(); + pb.setCreatedAt(createdAt); + pb.setDataSourceId(dataSourceId); + pb.setDescription(description); + pb.setId(id); + pb.setIsArchived(isArchived); + pb.setIsDraft(isDraft); + pb.setIsSafe(isSafe); + pb.setName(name); + pb.setOptions(options); + pb.setQuery(query); + pb.setTags(tags); + pb.setUpdatedAt(updatedAt); + pb.setUserId(userId); + + return pb; + } + + static AlertQuery fromPb(AlertQueryPb pb) { + AlertQuery model = new AlertQuery(); + model.setCreatedAt(pb.getCreatedAt()); + model.setDataSourceId(pb.getDataSourceId()); + model.setDescription(pb.getDescription()); + model.setId(pb.getId()); + model.setIsArchived(pb.getIsArchived()); + model.setIsDraft(pb.getIsDraft()); + model.setIsSafe(pb.getIsSafe()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setQuery(pb.getQuery()); + model.setTags(pb.getTags()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUserId(pb.getUserId()); + + return model; + } + + public static class AlertQuerySerializer extends JsonSerializer { + @Override + public void serialize(AlertQuery value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertQueryDeserializer extends JsonDeserializer { + @Override + public AlertQuery deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertQueryPb pb = mapper.readValue(p, AlertQueryPb.class); + return AlertQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQueryPb.java new file mode 100755 index 000000000..55f140b50 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQueryPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AlertQueryPb { + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("data_source_id") + private String dataSourceId; + + @JsonProperty("description") + private String description; + + @JsonProperty("id") + private String id; + + @JsonProperty("is_archived") + private Boolean isArchived; + + @JsonProperty("is_draft") + private Boolean isDraft; + + @JsonProperty("is_safe") + private Boolean isSafe; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private QueryOptions options; + + @JsonProperty("query") + private String query; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("updated_at") + private String updatedAt; + + @JsonProperty("user_id") + private Long userId; + + public AlertQueryPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + } + + public String getCreatedAt() { + return createdAt; + } + + public AlertQueryPb setDataSourceId(String dataSourceId) { + this.dataSourceId = dataSourceId; + return this; + } + + public String getDataSourceId() { + return dataSourceId; + } + + public AlertQueryPb setDescription(String 
description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AlertQueryPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AlertQueryPb setIsArchived(Boolean isArchived) { + this.isArchived = isArchived; + return this; + } + + public Boolean getIsArchived() { + return isArchived; + } + + public AlertQueryPb setIsDraft(Boolean isDraft) { + this.isDraft = isDraft; + return this; + } + + public Boolean getIsDraft() { + return isDraft; + } + + public AlertQueryPb setIsSafe(Boolean isSafe) { + this.isSafe = isSafe; + return this; + } + + public Boolean getIsSafe() { + return isSafe; + } + + public AlertQueryPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AlertQueryPb setOptions(QueryOptions options) { + this.options = options; + return this; + } + + public QueryOptions getOptions() { + return options; + } + + public AlertQueryPb setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public AlertQueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public AlertQueryPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + public AlertQueryPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertQueryPb that = (AlertQueryPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(dataSourceId, that.dataSourceId) + && Objects.equals(description, that.description) + && Objects.equals(id, that.id) + && 
Objects.equals(isArchived, that.isArchived) + && Objects.equals(isDraft, that.isDraft) + && Objects.equals(isSafe, that.isSafe) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(query, that.query) + && Objects.equals(tags, that.tags) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, + dataSourceId, + description, + id, + isArchived, + isDraft, + isSafe, + name, + options, + query, + tags, + updatedAt, + userId); + } + + @Override + public String toString() { + return new ToStringer(AlertQueryPb.class) + .add("createdAt", createdAt) + .add("dataSourceId", dataSourceId) + .add("description", description) + .add("id", id) + .add("isArchived", isArchived) + .add("isDraft", isDraft) + .add("isSafe", isSafe) + .add("name", name) + .add("options", options) + .add("query", query) + .add("tags", tags) + .add("updatedAt", updatedAt) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java index cbe402a72..d83c6b15e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java @@ -4,68 +4,65 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2.AlertV2Serializer.class) +@JsonDeserialize(using = AlertV2.AlertV2Deserializer.class) public class AlertV2 { /** The timestamp indicating when the alert was created. */ - @JsonProperty("create_time") private String createTime; /** Custom description for the alert. support mustache template. */ - @JsonProperty("custom_description") private String customDescription; /** Custom summary for the alert. support mustache template. */ - @JsonProperty("custom_summary") private String customSummary; /** The display name of the alert. */ - @JsonProperty("display_name") private String displayName; /** */ - @JsonProperty("evaluation") private AlertV2Evaluation evaluation; /** UUID identifying the alert. */ - @JsonProperty("id") private String id; /** Indicates whether the query is trashed. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** * The workspace path of the folder containing the alert. Can only be set on create, and cannot be * updated. */ - @JsonProperty("parent_path") private String parentPath; /** Text of the query to be run. */ - @JsonProperty("query_text") private String queryText; /** The run as username. This field is set to "Unavailable" if the user has been deleted. */ - @JsonProperty("run_as_user_name") private String runAsUserName; /** */ - @JsonProperty("schedule") private CronSchedule schedule; /** The timestamp indicating when the alert was updated. */ - @JsonProperty("update_time") private String updateTime; /** ID of the SQL warehouse attached to the alert. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public AlertV2 setCreateTime(String createTime) { @@ -253,4 +250,63 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + AlertV2Pb toPb() { + AlertV2Pb pb = new AlertV2Pb(); + pb.setCreateTime(createTime); + pb.setCustomDescription(customDescription); + pb.setCustomSummary(customSummary); + pb.setDisplayName(displayName); + pb.setEvaluation(evaluation); + pb.setId(id); + pb.setLifecycleState(lifecycleState); + pb.setOwnerUserName(ownerUserName); + pb.setParentPath(parentPath); + pb.setQueryText(queryText); + pb.setRunAsUserName(runAsUserName); + pb.setSchedule(schedule); + pb.setUpdateTime(updateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static AlertV2 fromPb(AlertV2Pb pb) { + AlertV2 model = new AlertV2(); + model.setCreateTime(pb.getCreateTime()); + model.setCustomDescription(pb.getCustomDescription()); + model.setCustomSummary(pb.getCustomSummary()); + model.setDisplayName(pb.getDisplayName()); + model.setEvaluation(pb.getEvaluation()); + model.setId(pb.getId()); + model.setLifecycleState(pb.getLifecycleState()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setParentPath(pb.getParentPath()); + model.setQueryText(pb.getQueryText()); + model.setRunAsUserName(pb.getRunAsUserName()); + model.setSchedule(pb.getSchedule()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class AlertV2Serializer extends JsonSerializer { + @Override + public void serialize(AlertV2 value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2Pb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2Deserializer extends JsonDeserializer { + @Override + public AlertV2 deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2Pb pb = mapper.readValue(p, AlertV2Pb.class); + return AlertV2.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java index ad55cc8ea..4b4eed1c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2Evaluation.AlertV2EvaluationSerializer.class) +@JsonDeserialize(using = AlertV2Evaluation.AlertV2EvaluationDeserializer.class) public class AlertV2Evaluation { /** Operator used for comparison in alert evaluation. */ - @JsonProperty("comparison_operator") private ComparisonOperator comparisonOperator; /** Alert state if result is empty. */ - @JsonProperty("empty_result_state") private AlertEvaluationState emptyResultState; /** Timestamp of the last evaluation. */ - @JsonProperty("last_evaluated_at") private String lastEvaluatedAt; /** User or Notification Destination to notify when alert is triggered. 
*/ - @JsonProperty("notification") private AlertV2Notification notification; /** Source column from result to use to evaluate alert */ - @JsonProperty("source") private AlertV2OperandColumn source; /** Latest state of alert evaluation. */ - @JsonProperty("state") private AlertEvaluationState state; /** Threshold to user for alert evaluation, can be a column or a value. */ - @JsonProperty("threshold") private AlertV2Operand threshold; public AlertV2Evaluation setComparisonOperator(ComparisonOperator comparisonOperator) { @@ -138,4 +142,50 @@ public String toString() { .add("threshold", threshold) .toString(); } + + AlertV2EvaluationPb toPb() { + AlertV2EvaluationPb pb = new AlertV2EvaluationPb(); + pb.setComparisonOperator(comparisonOperator); + pb.setEmptyResultState(emptyResultState); + pb.setLastEvaluatedAt(lastEvaluatedAt); + pb.setNotification(notification); + pb.setSource(source); + pb.setState(state); + pb.setThreshold(threshold); + + return pb; + } + + static AlertV2Evaluation fromPb(AlertV2EvaluationPb pb) { + AlertV2Evaluation model = new AlertV2Evaluation(); + model.setComparisonOperator(pb.getComparisonOperator()); + model.setEmptyResultState(pb.getEmptyResultState()); + model.setLastEvaluatedAt(pb.getLastEvaluatedAt()); + model.setNotification(pb.getNotification()); + model.setSource(pb.getSource()); + model.setState(pb.getState()); + model.setThreshold(pb.getThreshold()); + + return model; + } + + public static class AlertV2EvaluationSerializer extends JsonSerializer { + @Override + public void serialize(AlertV2Evaluation value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2EvaluationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2EvaluationDeserializer extends JsonDeserializer { + @Override + public AlertV2Evaluation deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an 
ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2EvaluationPb pb = mapper.readValue(p, AlertV2EvaluationPb.class); + return AlertV2Evaluation.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2EvaluationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2EvaluationPb.java new file mode 100755 index 000000000..cea30845f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2EvaluationPb.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2EvaluationPb { + @JsonProperty("comparison_operator") + private ComparisonOperator comparisonOperator; + + @JsonProperty("empty_result_state") + private AlertEvaluationState emptyResultState; + + @JsonProperty("last_evaluated_at") + private String lastEvaluatedAt; + + @JsonProperty("notification") + private AlertV2Notification notification; + + @JsonProperty("source") + private AlertV2OperandColumn source; + + @JsonProperty("state") + private AlertEvaluationState state; + + @JsonProperty("threshold") + private AlertV2Operand threshold; + + public AlertV2EvaluationPb setComparisonOperator(ComparisonOperator comparisonOperator) { + this.comparisonOperator = comparisonOperator; + return this; + } + + public ComparisonOperator getComparisonOperator() { + return comparisonOperator; + } + + public AlertV2EvaluationPb setEmptyResultState(AlertEvaluationState emptyResultState) { + this.emptyResultState = emptyResultState; + return this; + } + + public AlertEvaluationState getEmptyResultState() { + return emptyResultState; + } + + public AlertV2EvaluationPb setLastEvaluatedAt(String lastEvaluatedAt) { 
+ this.lastEvaluatedAt = lastEvaluatedAt; + return this; + } + + public String getLastEvaluatedAt() { + return lastEvaluatedAt; + } + + public AlertV2EvaluationPb setNotification(AlertV2Notification notification) { + this.notification = notification; + return this; + } + + public AlertV2Notification getNotification() { + return notification; + } + + public AlertV2EvaluationPb setSource(AlertV2OperandColumn source) { + this.source = source; + return this; + } + + public AlertV2OperandColumn getSource() { + return source; + } + + public AlertV2EvaluationPb setState(AlertEvaluationState state) { + this.state = state; + return this; + } + + public AlertEvaluationState getState() { + return state; + } + + public AlertV2EvaluationPb setThreshold(AlertV2Operand threshold) { + this.threshold = threshold; + return this; + } + + public AlertV2Operand getThreshold() { + return threshold; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2EvaluationPb that = (AlertV2EvaluationPb) o; + return Objects.equals(comparisonOperator, that.comparisonOperator) + && Objects.equals(emptyResultState, that.emptyResultState) + && Objects.equals(lastEvaluatedAt, that.lastEvaluatedAt) + && Objects.equals(notification, that.notification) + && Objects.equals(source, that.source) + && Objects.equals(state, that.state) + && Objects.equals(threshold, that.threshold); + } + + @Override + public int hashCode() { + return Objects.hash( + comparisonOperator, + emptyResultState, + lastEvaluatedAt, + notification, + source, + state, + threshold); + } + + @Override + public String toString() { + return new ToStringer(AlertV2EvaluationPb.class) + .add("comparisonOperator", comparisonOperator) + .add("emptyResultState", emptyResultState) + .add("lastEvaluatedAt", lastEvaluatedAt) + .add("notification", notification) + .add("source", source) + .add("state", state) + .add("threshold", threshold) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java index 6fa20c245..be84d9bcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2Notification.AlertV2NotificationSerializer.class) +@JsonDeserialize(using = AlertV2Notification.AlertV2NotificationDeserializer.class) public class AlertV2Notification { /** Whether to notify alert subscribers when alert returns back to normal. */ - @JsonProperty("notify_on_ok") private Boolean notifyOnOk; /** * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it * can be triggered again. If 0 or not specified, the alert will not be triggered again. 
*/ - @JsonProperty("retrigger_seconds") private Long retriggerSeconds; /** */ - @JsonProperty("subscriptions") private Collection subscriptions; public AlertV2Notification setNotifyOnOk(Boolean notifyOnOk) { @@ -75,4 +83,43 @@ public String toString() { .add("subscriptions", subscriptions) .toString(); } + + AlertV2NotificationPb toPb() { + AlertV2NotificationPb pb = new AlertV2NotificationPb(); + pb.setNotifyOnOk(notifyOnOk); + pb.setRetriggerSeconds(retriggerSeconds); + pb.setSubscriptions(subscriptions); + + return pb; + } + + static AlertV2Notification fromPb(AlertV2NotificationPb pb) { + AlertV2Notification model = new AlertV2Notification(); + model.setNotifyOnOk(pb.getNotifyOnOk()); + model.setRetriggerSeconds(pb.getRetriggerSeconds()); + model.setSubscriptions(pb.getSubscriptions()); + + return model; + } + + public static class AlertV2NotificationSerializer extends JsonSerializer { + @Override + public void serialize(AlertV2Notification value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2NotificationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2NotificationDeserializer + extends JsonDeserializer { + @Override + public AlertV2Notification deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2NotificationPb pb = mapper.readValue(p, AlertV2NotificationPb.class); + return AlertV2Notification.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2NotificationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2NotificationPb.java new file mode 100755 index 000000000..37b3dd681 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2NotificationPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class AlertV2NotificationPb { + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + + @JsonProperty("retrigger_seconds") + private Long retriggerSeconds; + + @JsonProperty("subscriptions") + private Collection subscriptions; + + public AlertV2NotificationPb setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + + public AlertV2NotificationPb setRetriggerSeconds(Long retriggerSeconds) { + this.retriggerSeconds = retriggerSeconds; + return this; + } + + public Long getRetriggerSeconds() { + return retriggerSeconds; + } + + public AlertV2NotificationPb setSubscriptions(Collection subscriptions) { + this.subscriptions = subscriptions; + return this; + } + + public Collection getSubscriptions() { + return subscriptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2NotificationPb that = (AlertV2NotificationPb) o; + return Objects.equals(notifyOnOk, that.notifyOnOk) + && 
Objects.equals(retriggerSeconds, that.retriggerSeconds) + && Objects.equals(subscriptions, that.subscriptions); + } + + @Override + public int hashCode() { + return Objects.hash(notifyOnOk, retriggerSeconds, subscriptions); + } + + @Override + public String toString() { + return new ToStringer(AlertV2NotificationPb.class) + .add("notifyOnOk", notifyOnOk) + .add("retriggerSeconds", retriggerSeconds) + .add("subscriptions", subscriptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java index 947ee092f..7aab91f0d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2Operand.AlertV2OperandSerializer.class) +@JsonDeserialize(using = AlertV2Operand.AlertV2OperandDeserializer.class) public class AlertV2Operand { /** */ - @JsonProperty("column") private AlertV2OperandColumn column; /** */ - @JsonProperty("value") private AlertV2OperandValue value; public AlertV2Operand setColumn(AlertV2OperandColumn column) { @@ -55,4 
+64,40 @@ public String toString() { .add("value", value) .toString(); } + + AlertV2OperandPb toPb() { + AlertV2OperandPb pb = new AlertV2OperandPb(); + pb.setColumn(column); + pb.setValue(value); + + return pb; + } + + static AlertV2Operand fromPb(AlertV2OperandPb pb) { + AlertV2Operand model = new AlertV2Operand(); + model.setColumn(pb.getColumn()); + model.setValue(pb.getValue()); + + return model; + } + + public static class AlertV2OperandSerializer extends JsonSerializer { + @Override + public void serialize(AlertV2Operand value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2OperandPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2OperandDeserializer extends JsonDeserializer { + @Override + public AlertV2Operand deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2OperandPb pb = mapper.readValue(p, AlertV2OperandPb.class); + return AlertV2Operand.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java index 2e8776e18..abb334a9b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2OperandColumn.AlertV2OperandColumnSerializer.class) +@JsonDeserialize(using = AlertV2OperandColumn.AlertV2OperandColumnDeserializer.class) public class AlertV2OperandColumn { /** */ - @JsonProperty("aggregation") private Aggregation aggregation; /** */ - @JsonProperty("display") private String display; /** */ - @JsonProperty("name") private String name; public AlertV2OperandColumn setAggregation(Aggregation aggregation) { @@ -71,4 +79,44 @@ public String toString() { .add("name", name) .toString(); } + + AlertV2OperandColumnPb toPb() { + AlertV2OperandColumnPb pb = new AlertV2OperandColumnPb(); + pb.setAggregation(aggregation); + pb.setDisplay(display); + pb.setName(name); + + return pb; + } + + static AlertV2OperandColumn fromPb(AlertV2OperandColumnPb pb) { + AlertV2OperandColumn model = new AlertV2OperandColumn(); + model.setAggregation(pb.getAggregation()); + model.setDisplay(pb.getDisplay()); + model.setName(pb.getName()); + + return model; + } + + public static class AlertV2OperandColumnSerializer extends JsonSerializer { + @Override + public void serialize( + AlertV2OperandColumn value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2OperandColumnPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2OperandColumnDeserializer + extends JsonDeserializer { + @Override + public AlertV2OperandColumn deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2OperandColumnPb pb = mapper.readValue(p, AlertV2OperandColumnPb.class); + return AlertV2OperandColumn.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumnPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumnPb.java new file mode 100755 index 000000000..97cd58bfa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumnPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2OperandColumnPb { + @JsonProperty("aggregation") + private Aggregation aggregation; + + @JsonProperty("display") + private String display; + + @JsonProperty("name") + private String name; + + public AlertV2OperandColumnPb setAggregation(Aggregation aggregation) { + this.aggregation = aggregation; + return this; + } + + public Aggregation getAggregation() { + return aggregation; + } + + public AlertV2OperandColumnPb setDisplay(String display) { + this.display = display; + return this; + } + + public String getDisplay() { + return display; + } + + public AlertV2OperandColumnPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2OperandColumnPb that = (AlertV2OperandColumnPb) o; + return Objects.equals(aggregation, that.aggregation) + && Objects.equals(display, that.display) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return 
Objects.hash(aggregation, display, name); + } + + @Override + public String toString() { + return new ToStringer(AlertV2OperandColumnPb.class) + .add("aggregation", aggregation) + .add("display", display) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandPb.java new file mode 100755 index 000000000..b517d3cd0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2OperandPb { + @JsonProperty("column") + private AlertV2OperandColumn column; + + @JsonProperty("value") + private AlertV2OperandValue value; + + public AlertV2OperandPb setColumn(AlertV2OperandColumn column) { + this.column = column; + return this; + } + + public AlertV2OperandColumn getColumn() { + return column; + } + + public AlertV2OperandPb setValue(AlertV2OperandValue value) { + this.value = value; + return this; + } + + public AlertV2OperandValue getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2OperandPb that = (AlertV2OperandPb) o; + return Objects.equals(column, that.column) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(column, value); + } + + @Override + public String toString() { + return new ToStringer(AlertV2OperandPb.class) + .add("column", column) + .add("value", value) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java index c1e883802..e7ac9d38c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2OperandValue.AlertV2OperandValueSerializer.class) +@JsonDeserialize(using = AlertV2OperandValue.AlertV2OperandValueDeserializer.class) public class AlertV2OperandValue { /** */ - @JsonProperty("bool_value") private Boolean boolValue; /** */ - @JsonProperty("double_value") private Double doubleValue; /** */ - @JsonProperty("string_value") private String stringValue; public AlertV2OperandValue setBoolValue(Boolean boolValue) { @@ -71,4 +79,43 @@ public String toString() { .add("stringValue", stringValue) .toString(); } + + AlertV2OperandValuePb toPb() { + AlertV2OperandValuePb pb = new AlertV2OperandValuePb(); + pb.setBoolValue(boolValue); + pb.setDoubleValue(doubleValue); + pb.setStringValue(stringValue); + + return pb; + } + + static AlertV2OperandValue fromPb(AlertV2OperandValuePb pb) { + 
AlertV2OperandValue model = new AlertV2OperandValue(); + model.setBoolValue(pb.getBoolValue()); + model.setDoubleValue(pb.getDoubleValue()); + model.setStringValue(pb.getStringValue()); + + return model; + } + + public static class AlertV2OperandValueSerializer extends JsonSerializer { + @Override + public void serialize(AlertV2OperandValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2OperandValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2OperandValueDeserializer + extends JsonDeserializer { + @Override + public AlertV2OperandValue deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2OperandValuePb pb = mapper.readValue(p, AlertV2OperandValuePb.class); + return AlertV2OperandValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValuePb.java new file mode 100755 index 000000000..4a5b48f78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValuePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2OperandValuePb { + @JsonProperty("bool_value") + private Boolean boolValue; + + @JsonProperty("double_value") + private Double doubleValue; + + @JsonProperty("string_value") + private String stringValue; + + public AlertV2OperandValuePb setBoolValue(Boolean boolValue) { + this.boolValue = boolValue; + return this; + } + + public Boolean getBoolValue() { + return boolValue; + } + + public AlertV2OperandValuePb setDoubleValue(Double doubleValue) { + this.doubleValue = doubleValue; + return this; + } + + public Double getDoubleValue() { + return doubleValue; + } + + public AlertV2OperandValuePb setStringValue(String stringValue) { + this.stringValue = stringValue; + return this; + } + + public String getStringValue() { + return stringValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2OperandValuePb that = (AlertV2OperandValuePb) o; + return Objects.equals(boolValue, that.boolValue) + && Objects.equals(doubleValue, that.doubleValue) + && Objects.equals(stringValue, that.stringValue); + } + + @Override + public int hashCode() { + return Objects.hash(boolValue, doubleValue, stringValue); + } + + @Override + public String toString() { + return new ToStringer(AlertV2OperandValuePb.class) + .add("boolValue", boolValue) + .add("doubleValue", doubleValue) + .add("stringValue", stringValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Pb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Pb.java new file mode 100755 index 000000000..fc1ef7251 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Pb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2Pb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("custom_description") + private String customDescription; + + @JsonProperty("custom_summary") + private String customSummary; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("evaluation") + private AlertV2Evaluation evaluation; + + @JsonProperty("id") + private String id; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("run_as_user_name") + private String runAsUserName; + + @JsonProperty("schedule") + private CronSchedule schedule; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public AlertV2Pb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public AlertV2Pb setCustomDescription(String customDescription) { + this.customDescription = customDescription; + return this; + } + + public String getCustomDescription() { + return customDescription; + } + + public AlertV2Pb setCustomSummary(String customSummary) { + this.customSummary = customSummary; + return this; + } + + public String getCustomSummary() { + return customSummary; + } + + public AlertV2Pb setDisplayName(String displayName) { + this.displayName = 
displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public AlertV2Pb setEvaluation(AlertV2Evaluation evaluation) { + this.evaluation = evaluation; + return this; + } + + public AlertV2Evaluation getEvaluation() { + return evaluation; + } + + public AlertV2Pb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AlertV2Pb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public AlertV2Pb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public AlertV2Pb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public AlertV2Pb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public AlertV2Pb setRunAsUserName(String runAsUserName) { + this.runAsUserName = runAsUserName; + return this; + } + + public String getRunAsUserName() { + return runAsUserName; + } + + public AlertV2Pb setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public AlertV2Pb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public AlertV2Pb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2Pb that = (AlertV2Pb) o; 
+ return Objects.equals(createTime, that.createTime) + && Objects.equals(customDescription, that.customDescription) + && Objects.equals(customSummary, that.customSummary) + && Objects.equals(displayName, that.displayName) + && Objects.equals(evaluation, that.evaluation) + && Objects.equals(id, that.id) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(queryText, that.queryText) + && Objects.equals(runAsUserName, that.runAsUserName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + customDescription, + customSummary, + displayName, + evaluation, + id, + lifecycleState, + ownerUserName, + parentPath, + queryText, + runAsUserName, + schedule, + updateTime, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(AlertV2Pb.class) + .add("createTime", createTime) + .add("customDescription", customDescription) + .add("customSummary", customSummary) + .add("displayName", displayName) + .add("evaluation", evaluation) + .add("id", id) + .add("lifecycleState", lifecycleState) + .add("ownerUserName", ownerUserName) + .add("parentPath", parentPath) + .add("queryText", queryText) + .add("runAsUserName", runAsUserName) + .add("schedule", schedule) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java index cb0d96a42..b00ec3d50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = AlertV2Subscription.AlertV2SubscriptionSerializer.class) +@JsonDeserialize(using = AlertV2Subscription.AlertV2SubscriptionDeserializer.class) public class AlertV2Subscription { /** */ - @JsonProperty("destination_id") private String destinationId; /** */ - @JsonProperty("user_email") private String userEmail; public AlertV2Subscription setDestinationId(String destinationId) { @@ -56,4 +65,41 @@ public String toString() { .add("userEmail", userEmail) .toString(); } + + AlertV2SubscriptionPb toPb() { + AlertV2SubscriptionPb pb = new AlertV2SubscriptionPb(); + pb.setDestinationId(destinationId); + pb.setUserEmail(userEmail); + + return pb; + } + + static AlertV2Subscription fromPb(AlertV2SubscriptionPb pb) { + AlertV2Subscription model = new AlertV2Subscription(); + model.setDestinationId(pb.getDestinationId()); + model.setUserEmail(pb.getUserEmail()); + + return model; + } + + public static class AlertV2SubscriptionSerializer extends JsonSerializer { + @Override + public void serialize(AlertV2Subscription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AlertV2SubscriptionPb pb = 
value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AlertV2SubscriptionDeserializer + extends JsonDeserializer { + @Override + public AlertV2Subscription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AlertV2SubscriptionPb pb = mapper.readValue(p, AlertV2SubscriptionPb.class); + return AlertV2Subscription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2SubscriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2SubscriptionPb.java new file mode 100755 index 000000000..7e32ebabf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2SubscriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AlertV2SubscriptionPb { + @JsonProperty("destination_id") + private String destinationId; + + @JsonProperty("user_email") + private String userEmail; + + public AlertV2SubscriptionPb setDestinationId(String destinationId) { + this.destinationId = destinationId; + return this; + } + + public String getDestinationId() { + return destinationId; + } + + public AlertV2SubscriptionPb setUserEmail(String userEmail) { + this.userEmail = userEmail; + return this; + } + + public String getUserEmail() { + return userEmail; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AlertV2SubscriptionPb that = (AlertV2SubscriptionPb) o; + return Objects.equals(destinationId, 
that.destinationId) + && Objects.equals(userEmail, that.userEmail); + } + + @Override + public int hashCode() { + return Objects.hash(destinationId, userEmail); + } + + @Override + public String toString() { + return new ToStringer(AlertV2SubscriptionPb.class) + .add("destinationId", destinationId) + .add("userEmail", userEmail) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java index 33a394bf3..a678f9747 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java @@ -21,7 +21,7 @@ public Alert create(CreateAlertRequest request) { String path = "/api/2.0/sql/alerts"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Alert.class); @@ -35,7 +35,7 @@ public void delete(TrashAlertRequest request) { String path = String.format("/api/2.0/sql/alerts/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, Empty.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public Alert get(GetAlertRequest request) { String path = String.format("/api/2.0/sql/alerts/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Alert.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListAlertsResponse list(ListAlertsRequest request) { String path = 
"/api/2.0/sql/alerts"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAlertsResponse.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public Alert update(UpdateAlertRequest request) { String path = String.format("/api/2.0/sql/alerts/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Alert.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java index 286783571..33ff56226 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java @@ -22,7 +22,7 @@ public LegacyAlert create(CreateAlert request) { String path = "/api/2.0/preview/sql/alerts"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LegacyAlert.class); @@ -36,7 +36,7 @@ public void delete(DeleteAlertsLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public LegacyAlert 
get(GetAlertsLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, LegacyAlert.class); } catch (IOException e) { @@ -70,7 +70,7 @@ public void update(EditAlert request) { String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java index b8379503e..d90017e23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java @@ -21,7 +21,7 @@ public AlertV2 createAlert(CreateAlertV2Request request) { String path = "/api/2.0/alerts"; try { Request req = new Request("POST", path, apiClient.serialize(request.getAlert())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AlertV2.class); @@ -35,7 +35,7 @@ public AlertV2 getAlert(GetAlertV2Request request) { String path = String.format("/api/2.0/alerts/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AlertV2.class); } catch 
(IOException e) { @@ -48,7 +48,7 @@ public ListAlertsV2Response listAlerts(ListAlertsV2Request request) { String path = "/api/2.0/alerts"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAlertsV2Response.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public void trashAlert(TrashAlertV2Request request) { String path = String.format("/api/2.0/alerts/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, Empty.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public AlertV2 updateAlert(UpdateAlertV2Request request) { String path = String.format("/api/2.0/alerts/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request.getAlert())); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, AlertV2.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java index 1a58b0485..159d4f793 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,24 +21,22 @@ * within a manifest, and when fetching individual chunk data or links. */ @Generated +@JsonSerialize(using = BaseChunkInfo.BaseChunkInfoSerializer.class) +@JsonDeserialize(using = BaseChunkInfo.BaseChunkInfoDeserializer.class) public class BaseChunkInfo { /** * The number of bytes in the result chunk. This field is not available when using `INLINE` * disposition. */ - @JsonProperty("byte_count") private Long byteCount; /** The position within the sequence of result set chunks. */ - @JsonProperty("chunk_index") private Long chunkIndex; /** The number of rows within the result chunk. */ - @JsonProperty("row_count") private Long rowCount; /** The starting row offset within the result set. 
*/ - @JsonProperty("row_offset") private Long rowOffset; public BaseChunkInfo setByteCount(Long byteCount) { @@ -93,4 +100,43 @@ public String toString() { .add("rowOffset", rowOffset) .toString(); } + + BaseChunkInfoPb toPb() { + BaseChunkInfoPb pb = new BaseChunkInfoPb(); + pb.setByteCount(byteCount); + pb.setChunkIndex(chunkIndex); + pb.setRowCount(rowCount); + pb.setRowOffset(rowOffset); + + return pb; + } + + static BaseChunkInfo fromPb(BaseChunkInfoPb pb) { + BaseChunkInfo model = new BaseChunkInfo(); + model.setByteCount(pb.getByteCount()); + model.setChunkIndex(pb.getChunkIndex()); + model.setRowCount(pb.getRowCount()); + model.setRowOffset(pb.getRowOffset()); + + return model; + } + + public static class BaseChunkInfoSerializer extends JsonSerializer { + @Override + public void serialize(BaseChunkInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + BaseChunkInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class BaseChunkInfoDeserializer extends JsonDeserializer { + @Override + public BaseChunkInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + BaseChunkInfoPb pb = mapper.readValue(p, BaseChunkInfoPb.class); + return BaseChunkInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfoPb.java new file mode 100755 index 000000000..3b40b065b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfoPb.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Describes metadata for a particular chunk, within a result set; this structure is used both + * within a manifest, and when fetching individual chunk data or links. + */ +@Generated +class BaseChunkInfoPb { + @JsonProperty("byte_count") + private Long byteCount; + + @JsonProperty("chunk_index") + private Long chunkIndex; + + @JsonProperty("row_count") + private Long rowCount; + + @JsonProperty("row_offset") + private Long rowOffset; + + public BaseChunkInfoPb setByteCount(Long byteCount) { + this.byteCount = byteCount; + return this; + } + + public Long getByteCount() { + return byteCount; + } + + public BaseChunkInfoPb setChunkIndex(Long chunkIndex) { + this.chunkIndex = chunkIndex; + return this; + } + + public Long getChunkIndex() { + return chunkIndex; + } + + public BaseChunkInfoPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + public BaseChunkInfoPb setRowOffset(Long rowOffset) { + this.rowOffset = rowOffset; + return this; + } + + public Long getRowOffset() { + return rowOffset; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BaseChunkInfoPb that = (BaseChunkInfoPb) o; + return Objects.equals(byteCount, that.byteCount) + && Objects.equals(chunkIndex, that.chunkIndex) + && Objects.equals(rowCount, that.rowCount) + && Objects.equals(rowOffset, that.rowOffset); + } + + @Override + public int hashCode() { + return Objects.hash(byteCount, chunkIndex, rowCount, rowOffset); + } + + @Override + public String toString() { + return new ToStringer(BaseChunkInfoPb.class) + .add("byteCount", byteCount) + .add("chunkIndex", chunkIndex) + .add("rowCount", 
rowCount) + .add("rowOffset", rowOffset) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java index 3226a9fb7..66af7ff00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Cancel statement execution */ @Generated +@JsonSerialize(using = CancelExecutionRequest.CancelExecutionRequestSerializer.class) +@JsonDeserialize(using = CancelExecutionRequest.CancelExecutionRequestDeserializer.class) public class CancelExecutionRequest { /** * The statement ID is returned upon successfully submitting a SQL statement, and is a required * reference for all subsequent calls. 
*/ - @JsonIgnore private String statementId; + private String statementId; public CancelExecutionRequest setStatementId(String statementId) { this.statementId = statementId; @@ -42,4 +53,41 @@ public int hashCode() { public String toString() { return new ToStringer(CancelExecutionRequest.class).add("statementId", statementId).toString(); } + + CancelExecutionRequestPb toPb() { + CancelExecutionRequestPb pb = new CancelExecutionRequestPb(); + pb.setStatementId(statementId); + + return pb; + } + + static CancelExecutionRequest fromPb(CancelExecutionRequestPb pb) { + CancelExecutionRequest model = new CancelExecutionRequest(); + model.setStatementId(pb.getStatementId()); + + return model; + } + + public static class CancelExecutionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CancelExecutionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelExecutionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelExecutionRequestDeserializer + extends JsonDeserializer { + @Override + public CancelExecutionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelExecutionRequestPb pb = mapper.readValue(p, CancelExecutionRequestPb.class); + return CancelExecutionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequestPb.java new file mode 100755 index 000000000..b591c1bbc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Cancel statement execution */ +@Generated +class CancelExecutionRequestPb { + @JsonIgnore private String statementId; + + public CancelExecutionRequestPb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelExecutionRequestPb that = (CancelExecutionRequestPb) o; + return Objects.equals(statementId, that.statementId); + } + + @Override + public int hashCode() { + return Objects.hash(statementId); + } + + @Override + public String toString() { + return new ToStringer(CancelExecutionRequestPb.class) + .add("statementId", statementId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java index 38b973542..d715d6980 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CancelExecutionResponse.CancelExecutionResponseSerializer.class) +@JsonDeserialize(using = CancelExecutionResponse.CancelExecutionResponseDeserializer.class) public class CancelExecutionResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(CancelExecutionResponse.class).toString(); } + + CancelExecutionResponsePb toPb() { + CancelExecutionResponsePb pb = new CancelExecutionResponsePb(); + + return pb; + } + + static CancelExecutionResponse fromPb(CancelExecutionResponsePb pb) { + CancelExecutionResponse model = new CancelExecutionResponse(); + + return model; + } + + public static class CancelExecutionResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CancelExecutionResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CancelExecutionResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CancelExecutionResponseDeserializer + extends JsonDeserializer { + @Override + public CancelExecutionResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CancelExecutionResponsePb pb = mapper.readValue(p, CancelExecutionResponsePb.class); + return CancelExecutionResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponsePb.java new file mode 100755 index 000000000..949fd48d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CancelExecutionResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CancelExecutionResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java index 4ed901e2a..307abe45d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,13 +21,13 @@ * chosen only when `dbsql_version` is specified. */ @Generated +@JsonSerialize(using = Channel.ChannelSerializer.class) +@JsonDeserialize(using = Channel.ChannelDeserializer.class) public class Channel { /** */ - @JsonProperty("dbsql_version") private String dbsqlVersion; /** */ - @JsonProperty("name") private ChannelName name; public Channel setDbsqlVersion(String dbsqlVersion) { @@ -59,4 +68,39 @@ public String toString() { .add("name", name) .toString(); } + + ChannelPb toPb() { + ChannelPb pb = new ChannelPb(); + pb.setDbsqlVersion(dbsqlVersion); + pb.setName(name); + + return pb; + } + + static Channel fromPb(ChannelPb pb) { + Channel model = new Channel(); + model.setDbsqlVersion(pb.getDbsqlVersion()); + model.setName(pb.getName()); + + return model; + } + + public static class ChannelSerializer extends JsonSerializer { + @Override + public void serialize(Channel value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ChannelPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ChannelDeserializer extends JsonDeserializer { + @Override + public Channel deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ChannelPb pb = mapper.readValue(p, ChannelPb.class); + return Channel.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java index a4d259491..061c3d7ff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Details about a Channel. */ @Generated +@JsonSerialize(using = ChannelInfo.ChannelInfoSerializer.class) +@JsonDeserialize(using = ChannelInfo.ChannelInfoDeserializer.class) public class ChannelInfo { /** DB SQL Version the Channel is mapped to. 
*/ - @JsonProperty("dbsql_version") private String dbsqlVersion; /** Name of the channel */ - @JsonProperty("name") private ChannelName name; public ChannelInfo setDbsqlVersion(String dbsqlVersion) { @@ -56,4 +65,39 @@ public String toString() { .add("name", name) .toString(); } + + ChannelInfoPb toPb() { + ChannelInfoPb pb = new ChannelInfoPb(); + pb.setDbsqlVersion(dbsqlVersion); + pb.setName(name); + + return pb; + } + + static ChannelInfo fromPb(ChannelInfoPb pb) { + ChannelInfo model = new ChannelInfo(); + model.setDbsqlVersion(pb.getDbsqlVersion()); + model.setName(pb.getName()); + + return model; + } + + public static class ChannelInfoSerializer extends JsonSerializer { + @Override + public void serialize(ChannelInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ChannelInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ChannelInfoDeserializer extends JsonDeserializer { + @Override + public ChannelInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ChannelInfoPb pb = mapper.readValue(p, ChannelInfoPb.class); + return ChannelInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfoPb.java new file mode 100755 index 000000000..8a5de5df6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfoPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details about a Channel. */ +@Generated +class ChannelInfoPb { + @JsonProperty("dbsql_version") + private String dbsqlVersion; + + @JsonProperty("name") + private ChannelName name; + + public ChannelInfoPb setDbsqlVersion(String dbsqlVersion) { + this.dbsqlVersion = dbsqlVersion; + return this; + } + + public String getDbsqlVersion() { + return dbsqlVersion; + } + + public ChannelInfoPb setName(ChannelName name) { + this.name = name; + return this; + } + + public ChannelName getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ChannelInfoPb that = (ChannelInfoPb) o; + return Objects.equals(dbsqlVersion, that.dbsqlVersion) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(dbsqlVersion, name); + } + + @Override + public String toString() { + return new ToStringer(ChannelInfoPb.class) + .add("dbsqlVersion", dbsqlVersion) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelPb.java new file mode 100755 index 000000000..e3ade1079 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Configures the channel name and DBSQL version of the warehouse. 
CHANNEL_NAME_CUSTOM should be + * chosen only when `dbsql_version` is specified. + */ +@Generated +class ChannelPb { + @JsonProperty("dbsql_version") + private String dbsqlVersion; + + @JsonProperty("name") + private ChannelName name; + + public ChannelPb setDbsqlVersion(String dbsqlVersion) { + this.dbsqlVersion = dbsqlVersion; + return this; + } + + public String getDbsqlVersion() { + return dbsqlVersion; + } + + public ChannelPb setName(ChannelName name) { + this.name = name; + return this; + } + + public ChannelName getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ChannelPb that = (ChannelPb) o; + return Objects.equals(dbsqlVersion, that.dbsqlVersion) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(dbsqlVersion, name); + } + + @Override + public String toString() { + return new ToStringer(ChannelPb.class) + .add("dbsqlVersion", dbsqlVersion) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java index e84cfa55d..bd1199048 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java @@ -4,49 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ClientConfig.ClientConfigSerializer.class) +@JsonDeserialize(using = ClientConfig.ClientConfigDeserializer.class) public class ClientConfig { /** */ - @JsonProperty("allow_custom_js_visualizations") private Boolean allowCustomJsVisualizations; /** */ - @JsonProperty("allow_downloads") private Boolean allowDownloads; /** */ - @JsonProperty("allow_external_shares") private Boolean allowExternalShares; /** */ - @JsonProperty("allow_subscriptions") private Boolean allowSubscriptions; /** */ - @JsonProperty("date_format") private String dateFormat; /** */ - @JsonProperty("date_time_format") private String dateTimeFormat; /** */ - @JsonProperty("disable_publish") private Boolean disablePublish; /** */ - @JsonProperty("enable_legacy_autodetect_types") private Boolean enableLegacyAutodetectTypes; /** */ - @JsonProperty("feature_show_permissions_control") private Boolean featureShowPermissionsControl; /** */ - @JsonProperty("hide_plotly_mode_bar") private Boolean hidePlotlyModeBar; public ClientConfig setAllowCustomJsVisualizations(Boolean allowCustomJsVisualizations) { @@ -186,4 +187,55 @@ public String toString() { .add("hidePlotlyModeBar", hidePlotlyModeBar) .toString(); } + + ClientConfigPb toPb() { + ClientConfigPb pb = new ClientConfigPb(); + pb.setAllowCustomJsVisualizations(allowCustomJsVisualizations); + pb.setAllowDownloads(allowDownloads); + pb.setAllowExternalShares(allowExternalShares); + pb.setAllowSubscriptions(allowSubscriptions); + pb.setDateFormat(dateFormat); + pb.setDateTimeFormat(dateTimeFormat); + pb.setDisablePublish(disablePublish); + pb.setEnableLegacyAutodetectTypes(enableLegacyAutodetectTypes); + pb.setFeatureShowPermissionsControl(featureShowPermissionsControl); + 
pb.setHidePlotlyModeBar(hidePlotlyModeBar); + + return pb; + } + + static ClientConfig fromPb(ClientConfigPb pb) { + ClientConfig model = new ClientConfig(); + model.setAllowCustomJsVisualizations(pb.getAllowCustomJsVisualizations()); + model.setAllowDownloads(pb.getAllowDownloads()); + model.setAllowExternalShares(pb.getAllowExternalShares()); + model.setAllowSubscriptions(pb.getAllowSubscriptions()); + model.setDateFormat(pb.getDateFormat()); + model.setDateTimeFormat(pb.getDateTimeFormat()); + model.setDisablePublish(pb.getDisablePublish()); + model.setEnableLegacyAutodetectTypes(pb.getEnableLegacyAutodetectTypes()); + model.setFeatureShowPermissionsControl(pb.getFeatureShowPermissionsControl()); + model.setHidePlotlyModeBar(pb.getHidePlotlyModeBar()); + + return model; + } + + public static class ClientConfigSerializer extends JsonSerializer { + @Override + public void serialize(ClientConfig value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ClientConfigPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ClientConfigDeserializer extends JsonDeserializer { + @Override + public ClientConfig deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ClientConfigPb pb = mapper.readValue(p, ClientConfigPb.class); + return ClientConfig.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfigPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfigPb.java new file mode 100755 index 000000000..47bc33f58 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfigPb.java @@ -0,0 +1,179 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ClientConfigPb { + @JsonProperty("allow_custom_js_visualizations") + private Boolean allowCustomJsVisualizations; + + @JsonProperty("allow_downloads") + private Boolean allowDownloads; + + @JsonProperty("allow_external_shares") + private Boolean allowExternalShares; + + @JsonProperty("allow_subscriptions") + private Boolean allowSubscriptions; + + @JsonProperty("date_format") + private String dateFormat; + + @JsonProperty("date_time_format") + private String dateTimeFormat; + + @JsonProperty("disable_publish") + private Boolean disablePublish; + + @JsonProperty("enable_legacy_autodetect_types") + private Boolean enableLegacyAutodetectTypes; + + @JsonProperty("feature_show_permissions_control") + private Boolean featureShowPermissionsControl; + + @JsonProperty("hide_plotly_mode_bar") + private Boolean hidePlotlyModeBar; + + public ClientConfigPb setAllowCustomJsVisualizations(Boolean allowCustomJsVisualizations) { + this.allowCustomJsVisualizations = allowCustomJsVisualizations; + return this; + } + + public Boolean getAllowCustomJsVisualizations() { + return allowCustomJsVisualizations; + } + + public ClientConfigPb setAllowDownloads(Boolean allowDownloads) { + this.allowDownloads = allowDownloads; + return this; + } + + public Boolean getAllowDownloads() { + return allowDownloads; + } + + public ClientConfigPb setAllowExternalShares(Boolean allowExternalShares) { + this.allowExternalShares = allowExternalShares; + return this; + } + + public Boolean getAllowExternalShares() { + return allowExternalShares; + } + + public ClientConfigPb setAllowSubscriptions(Boolean allowSubscriptions) { + this.allowSubscriptions = allowSubscriptions; + return this; + } + + public Boolean getAllowSubscriptions() { + return allowSubscriptions; + } 
+ + public ClientConfigPb setDateFormat(String dateFormat) { + this.dateFormat = dateFormat; + return this; + } + + public String getDateFormat() { + return dateFormat; + } + + public ClientConfigPb setDateTimeFormat(String dateTimeFormat) { + this.dateTimeFormat = dateTimeFormat; + return this; + } + + public String getDateTimeFormat() { + return dateTimeFormat; + } + + public ClientConfigPb setDisablePublish(Boolean disablePublish) { + this.disablePublish = disablePublish; + return this; + } + + public Boolean getDisablePublish() { + return disablePublish; + } + + public ClientConfigPb setEnableLegacyAutodetectTypes(Boolean enableLegacyAutodetectTypes) { + this.enableLegacyAutodetectTypes = enableLegacyAutodetectTypes; + return this; + } + + public Boolean getEnableLegacyAutodetectTypes() { + return enableLegacyAutodetectTypes; + } + + public ClientConfigPb setFeatureShowPermissionsControl(Boolean featureShowPermissionsControl) { + this.featureShowPermissionsControl = featureShowPermissionsControl; + return this; + } + + public Boolean getFeatureShowPermissionsControl() { + return featureShowPermissionsControl; + } + + public ClientConfigPb setHidePlotlyModeBar(Boolean hidePlotlyModeBar) { + this.hidePlotlyModeBar = hidePlotlyModeBar; + return this; + } + + public Boolean getHidePlotlyModeBar() { + return hidePlotlyModeBar; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClientConfigPb that = (ClientConfigPb) o; + return Objects.equals(allowCustomJsVisualizations, that.allowCustomJsVisualizations) + && Objects.equals(allowDownloads, that.allowDownloads) + && Objects.equals(allowExternalShares, that.allowExternalShares) + && Objects.equals(allowSubscriptions, that.allowSubscriptions) + && Objects.equals(dateFormat, that.dateFormat) + && Objects.equals(dateTimeFormat, that.dateTimeFormat) + && Objects.equals(disablePublish, that.disablePublish) + && 
Objects.equals(enableLegacyAutodetectTypes, that.enableLegacyAutodetectTypes) + && Objects.equals(featureShowPermissionsControl, that.featureShowPermissionsControl) + && Objects.equals(hidePlotlyModeBar, that.hidePlotlyModeBar); + } + + @Override + public int hashCode() { + return Objects.hash( + allowCustomJsVisualizations, + allowDownloads, + allowExternalShares, + allowSubscriptions, + dateFormat, + dateTimeFormat, + disablePublish, + enableLegacyAutodetectTypes, + featureShowPermissionsControl, + hidePlotlyModeBar); + } + + @Override + public String toString() { + return new ToStringer(ClientConfigPb.class) + .add("allowCustomJsVisualizations", allowCustomJsVisualizations) + .add("allowDownloads", allowDownloads) + .add("allowExternalShares", allowExternalShares) + .add("allowSubscriptions", allowSubscriptions) + .add("dateFormat", dateFormat) + .add("dateTimeFormat", dateTimeFormat) + .add("disablePublish", disablePublish) + .add("enableLegacyAutodetectTypes", enableLegacyAutodetectTypes) + .add("featureShowPermissionsControl", featureShowPermissionsControl) + .add("hidePlotlyModeBar", hidePlotlyModeBar) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java index 3c71c523c..750354f7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java @@ -4,43 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ColumnInfo.ColumnInfoSerializer.class) +@JsonDeserialize(using = ColumnInfo.ColumnInfoDeserializer.class) public class ColumnInfo { /** The name of the column. */ - @JsonProperty("name") private String name; /** The ordinal position of the column (starting at position 0). */ - @JsonProperty("position") private Long position; /** The format of the interval type. */ - @JsonProperty("type_interval_type") private String typeIntervalType; /** * The name of the base data type. This doesn't include details for complex types such as STRUCT, * MAP or ARRAY. */ - @JsonProperty("type_name") private ColumnInfoTypeName typeName; /** Specifies the number of digits in a number. This applies to the DECIMAL type. */ - @JsonProperty("type_precision") private Long typePrecision; /** * Specifies the number of digits to the right of the decimal point in a number. This applies to * the DECIMAL type. */ - @JsonProperty("type_scale") private Long typeScale; /** The full SQL type specification. 
*/ - @JsonProperty("type_text") private String typeText; public ColumnInfo setName(String name) { @@ -138,4 +142,49 @@ public String toString() { .add("typeText", typeText) .toString(); } + + ColumnInfoPb toPb() { + ColumnInfoPb pb = new ColumnInfoPb(); + pb.setName(name); + pb.setPosition(position); + pb.setTypeIntervalType(typeIntervalType); + pb.setTypeName(typeName); + pb.setTypePrecision(typePrecision); + pb.setTypeScale(typeScale); + pb.setTypeText(typeText); + + return pb; + } + + static ColumnInfo fromPb(ColumnInfoPb pb) { + ColumnInfo model = new ColumnInfo(); + model.setName(pb.getName()); + model.setPosition(pb.getPosition()); + model.setTypeIntervalType(pb.getTypeIntervalType()); + model.setTypeName(pb.getTypeName()); + model.setTypePrecision(pb.getTypePrecision()); + model.setTypeScale(pb.getTypeScale()); + model.setTypeText(pb.getTypeText()); + + return model; + } + + public static class ColumnInfoSerializer extends JsonSerializer { + @Override + public void serialize(ColumnInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ColumnInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ColumnInfoDeserializer extends JsonDeserializer { + @Override + public ColumnInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ColumnInfoPb pb = mapper.readValue(p, ColumnInfoPb.class); + return ColumnInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoPb.java new file mode 100755 index 000000000..137339fac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ColumnInfoPb { + @JsonProperty("name") + private String name; + + @JsonProperty("position") + private Long position; + + @JsonProperty("type_interval_type") + private String typeIntervalType; + + @JsonProperty("type_name") + private ColumnInfoTypeName typeName; + + @JsonProperty("type_precision") + private Long typePrecision; + + @JsonProperty("type_scale") + private Long typeScale; + + @JsonProperty("type_text") + private String typeText; + + public ColumnInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ColumnInfoPb setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + public ColumnInfoPb setTypeIntervalType(String typeIntervalType) { + this.typeIntervalType = typeIntervalType; + return this; + } + + public String getTypeIntervalType() { + return typeIntervalType; + } + + public ColumnInfoPb setTypeName(ColumnInfoTypeName typeName) { + this.typeName = typeName; + return this; + } + + public ColumnInfoTypeName getTypeName() { + return typeName; + } + + public ColumnInfoPb setTypePrecision(Long 
typePrecision) { + this.typePrecision = typePrecision; + return this; + } + + public Long getTypePrecision() { + return typePrecision; + } + + public ColumnInfoPb setTypeScale(Long typeScale) { + this.typeScale = typeScale; + return this; + } + + public Long getTypeScale() { + return typeScale; + } + + public ColumnInfoPb setTypeText(String typeText) { + this.typeText = typeText; + return this; + } + + public String getTypeText() { + return typeText; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ColumnInfoPb that = (ColumnInfoPb) o; + return Objects.equals(name, that.name) + && Objects.equals(position, that.position) + && Objects.equals(typeIntervalType, that.typeIntervalType) + && Objects.equals(typeName, that.typeName) + && Objects.equals(typePrecision, that.typePrecision) + && Objects.equals(typeScale, that.typeScale) + && Objects.equals(typeText, that.typeText); + } + + @Override + public int hashCode() { + return Objects.hash( + name, position, typeIntervalType, typeName, typePrecision, typeScale, typeText); + } + + @Override + public String toString() { + return new ToStringer(ColumnInfoPb.class) + .add("name", name) + .add("position", position) + .add("typeIntervalType", typeIntervalType) + .add("typeName", typeName) + .add("typePrecision", typePrecision) + .add("typeScale", typeScale) + .add("typeText", typeText) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Converters.java new file mode 100755 index 000000000..4bd32a0d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.sql; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import 
java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that seconds are within the range allowed by protobuf Duration + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that seconds are within the range allowed by protobuf Timestamp + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List<String> fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List<String> fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java index 52d3f691a..c882127d5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateAlert.CreateAlertSerializer.class) +@JsonDeserialize(using = CreateAlert.CreateAlertDeserializer.class) public class CreateAlert { /** Name of the alert. */ - @JsonProperty("name") private String name; /** Alert configuration options. */ - @JsonProperty("options") private AlertOptions options; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** Query ID. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds after being triggered before the alert rearms itself and can be triggered * again. If `null`, alert will never be triggered again. 
*/ - @JsonProperty("rearm") private Long rearm; public CreateAlert setName(String name) { @@ -104,4 +110,45 @@ public String toString() { .add("rearm", rearm) .toString(); } + + CreateAlertPb toPb() { + CreateAlertPb pb = new CreateAlertPb(); + pb.setName(name); + pb.setOptions(options); + pb.setParent(parent); + pb.setQueryId(queryId); + pb.setRearm(rearm); + + return pb; + } + + static CreateAlert fromPb(CreateAlertPb pb) { + CreateAlert model = new CreateAlert(); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setParent(pb.getParent()); + model.setQueryId(pb.getQueryId()); + model.setRearm(pb.getRearm()); + + return model; + } + + public static class CreateAlertSerializer extends JsonSerializer { + @Override + public void serialize(CreateAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAlertDeserializer extends JsonDeserializer { + @Override + public CreateAlert deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAlertPb pb = mapper.readValue(p, CreateAlertPb.class); + return CreateAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertPb.java new file mode 100755 index 000000000..eb1103594 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateAlertPb { + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private AlertOptions options; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("rearm") + private Long rearm; + + public CreateAlertPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateAlertPb setOptions(AlertOptions options) { + this.options = options; + return this; + } + + public AlertOptions getOptions() { + return options; + } + + public CreateAlertPb setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public CreateAlertPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public CreateAlertPb setRearm(Long rearm) { + this.rearm = rearm; + return this; + } + + public Long getRearm() { + return rearm; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAlertPb that = (CreateAlertPb) o; + return Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(parent, that.parent) + && Objects.equals(queryId, that.queryId) + && Objects.equals(rearm, that.rearm); + } + + @Override + public int hashCode() { + return Objects.hash(name, options, parent, queryId, rearm); + } + + @Override + public String toString() { + return new ToStringer(CreateAlertPb.class) + .add("name", name) + .add("options", options) + .add("parent", parent) + .add("queryId", queryId) + .add("rearm", rearm) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java index 3bed24fef..6a5461a68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateAlertRequest.CreateAlertRequestSerializer.class) +@JsonDeserialize(using = CreateAlertRequest.CreateAlertRequestDeserializer.class) public class CreateAlertRequest { /** */ - @JsonProperty("alert") private CreateAlertRequestAlert alert; /** * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the * alert's display name conflicts with an existing alert's display name. 
*/ - @JsonProperty("auto_resolve_display_name") private Boolean autoResolveDisplayName; public CreateAlertRequest setAlert(CreateAlertRequestAlert alert) { @@ -59,4 +68,40 @@ public String toString() { .add("autoResolveDisplayName", autoResolveDisplayName) .toString(); } + + CreateAlertRequestPb toPb() { + CreateAlertRequestPb pb = new CreateAlertRequestPb(); + pb.setAlert(alert); + pb.setAutoResolveDisplayName(autoResolveDisplayName); + + return pb; + } + + static CreateAlertRequest fromPb(CreateAlertRequestPb pb) { + CreateAlertRequest model = new CreateAlertRequest(); + model.setAlert(pb.getAlert()); + model.setAutoResolveDisplayName(pb.getAutoResolveDisplayName()); + + return model; + } + + public static class CreateAlertRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateAlertRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAlertRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAlertRequestDeserializer extends JsonDeserializer { + @Override + public CreateAlertRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAlertRequestPb pb = mapper.readValue(p, CreateAlertRequestPb.class); + return CreateAlertRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java index 80af13302..eb34ed6aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateAlertRequestAlert.CreateAlertRequestAlertSerializer.class) +@JsonDeserialize(using = CreateAlertRequestAlert.CreateAlertRequestAlertDeserializer.class) public class CreateAlertRequestAlert { /** Trigger conditions of the alert. */ - @JsonProperty("condition") private AlertCondition condition; /** @@ -18,7 +28,6 @@ public class CreateAlertRequestAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_body") private String customBody; /** @@ -27,30 +36,24 @@ public class CreateAlertRequestAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_subject") private String customSubject; /** The display name of the alert. */ - @JsonProperty("display_name") private String displayName; /** Whether to notify alert subscribers when alert returns back to normal. */ - @JsonProperty("notify_on_ok") private Boolean notifyOnOk; /** The workspace path of the folder containing the alert. */ - @JsonProperty("parent_path") private String parentPath; /** UUID of the query attached to the alert. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it * can be triggered again. If 0 or not specified, the alert will not be triggered again. */ - @JsonProperty("seconds_to_retrigger") private Long secondsToRetrigger; public CreateAlertRequestAlert setCondition(AlertCondition condition) { @@ -166,4 +169,55 @@ public String toString() { .add("secondsToRetrigger", secondsToRetrigger) .toString(); } + + CreateAlertRequestAlertPb toPb() { + CreateAlertRequestAlertPb pb = new CreateAlertRequestAlertPb(); + pb.setCondition(condition); + pb.setCustomBody(customBody); + pb.setCustomSubject(customSubject); + pb.setDisplayName(displayName); + pb.setNotifyOnOk(notifyOnOk); + pb.setParentPath(parentPath); + pb.setQueryId(queryId); + pb.setSecondsToRetrigger(secondsToRetrigger); + + return pb; + } + + static CreateAlertRequestAlert fromPb(CreateAlertRequestAlertPb pb) { + CreateAlertRequestAlert model = new CreateAlertRequestAlert(); + model.setCondition(pb.getCondition()); + model.setCustomBody(pb.getCustomBody()); + model.setCustomSubject(pb.getCustomSubject()); + model.setDisplayName(pb.getDisplayName()); + model.setNotifyOnOk(pb.getNotifyOnOk()); + model.setParentPath(pb.getParentPath()); + model.setQueryId(pb.getQueryId()); + model.setSecondsToRetrigger(pb.getSecondsToRetrigger()); + + return model; + } + + public static class 
CreateAlertRequestAlertSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateAlertRequestAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAlertRequestAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAlertRequestAlertDeserializer + extends JsonDeserializer { + @Override + public CreateAlertRequestAlert deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAlertRequestAlertPb pb = mapper.readValue(p, CreateAlertRequestAlertPb.class); + return CreateAlertRequestAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlertPb.java new file mode 100755 index 000000000..ebf0553fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlertPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateAlertRequestAlertPb { + @JsonProperty("condition") + private AlertCondition condition; + + @JsonProperty("custom_body") + private String customBody; + + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("seconds_to_retrigger") + private Long secondsToRetrigger; + + public CreateAlertRequestAlertPb setCondition(AlertCondition condition) { + this.condition = condition; + return this; + } + + public AlertCondition getCondition() { + return condition; + } + + public CreateAlertRequestAlertPb setCustomBody(String customBody) { + this.customBody = customBody; + return this; + } + + public String getCustomBody() { + return customBody; + } + + public CreateAlertRequestAlertPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public CreateAlertRequestAlertPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateAlertRequestAlertPb setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + + public CreateAlertRequestAlertPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public CreateAlertRequestAlertPb setQueryId(String queryId) { + 
this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public CreateAlertRequestAlertPb setSecondsToRetrigger(Long secondsToRetrigger) { + this.secondsToRetrigger = secondsToRetrigger; + return this; + } + + public Long getSecondsToRetrigger() { + return secondsToRetrigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAlertRequestAlertPb that = (CreateAlertRequestAlertPb) o; + return Objects.equals(condition, that.condition) + && Objects.equals(customBody, that.customBody) + && Objects.equals(customSubject, that.customSubject) + && Objects.equals(displayName, that.displayName) + && Objects.equals(notifyOnOk, that.notifyOnOk) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(queryId, that.queryId) + && Objects.equals(secondsToRetrigger, that.secondsToRetrigger); + } + + @Override + public int hashCode() { + return Objects.hash( + condition, + customBody, + customSubject, + displayName, + notifyOnOk, + parentPath, + queryId, + secondsToRetrigger); + } + + @Override + public String toString() { + return new ToStringer(CreateAlertRequestAlertPb.class) + .add("condition", condition) + .add("customBody", customBody) + .add("customSubject", customSubject) + .add("displayName", displayName) + .add("notifyOnOk", notifyOnOk) + .add("parentPath", parentPath) + .add("queryId", queryId) + .add("secondsToRetrigger", secondsToRetrigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestPb.java new file mode 100755 index 000000000..707873d0b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateAlertRequestPb { + @JsonProperty("alert") + private CreateAlertRequestAlert alert; + + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + + public CreateAlertRequestPb setAlert(CreateAlertRequestAlert alert) { + this.alert = alert; + return this; + } + + public CreateAlertRequestAlert getAlert() { + return alert; + } + + public CreateAlertRequestPb setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAlertRequestPb that = (CreateAlertRequestPb) o; + return Objects.equals(alert, that.alert) + && Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName); + } + + @Override + public int hashCode() { + return Objects.hash(alert, autoResolveDisplayName); + } + + @Override + public String toString() { + return new ToStringer(CreateAlertRequestPb.class) + .add("alert", alert) + .add("autoResolveDisplayName", autoResolveDisplayName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java index 10b1698ab..e82a63d6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Create an alert */ @Generated +@JsonSerialize(using = CreateAlertV2Request.CreateAlertV2RequestSerializer.class) +@JsonDeserialize(using = CreateAlertV2Request.CreateAlertV2RequestDeserializer.class) public class CreateAlertV2Request { /** */ - @JsonProperty("alert") private AlertV2 alert; public CreateAlertV2Request setAlert(AlertV2 alert) { @@ -40,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(CreateAlertV2Request.class).add("alert", alert).toString(); } + + CreateAlertV2RequestPb toPb() { + CreateAlertV2RequestPb pb = new CreateAlertV2RequestPb(); + pb.setAlert(alert); + + return pb; + } + + static CreateAlertV2Request fromPb(CreateAlertV2RequestPb pb) { + CreateAlertV2Request model = new CreateAlertV2Request(); + model.setAlert(pb.getAlert()); + + return model; + } + + public static class CreateAlertV2RequestSerializer extends JsonSerializer { + @Override + public void serialize( + CreateAlertV2Request value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateAlertV2RequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateAlertV2RequestDeserializer + extends JsonDeserializer { + @Override + public CreateAlertV2Request deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateAlertV2RequestPb pb = mapper.readValue(p, CreateAlertV2RequestPb.class); + return CreateAlertV2Request.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2RequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2RequestPb.java new file mode 100755 index 000000000..0c0c3233d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2RequestPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create an alert */ +@Generated +class CreateAlertV2RequestPb { + @JsonProperty("alert") + private AlertV2 alert; + + public CreateAlertV2RequestPb setAlert(AlertV2 alert) { + this.alert = alert; + return this; + } + + public AlertV2 getAlert() { + return alert; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAlertV2RequestPb that = (CreateAlertV2RequestPb) o; + return Objects.equals(alert, that.alert); + } + + @Override + public int hashCode() { + return Objects.hash(alert); + } + + @Override + public String toString() { + return new ToStringer(CreateAlertV2RequestPb.class).add("alert", alert).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java index 99a698477..0c1dc7ad1 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateQueryRequest.CreateQueryRequestSerializer.class) +@JsonDeserialize(using = CreateQueryRequest.CreateQueryRequestDeserializer.class) public class CreateQueryRequest { /** * If true, automatically resolve query display name conflicts. Otherwise, fail the request if the * query's display name conflicts with an existing query's display name. 
*/ - @JsonProperty("auto_resolve_display_name") private Boolean autoResolveDisplayName; /** */ - @JsonProperty("query") private CreateQueryRequestQuery query; public CreateQueryRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) { @@ -59,4 +68,40 @@ public String toString() { .add("query", query) .toString(); } + + CreateQueryRequestPb toPb() { + CreateQueryRequestPb pb = new CreateQueryRequestPb(); + pb.setAutoResolveDisplayName(autoResolveDisplayName); + pb.setQuery(query); + + return pb; + } + + static CreateQueryRequest fromPb(CreateQueryRequestPb pb) { + CreateQueryRequest model = new CreateQueryRequest(); + model.setAutoResolveDisplayName(pb.getAutoResolveDisplayName()); + model.setQuery(pb.getQuery()); + + return model; + } + + public static class CreateQueryRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateQueryRequestDeserializer extends JsonDeserializer { + @Override + public CreateQueryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateQueryRequestPb pb = mapper.readValue(p, CreateQueryRequestPb.class); + return CreateQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestPb.java new file mode 100755 index 000000000..f78e2adb1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateQueryRequestPb { + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + + @JsonProperty("query") + private CreateQueryRequestQuery query; + + public CreateQueryRequestPb setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + + public CreateQueryRequestPb setQuery(CreateQueryRequestQuery query) { + this.query = query; + return this; + } + + public CreateQueryRequestQuery getQuery() { + return query; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQueryRequestPb that = (CreateQueryRequestPb) o; + return Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName) + && Objects.equals(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hash(autoResolveDisplayName, query); + } + + @Override + public String toString() { + return new ToStringer(CreateQueryRequestPb.class) + .add("autoResolveDisplayName", autoResolveDisplayName) + .add("query", query) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQuery.java index 021b71fd0..5752d314c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQuery.java @@ -4,58 +4,58 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = CreateQueryRequestQuery.CreateQueryRequestQuerySerializer.class) +@JsonDeserialize(using = CreateQueryRequestQuery.CreateQueryRequestQueryDeserializer.class) public class CreateQueryRequestQuery { /** Whether to apply a 1000 row limit to the query result. */ - @JsonProperty("apply_auto_limit") private Boolean applyAutoLimit; /** Name of the catalog where this query will be executed. */ - @JsonProperty("catalog") private String catalog; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** * Display name of the query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("display_name") private String displayName; /** List of query parameter definitions. */ - @JsonProperty("parameters") private Collection parameters; /** Workspace path of the workspace folder containing the object. */ - @JsonProperty("parent_path") private String parentPath; /** Text of the query to be run. */ - @JsonProperty("query_text") private String queryText; /** Sets the "Run as" role for the object. */ - @JsonProperty("run_as_mode") private RunAsMode runAsMode; /** Name of the schema where this query will be executed. 
*/ - @JsonProperty("schema") private String schema; /** */ - @JsonProperty("tags") private Collection tags; /** ID of the SQL warehouse attached to the query. */ - @JsonProperty("warehouse_id") private String warehouseId; public CreateQueryRequestQuery setApplyAutoLimit(Boolean applyAutoLimit) { @@ -207,4 +207,61 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + CreateQueryRequestQueryPb toPb() { + CreateQueryRequestQueryPb pb = new CreateQueryRequestQueryPb(); + pb.setApplyAutoLimit(applyAutoLimit); + pb.setCatalog(catalog); + pb.setDescription(description); + pb.setDisplayName(displayName); + pb.setParameters(parameters); + pb.setParentPath(parentPath); + pb.setQueryText(queryText); + pb.setRunAsMode(runAsMode); + pb.setSchema(schema); + pb.setTags(tags); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static CreateQueryRequestQuery fromPb(CreateQueryRequestQueryPb pb) { + CreateQueryRequestQuery model = new CreateQueryRequestQuery(); + model.setApplyAutoLimit(pb.getApplyAutoLimit()); + model.setCatalog(pb.getCatalog()); + model.setDescription(pb.getDescription()); + model.setDisplayName(pb.getDisplayName()); + model.setParameters(pb.getParameters()); + model.setParentPath(pb.getParentPath()); + model.setQueryText(pb.getQueryText()); + model.setRunAsMode(pb.getRunAsMode()); + model.setSchema(pb.getSchema()); + model.setTags(pb.getTags()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class CreateQueryRequestQuerySerializer + extends JsonSerializer { + @Override + public void serialize( + CreateQueryRequestQuery value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateQueryRequestQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateQueryRequestQueryDeserializer + extends JsonDeserializer { + @Override + public CreateQueryRequestQuery deserialize(JsonParser p, DeserializationContext ctxt) + throws 
IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateQueryRequestQueryPb pb = mapper.readValue(p, CreateQueryRequestQueryPb.class); + return CreateQueryRequestQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQueryPb.java new file mode 100755 index 000000000..bb9385e78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQueryPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class CreateQueryRequestQueryPb { + @JsonProperty("apply_auto_limit") + private Boolean applyAutoLimit; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("description") + private String description; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("run_as_mode") + private RunAsMode runAsMode; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public CreateQueryRequestQueryPb setApplyAutoLimit(Boolean applyAutoLimit) { + this.applyAutoLimit = applyAutoLimit; + return this; + } + + public Boolean getApplyAutoLimit() { + return applyAutoLimit; + } + + public CreateQueryRequestQueryPb setCatalog(String 
catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public CreateQueryRequestQueryPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateQueryRequestQueryPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateQueryRequestQueryPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public CreateQueryRequestQueryPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public CreateQueryRequestQueryPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public CreateQueryRequestQueryPb setRunAsMode(RunAsMode runAsMode) { + this.runAsMode = runAsMode; + return this; + } + + public RunAsMode getRunAsMode() { + return runAsMode; + } + + public CreateQueryRequestQueryPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public CreateQueryRequestQueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public CreateQueryRequestQueryPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQueryRequestQueryPb that = (CreateQueryRequestQueryPb) o; + return Objects.equals(applyAutoLimit, 
that.applyAutoLimit) + && Objects.equals(catalog, that.catalog) + && Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(parameters, that.parameters) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(queryText, that.queryText) + && Objects.equals(runAsMode, that.runAsMode) + && Objects.equals(schema, that.schema) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + applyAutoLimit, + catalog, + description, + displayName, + parameters, + parentPath, + queryText, + runAsMode, + schema, + tags, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(CreateQueryRequestQueryPb.class) + .add("applyAutoLimit", applyAutoLimit) + .add("catalog", catalog) + .add("description", description) + .add("displayName", displayName) + .add("parameters", parameters) + .add("parentPath", parentPath) + .add("queryText", queryText) + .add("runAsMode", runAsMode) + .add("schema", schema) + .add("tags", tags) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequest.java index 8fcd94153..1d850fd1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequest.java @@ -4,33 +4,45 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; 
+import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Add visualization to a query */ @Generated +@JsonSerialize( + using = + CreateQueryVisualizationsLegacyRequest.CreateQueryVisualizationsLegacyRequestSerializer + .class) +@JsonDeserialize( + using = + CreateQueryVisualizationsLegacyRequest.CreateQueryVisualizationsLegacyRequestDeserializer + .class) public class CreateQueryVisualizationsLegacyRequest { /** A short description of this visualization. This is not displayed in the UI. */ - @JsonProperty("description") private String description; /** The name of the visualization that appears on dashboards and the query screen. */ - @JsonProperty("name") private String name; /** * The options object varies widely from one visualization type to the next and is unsupported. * Databricks does not recommend modifying visualization settings in JSON. */ - @JsonProperty("options") private Object options; /** The identifier returned by :method:queries/create */ - @JsonProperty("query_id") private String queryId; /** The type of visualization: chart, table, pivot table, and so on. 
*/ - @JsonProperty("type") private String typeValue; public CreateQueryVisualizationsLegacyRequest setDescription(String description) { @@ -105,4 +117,53 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + CreateQueryVisualizationsLegacyRequestPb toPb() { + CreateQueryVisualizationsLegacyRequestPb pb = new CreateQueryVisualizationsLegacyRequestPb(); + pb.setDescription(description); + pb.setName(name); + pb.setOptions(options); + pb.setQueryId(queryId); + pb.setType(typeValue); + + return pb; + } + + static CreateQueryVisualizationsLegacyRequest fromPb( + CreateQueryVisualizationsLegacyRequestPb pb) { + CreateQueryVisualizationsLegacyRequest model = new CreateQueryVisualizationsLegacyRequest(); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setQueryId(pb.getQueryId()); + model.setType(pb.getType()); + + return model; + } + + public static class CreateQueryVisualizationsLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateQueryVisualizationsLegacyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateQueryVisualizationsLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateQueryVisualizationsLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public CreateQueryVisualizationsLegacyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateQueryVisualizationsLegacyRequestPb pb = + mapper.readValue(p, CreateQueryVisualizationsLegacyRequestPb.class); + return CreateQueryVisualizationsLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequestPb.java new file mode 100755 index 000000000..58a16bd83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequestPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Add visualization to a query */ +@Generated +class CreateQueryVisualizationsLegacyRequestPb { + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Object options; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("type") + private String typeValue; + + public CreateQueryVisualizationsLegacyRequestPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public CreateQueryVisualizationsLegacyRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateQueryVisualizationsLegacyRequestPb setOptions(Object options) { + this.options = options; + return this; + } + + public Object getOptions() { + return options; + } + + public CreateQueryVisualizationsLegacyRequestPb setQueryId(String queryId) { + this.queryId = 
queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public CreateQueryVisualizationsLegacyRequestPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQueryVisualizationsLegacyRequestPb that = (CreateQueryVisualizationsLegacyRequestPb) o; + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(queryId, that.queryId) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(description, name, options, queryId, typeValue); + } + + @Override + public String toString() { + return new ToStringer(CreateQueryVisualizationsLegacyRequestPb.class) + .add("description", description) + .add("name", name) + .add("options", options) + .add("queryId", queryId) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequest.java index a80bab34c..981e17aca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequest.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateVisualizationRequest.CreateVisualizationRequestSerializer.class) +@JsonDeserialize(using = CreateVisualizationRequest.CreateVisualizationRequestDeserializer.class) public class CreateVisualizationRequest { /** */ - @JsonProperty("visualization") private CreateVisualizationRequestVisualization visualization; public CreateVisualizationRequest setVisualization( @@ -42,4 +52,41 @@ public String toString() { .add("visualization", visualization) .toString(); } + + CreateVisualizationRequestPb toPb() { + CreateVisualizationRequestPb pb = new CreateVisualizationRequestPb(); + pb.setVisualization(visualization); + + return pb; + } + + static CreateVisualizationRequest fromPb(CreateVisualizationRequestPb pb) { + CreateVisualizationRequest model = new CreateVisualizationRequest(); + model.setVisualization(pb.getVisualization()); + + return model; + } + + public static class CreateVisualizationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateVisualizationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateVisualizationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateVisualizationRequestDeserializer + extends JsonDeserializer { + @Override + public CreateVisualizationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateVisualizationRequestPb pb = mapper.readValue(p, CreateVisualizationRequestPb.class); + return CreateVisualizationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestPb.java new file mode 100755 index 000000000..9d2080eff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateVisualizationRequestPb { + @JsonProperty("visualization") + private CreateVisualizationRequestVisualization visualization; + + public CreateVisualizationRequestPb setVisualization( + CreateVisualizationRequestVisualization visualization) { + this.visualization = visualization; + return this; + } + + public CreateVisualizationRequestVisualization getVisualization() { + return visualization; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateVisualizationRequestPb that = (CreateVisualizationRequestPb) o; + return Objects.equals(visualization, that.visualization); + } + + @Override + public int hashCode() { + return Objects.hash(visualization); + } + + @Override + public String toString() { + return new ToStringer(CreateVisualizationRequestPb.class) + .add("visualization", visualization) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualization.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualization.java index 8d2849c86..c5ef4f1b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualization.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualization.java @@ -4,35 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + CreateVisualizationRequestVisualization.CreateVisualizationRequestVisualizationSerializer + .class) +@JsonDeserialize( + using = + CreateVisualizationRequestVisualization.CreateVisualizationRequestVisualizationDeserializer + .class) public class CreateVisualizationRequestVisualization { /** The display name of the visualization. */ - @JsonProperty("display_name") private String displayName; /** UUID of the query that the visualization is attached to. */ - @JsonProperty("query_id") private String queryId; /** * The visualization options varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying visualization options directly. 
*/ - @JsonProperty("serialized_options") private String serializedOptions; /** * The visualization query plan varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying the visualization query plan directly. */ - @JsonProperty("serialized_query_plan") private String serializedQueryPlan; /** The type of visualization: counter, table, funnel, and so on. */ - @JsonProperty("type") private String typeValue; public CreateVisualizationRequestVisualization setDisplayName(String displayName) { @@ -108,4 +120,53 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + CreateVisualizationRequestVisualizationPb toPb() { + CreateVisualizationRequestVisualizationPb pb = new CreateVisualizationRequestVisualizationPb(); + pb.setDisplayName(displayName); + pb.setQueryId(queryId); + pb.setSerializedOptions(serializedOptions); + pb.setSerializedQueryPlan(serializedQueryPlan); + pb.setType(typeValue); + + return pb; + } + + static CreateVisualizationRequestVisualization fromPb( + CreateVisualizationRequestVisualizationPb pb) { + CreateVisualizationRequestVisualization model = new CreateVisualizationRequestVisualization(); + model.setDisplayName(pb.getDisplayName()); + model.setQueryId(pb.getQueryId()); + model.setSerializedOptions(pb.getSerializedOptions()); + model.setSerializedQueryPlan(pb.getSerializedQueryPlan()); + model.setType(pb.getType()); + + return model; + } + + public static class CreateVisualizationRequestVisualizationSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateVisualizationRequestVisualization value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + CreateVisualizationRequestVisualizationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateVisualizationRequestVisualizationDeserializer + extends JsonDeserializer { + @Override + public CreateVisualizationRequestVisualization 
deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateVisualizationRequestVisualizationPb pb = + mapper.readValue(p, CreateVisualizationRequestVisualizationPb.class); + return CreateVisualizationRequestVisualization.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualizationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualizationPb.java new file mode 100755 index 000000000..e85a2d19c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateVisualizationRequestVisualizationPb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateVisualizationRequestVisualizationPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("serialized_options") + private String serializedOptions; + + @JsonProperty("serialized_query_plan") + private String serializedQueryPlan; + + @JsonProperty("type") + private String typeValue; + + public CreateVisualizationRequestVisualizationPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateVisualizationRequestVisualizationPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public CreateVisualizationRequestVisualizationPb 
setSerializedOptions(String serializedOptions) { + this.serializedOptions = serializedOptions; + return this; + } + + public String getSerializedOptions() { + return serializedOptions; + } + + public CreateVisualizationRequestVisualizationPb setSerializedQueryPlan( + String serializedQueryPlan) { + this.serializedQueryPlan = serializedQueryPlan; + return this; + } + + public String getSerializedQueryPlan() { + return serializedQueryPlan; + } + + public CreateVisualizationRequestVisualizationPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateVisualizationRequestVisualizationPb that = (CreateVisualizationRequestVisualizationPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(queryId, that.queryId) + && Objects.equals(serializedOptions, that.serializedOptions) + && Objects.equals(serializedQueryPlan, that.serializedQueryPlan) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, queryId, serializedOptions, serializedQueryPlan, typeValue); + } + + @Override + public String toString() { + return new ToStringer(CreateVisualizationRequestVisualizationPb.class) + .add("displayName", displayName) + .add("queryId", queryId) + .add("serializedOptions", serializedOptions) + .add("serializedQueryPlan", serializedQueryPlan) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java index af89d90bb..4b899847b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateWarehouseRequest.CreateWarehouseRequestSerializer.class) +@JsonDeserialize(using = CreateWarehouseRequest.CreateWarehouseRequestDeserializer.class) public class CreateWarehouseRequest { /** * The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) @@ -18,11 +29,9 @@ public class CreateWarehouseRequest { * *

Defaults to 120 mins */ - @JsonProperty("auto_stop_mins") private Long autoStopMins; /** Channel Details */ - @JsonProperty("channel") private Channel channel; /** @@ -33,11 +42,9 @@ public class CreateWarehouseRequest { *

Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - * 3X-Large - 4X-Large */ - @JsonProperty("cluster_size") private String clusterSize; /** warehouse creator name */ - @JsonProperty("creator_name") private String creatorName; /** @@ -45,15 +52,12 @@ public class CreateWarehouseRequest { * *

Defaults to false. */ - @JsonProperty("enable_photon") private Boolean enablePhoton; /** Configures whether the warehouse should use serverless compute */ - @JsonProperty("enable_serverless_compute") private Boolean enableServerlessCompute; /** Deprecated. Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** @@ -63,7 +67,6 @@ public class CreateWarehouseRequest { * *

Defaults to min_clusters if unset. */ - @JsonProperty("max_num_clusters") private Long maxNumClusters; /** @@ -76,7 +79,6 @@ public class CreateWarehouseRequest { * *

Defaults to 1 */ - @JsonProperty("min_num_clusters") private Long minNumClusters; /** @@ -84,11 +86,9 @@ public class CreateWarehouseRequest { * *

Supported values: - Must be unique within an org. - Must be less than 100 characters. */ - @JsonProperty("name") private String name; /** Configurations whether the warehouse should use spot instances. */ - @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; /** @@ -97,14 +97,12 @@ public class CreateWarehouseRequest { * *

Supported values: - Number of tags < 45. */ - @JsonProperty("tags") private EndpointTags tags; /** * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to * `PRO` and also set the field `enable_serverless_compute` to `true`. */ - @JsonProperty("warehouse_type") private CreateWarehouseRequestWarehouseType warehouseType; public CreateWarehouseRequest setAutoStopMins(Long autoStopMins) { @@ -281,4 +279,65 @@ public String toString() { .add("warehouseType", warehouseType) .toString(); } + + CreateWarehouseRequestPb toPb() { + CreateWarehouseRequestPb pb = new CreateWarehouseRequestPb(); + pb.setAutoStopMins(autoStopMins); + pb.setChannel(channel); + pb.setClusterSize(clusterSize); + pb.setCreatorName(creatorName); + pb.setEnablePhoton(enablePhoton); + pb.setEnableServerlessCompute(enableServerlessCompute); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxNumClusters(maxNumClusters); + pb.setMinNumClusters(minNumClusters); + pb.setName(name); + pb.setSpotInstancePolicy(spotInstancePolicy); + pb.setTags(tags); + pb.setWarehouseType(warehouseType); + + return pb; + } + + static CreateWarehouseRequest fromPb(CreateWarehouseRequestPb pb) { + CreateWarehouseRequest model = new CreateWarehouseRequest(); + model.setAutoStopMins(pb.getAutoStopMins()); + model.setChannel(pb.getChannel()); + model.setClusterSize(pb.getClusterSize()); + model.setCreatorName(pb.getCreatorName()); + model.setEnablePhoton(pb.getEnablePhoton()); + model.setEnableServerlessCompute(pb.getEnableServerlessCompute()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxNumClusters(pb.getMaxNumClusters()); + model.setMinNumClusters(pb.getMinNumClusters()); + model.setName(pb.getName()); + model.setSpotInstancePolicy(pb.getSpotInstancePolicy()); + model.setTags(pb.getTags()); + model.setWarehouseType(pb.getWarehouseType()); + + return model; + } + + public static class CreateWarehouseRequestSerializer + extends JsonSerializer { + @Override + 
public void serialize( + CreateWarehouseRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateWarehouseRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateWarehouseRequestDeserializer + extends JsonDeserializer { + @Override + public CreateWarehouseRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateWarehouseRequestPb pb = mapper.readValue(p, CreateWarehouseRequestPb.class); + return CreateWarehouseRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestPb.java new file mode 100755 index 000000000..249a0c4f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestPb.java @@ -0,0 +1,225 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateWarehouseRequestPb { + @JsonProperty("auto_stop_mins") + private Long autoStopMins; + + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("cluster_size") + private String clusterSize; + + @JsonProperty("creator_name") + private String creatorName; + + @JsonProperty("enable_photon") + private Boolean enablePhoton; + + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("max_num_clusters") + private Long maxNumClusters; + + @JsonProperty("min_num_clusters") + private Long minNumClusters; + + @JsonProperty("name") + private String name; + + @JsonProperty("spot_instance_policy") + private SpotInstancePolicy spotInstancePolicy; + + @JsonProperty("tags") + private EndpointTags tags; + + @JsonProperty("warehouse_type") + private CreateWarehouseRequestWarehouseType warehouseType; + + public CreateWarehouseRequestPb setAutoStopMins(Long autoStopMins) { + this.autoStopMins = autoStopMins; + return this; + } + + public Long getAutoStopMins() { + return autoStopMins; + } + + public CreateWarehouseRequestPb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public CreateWarehouseRequestPb setClusterSize(String clusterSize) { + this.clusterSize = clusterSize; + return this; + } + + public String getClusterSize() { + return clusterSize; + } + + public CreateWarehouseRequestPb setCreatorName(String creatorName) { + this.creatorName = creatorName; + return this; + } + + public String getCreatorName() { + return creatorName; + } + + public CreateWarehouseRequestPb setEnablePhoton(Boolean enablePhoton) { + 
this.enablePhoton = enablePhoton; + return this; + } + + public Boolean getEnablePhoton() { + return enablePhoton; + } + + public CreateWarehouseRequestPb setEnableServerlessCompute(Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + + public CreateWarehouseRequestPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public CreateWarehouseRequestPb setMaxNumClusters(Long maxNumClusters) { + this.maxNumClusters = maxNumClusters; + return this; + } + + public Long getMaxNumClusters() { + return maxNumClusters; + } + + public CreateWarehouseRequestPb setMinNumClusters(Long minNumClusters) { + this.minNumClusters = minNumClusters; + return this; + } + + public Long getMinNumClusters() { + return minNumClusters; + } + + public CreateWarehouseRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateWarehouseRequestPb setSpotInstancePolicy(SpotInstancePolicy spotInstancePolicy) { + this.spotInstancePolicy = spotInstancePolicy; + return this; + } + + public SpotInstancePolicy getSpotInstancePolicy() { + return spotInstancePolicy; + } + + public CreateWarehouseRequestPb setTags(EndpointTags tags) { + this.tags = tags; + return this; + } + + public EndpointTags getTags() { + return tags; + } + + public CreateWarehouseRequestPb setWarehouseType( + CreateWarehouseRequestWarehouseType warehouseType) { + this.warehouseType = warehouseType; + return this; + } + + public CreateWarehouseRequestWarehouseType getWarehouseType() { + return warehouseType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CreateWarehouseRequestPb that = (CreateWarehouseRequestPb) o; + return Objects.equals(autoStopMins, that.autoStopMins) + && Objects.equals(channel, that.channel) + && Objects.equals(clusterSize, that.clusterSize) + && Objects.equals(creatorName, that.creatorName) + && Objects.equals(enablePhoton, that.enablePhoton) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(maxNumClusters, that.maxNumClusters) + && Objects.equals(minNumClusters, that.minNumClusters) + && Objects.equals(name, that.name) + && Objects.equals(spotInstancePolicy, that.spotInstancePolicy) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseType, that.warehouseType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoStopMins, + channel, + clusterSize, + creatorName, + enablePhoton, + enableServerlessCompute, + instanceProfileArn, + maxNumClusters, + minNumClusters, + name, + spotInstancePolicy, + tags, + warehouseType); + } + + @Override + public String toString() { + return new ToStringer(CreateWarehouseRequestPb.class) + .add("autoStopMins", autoStopMins) + .add("channel", channel) + .add("clusterSize", clusterSize) + .add("creatorName", creatorName) + .add("enablePhoton", enablePhoton) + .add("enableServerlessCompute", enableServerlessCompute) + .add("instanceProfileArn", instanceProfileArn) + .add("maxNumClusters", maxNumClusters) + .add("minNumClusters", minNumClusters) + .add("name", name) + .add("spotInstancePolicy", spotInstancePolicy) + .add("tags", tags) + .add("warehouseType", warehouseType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponse.java index 83e5c01d2..d5634661d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponse.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateWarehouseResponse.CreateWarehouseResponseSerializer.class) +@JsonDeserialize(using = CreateWarehouseResponse.CreateWarehouseResponseDeserializer.class) public class CreateWarehouseResponse { /** Id for the SQL warehouse. This value is unique across all SQL warehouses. 
*/ - @JsonProperty("id") private String id; public CreateWarehouseResponse setId(String id) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(CreateWarehouseResponse.class).add("id", id).toString(); } + + CreateWarehouseResponsePb toPb() { + CreateWarehouseResponsePb pb = new CreateWarehouseResponsePb(); + pb.setId(id); + + return pb; + } + + static CreateWarehouseResponse fromPb(CreateWarehouseResponsePb pb) { + CreateWarehouseResponse model = new CreateWarehouseResponse(); + model.setId(pb.getId()); + + return model; + } + + public static class CreateWarehouseResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public CreateWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateWarehouseResponsePb pb = mapper.readValue(p, CreateWarehouseResponsePb.class); + return CreateWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponsePb.java new file mode 100755 index 000000000..34fa15e9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponsePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateWarehouseResponsePb { + @JsonProperty("id") + private String id; + + public CreateWarehouseResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWarehouseResponsePb that = (CreateWarehouseResponsePb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CreateWarehouseResponsePb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidget.java index 660f99c1d..20c12bdaa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidget.java @@ -4,36 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateWidget.CreateWidgetSerializer.class) +@JsonDeserialize(using = CreateWidget.CreateWidgetDeserializer.class) public class CreateWidget { /** Dashboard ID returned by :method:dashboards/create. */ - @JsonProperty("dashboard_id") private String dashboardId; /** Widget ID returned by :method:dashboardwidgets/create */ - @JsonIgnore private String id; + private String id; /** */ - @JsonProperty("options") private WidgetOptions options; /** * If this is a textbox widget, the application displays this text. This field is ignored if the * widget contains a visualization in the `visualization` field. */ - @JsonProperty("text") private String text; /** Query Vizualization ID returned by :method:queryvisualizations/create. */ - @JsonProperty("visualization_id") private String visualizationId; /** Width of a widget */ - @JsonProperty("width") private Long width; public CreateWidget setDashboardId(String dashboardId) { @@ -119,4 +124,47 @@ public String toString() { .add("width", width) .toString(); } + + CreateWidgetPb toPb() { + CreateWidgetPb pb = new CreateWidgetPb(); + pb.setDashboardId(dashboardId); + pb.setId(id); + pb.setOptions(options); + pb.setText(text); + pb.setVisualizationId(visualizationId); + pb.setWidth(width); + + return pb; + } + + static CreateWidget fromPb(CreateWidgetPb pb) { + CreateWidget model = new CreateWidget(); + model.setDashboardId(pb.getDashboardId()); + model.setId(pb.getId()); + model.setOptions(pb.getOptions()); + model.setText(pb.getText()); + model.setVisualizationId(pb.getVisualizationId()); + model.setWidth(pb.getWidth()); + + return model; + } + + public static class CreateWidgetSerializer extends JsonSerializer { + @Override + public void serialize(CreateWidget value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateWidgetPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateWidgetDeserializer extends JsonDeserializer { + @Override + public CreateWidget deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateWidgetPb pb = mapper.readValue(p, CreateWidgetPb.class); + return CreateWidget.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidgetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidgetPb.java new file mode 100755 index 000000000..026860cf2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidgetPb.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateWidgetPb { + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonIgnore private String id; + + @JsonProperty("options") + private WidgetOptions options; + + @JsonProperty("text") + private String text; + + @JsonProperty("visualization_id") + private String visualizationId; + + @JsonProperty("width") + private Long width; + + public CreateWidgetPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public CreateWidgetPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CreateWidgetPb setOptions(WidgetOptions options) { + this.options = options; + return this; + } + + public WidgetOptions 
getOptions() { + return options; + } + + public CreateWidgetPb setText(String text) { + this.text = text; + return this; + } + + public String getText() { + return text; + } + + public CreateWidgetPb setVisualizationId(String visualizationId) { + this.visualizationId = visualizationId; + return this; + } + + public String getVisualizationId() { + return visualizationId; + } + + public CreateWidgetPb setWidth(Long width) { + this.width = width; + return this; + } + + public Long getWidth() { + return width; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWidgetPb that = (CreateWidgetPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(id, that.id) + && Objects.equals(options, that.options) + && Objects.equals(text, that.text) + && Objects.equals(visualizationId, that.visualizationId) + && Objects.equals(width, that.width); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, id, options, text, visualizationId, width); + } + + @Override + public String toString() { + return new ToStringer(CreateWidgetPb.class) + .add("dashboardId", dashboardId) + .add("id", id) + .add("options", options) + .add("text", text) + .add("visualizationId", visualizationId) + .add("width", width) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java index d2245c416..45323ccef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CronSchedule.CronScheduleSerializer.class) +@JsonDeserialize(using = CronSchedule.CronScheduleDeserializer.class) public class CronSchedule { /** Indicate whether this schedule is paused or not. */ - @JsonProperty("pause_status") private SchedulePauseStatus pauseStatus; /** @@ -18,7 +28,6 @@ public class CronSchedule { * the quartz format described here: * http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html */ - @JsonProperty("quartz_cron_schedule") private String quartzCronSchedule; /** @@ -27,7 +36,6 @@ public class CronSchedule { * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html * for details. 
*/ - @JsonProperty("timezone_id") private String timezoneId; public CronSchedule setPauseStatus(SchedulePauseStatus pauseStatus) { @@ -80,4 +88,41 @@ public String toString() { .add("timezoneId", timezoneId) .toString(); } + + CronSchedulePb toPb() { + CronSchedulePb pb = new CronSchedulePb(); + pb.setPauseStatus(pauseStatus); + pb.setQuartzCronSchedule(quartzCronSchedule); + pb.setTimezoneId(timezoneId); + + return pb; + } + + static CronSchedule fromPb(CronSchedulePb pb) { + CronSchedule model = new CronSchedule(); + model.setPauseStatus(pb.getPauseStatus()); + model.setQuartzCronSchedule(pb.getQuartzCronSchedule()); + model.setTimezoneId(pb.getTimezoneId()); + + return model; + } + + public static class CronScheduleSerializer extends JsonSerializer { + @Override + public void serialize(CronSchedule value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CronSchedulePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CronScheduleDeserializer extends JsonDeserializer { + @Override + public CronSchedule deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CronSchedulePb pb = mapper.readValue(p, CronSchedulePb.class); + return CronSchedule.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedulePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedulePb.java new file mode 100755 index 000000000..4a9296450 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedulePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CronSchedulePb { + @JsonProperty("pause_status") + private SchedulePauseStatus pauseStatus; + + @JsonProperty("quartz_cron_schedule") + private String quartzCronSchedule; + + @JsonProperty("timezone_id") + private String timezoneId; + + public CronSchedulePb setPauseStatus(SchedulePauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public SchedulePauseStatus getPauseStatus() { + return pauseStatus; + } + + public CronSchedulePb setQuartzCronSchedule(String quartzCronSchedule) { + this.quartzCronSchedule = quartzCronSchedule; + return this; + } + + public String getQuartzCronSchedule() { + return quartzCronSchedule; + } + + public CronSchedulePb setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronSchedulePb that = (CronSchedulePb) o; + return Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(quartzCronSchedule, that.quartzCronSchedule) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus, quartzCronSchedule, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(CronSchedulePb.class) + .add("pauseStatus", pauseStatus) + .add("quartzCronSchedule", quartzCronSchedule) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java index 2b8ca46fd..2b3404064 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java @@ -4,30 +4,37 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** A JSON representing a dashboard containing widgets of visualizations and text boxes. */ @Generated +@JsonSerialize(using = Dashboard.DashboardSerializer.class) +@JsonDeserialize(using = Dashboard.DashboardDeserializer.class) public class Dashboard { /** Whether the authenticated user can edit the query definition. */ - @JsonProperty("can_edit") private Boolean canEdit; /** Timestamp when this dashboard was created. */ - @JsonProperty("created_at") private String createdAt; /** * In the web application, query filters that share a name are coupled to a single selection box * if this value is `true`. */ - @JsonProperty("dashboard_filters_enabled") private Boolean dashboardFiltersEnabled; /** The ID for this dashboard. */ - @JsonProperty("id") private String id; /** @@ -35,66 +42,53 @@ public class Dashboard { * this boolean is `true`, the `options` property for this dashboard includes a * `moved_to_trash_at` timestamp. Items in trash are permanently deleted after 30 days. 
*/ - @JsonProperty("is_archived") private Boolean isArchived; /** * Whether a dashboard is a draft. Draft dashboards only appear in list views for their owners. */ - @JsonProperty("is_draft") private Boolean isDraft; /** * Indicates whether this query object appears in the current user's favorites list. This flag * determines whether the star icon for favorites is selected. */ - @JsonProperty("is_favorite") private Boolean isFavorite; /** The title of the dashboard that appears in list views and at the top of the dashboard page. */ - @JsonProperty("name") private String name; /** */ - @JsonProperty("options") private DashboardOptions options; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the * query * `CAN_MANAGE`: Can manage the query */ - @JsonProperty("permission_tier") private PermissionLevel permissionTier; /** * URL slug. Usually mirrors the query name with dashes (`-`) instead of spaces. Appears in the * URL for this query. */ - @JsonProperty("slug") private String slug; /** */ - @JsonProperty("tags") private Collection tags; /** Timestamp when this dashboard was last updated. */ - @JsonProperty("updated_at") private String updatedAt; /** */ - @JsonProperty("user") private User user; /** The ID of the user who owns the dashboard. 
*/ - @JsonProperty("user_id") private Long userId; /** */ - @JsonProperty("widgets") private Collection widgets; public Dashboard setCanEdit(Boolean canEdit) { @@ -318,4 +312,69 @@ public String toString() { .add("widgets", widgets) .toString(); } + + DashboardPb toPb() { + DashboardPb pb = new DashboardPb(); + pb.setCanEdit(canEdit); + pb.setCreatedAt(createdAt); + pb.setDashboardFiltersEnabled(dashboardFiltersEnabled); + pb.setId(id); + pb.setIsArchived(isArchived); + pb.setIsDraft(isDraft); + pb.setIsFavorite(isFavorite); + pb.setName(name); + pb.setOptions(options); + pb.setParent(parent); + pb.setPermissionTier(permissionTier); + pb.setSlug(slug); + pb.setTags(tags); + pb.setUpdatedAt(updatedAt); + pb.setUser(user); + pb.setUserId(userId); + pb.setWidgets(widgets); + + return pb; + } + + static Dashboard fromPb(DashboardPb pb) { + Dashboard model = new Dashboard(); + model.setCanEdit(pb.getCanEdit()); + model.setCreatedAt(pb.getCreatedAt()); + model.setDashboardFiltersEnabled(pb.getDashboardFiltersEnabled()); + model.setId(pb.getId()); + model.setIsArchived(pb.getIsArchived()); + model.setIsDraft(pb.getIsDraft()); + model.setIsFavorite(pb.getIsFavorite()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setParent(pb.getParent()); + model.setPermissionTier(pb.getPermissionTier()); + model.setSlug(pb.getSlug()); + model.setTags(pb.getTags()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUser(pb.getUser()); + model.setUserId(pb.getUserId()); + model.setWidgets(pb.getWidgets()); + + return model; + } + + public static class DashboardSerializer extends JsonSerializer { + @Override + public void serialize(Dashboard value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardDeserializer extends JsonDeserializer { + @Override + public Dashboard deserialize(JsonParser p, DeserializationContext ctxt) 
throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardPb pb = mapper.readValue(p, DashboardPb.class); + return Dashboard.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContent.java index 421f78fae..63c7416e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContent.java @@ -4,31 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardEditContent.DashboardEditContentSerializer.class) +@JsonDeserialize(using = DashboardEditContent.DashboardEditContentDeserializer.class) public class DashboardEditContent { /** */ - @JsonIgnore private String dashboardId; + private String dashboardId; /** * The title of this dashboard that appears in list views and at the top of the dashboard page. */ - @JsonProperty("name") private String name; /** * Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as * viewer" behavior) or `"owner"` (signifying "run as owner" behavior) */ - @JsonProperty("run_as_role") private RunAsRole runAsRole; /** */ - @JsonProperty("tags") private Collection tags; public DashboardEditContent setDashboardId(String dashboardId) { @@ -92,4 +99,46 @@ public String toString() { .add("tags", tags) .toString(); } + + DashboardEditContentPb toPb() { + DashboardEditContentPb pb = new DashboardEditContentPb(); + pb.setDashboardId(dashboardId); + pb.setName(name); + pb.setRunAsRole(runAsRole); + pb.setTags(tags); + + return pb; + } + + static DashboardEditContent fromPb(DashboardEditContentPb pb) { + DashboardEditContent model = new DashboardEditContent(); + model.setDashboardId(pb.getDashboardId()); + model.setName(pb.getName()); + model.setRunAsRole(pb.getRunAsRole()); + model.setTags(pb.getTags()); + + return model; + } + + public static class DashboardEditContentSerializer extends JsonSerializer { + @Override + public void serialize( + DashboardEditContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardEditContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardEditContentDeserializer + extends JsonDeserializer { + @Override + public DashboardEditContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardEditContentPb pb = mapper.readValue(p, DashboardEditContentPb.class); + return DashboardEditContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContentPb.java new file mode 100755 index 000000000..3451042f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContentPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DashboardEditContentPb { + @JsonIgnore private String dashboardId; + + @JsonProperty("name") + private String name; + + @JsonProperty("run_as_role") + private RunAsRole runAsRole; + + @JsonProperty("tags") + private Collection tags; + + public DashboardEditContentPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DashboardEditContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DashboardEditContentPb setRunAsRole(RunAsRole runAsRole) { + this.runAsRole = runAsRole; + return this; + } + + public RunAsRole getRunAsRole() { + return runAsRole; + } + + public DashboardEditContentPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + DashboardEditContentPb that = (DashboardEditContentPb) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(name, that.name) + && Objects.equals(runAsRole, that.runAsRole) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, name, runAsRole, tags); + } + + @Override + public String toString() { + return new ToStringer(DashboardEditContentPb.class) + .add("dashboardId", dashboardId) + .add("name", name) + .add("runAsRole", runAsRole) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptions.java index 2bb72bb8b..805c3c495 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptions.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardOptions.DashboardOptionsSerializer.class) +@JsonDeserialize(using = DashboardOptions.DashboardOptionsDeserializer.class) public class DashboardOptions { /** * The timestamp when this dashboard was moved to trash. 
Only present when the `is_archived` * property is `true`. Trashed items are deleted after thirty days. */ - @JsonProperty("moved_to_trash_at") private String movedToTrashAt; public DashboardOptions setMovedToTrashAt(String movedToTrashAt) { @@ -42,4 +52,38 @@ public int hashCode() { public String toString() { return new ToStringer(DashboardOptions.class).add("movedToTrashAt", movedToTrashAt).toString(); } + + DashboardOptionsPb toPb() { + DashboardOptionsPb pb = new DashboardOptionsPb(); + pb.setMovedToTrashAt(movedToTrashAt); + + return pb; + } + + static DashboardOptions fromPb(DashboardOptionsPb pb) { + DashboardOptions model = new DashboardOptions(); + model.setMovedToTrashAt(pb.getMovedToTrashAt()); + + return model; + } + + public static class DashboardOptionsSerializer extends JsonSerializer { + @Override + public void serialize(DashboardOptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardOptionsDeserializer extends JsonDeserializer { + @Override + public DashboardOptions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardOptionsPb pb = mapper.readValue(p, DashboardOptionsPb.class); + return DashboardOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptionsPb.java new file mode 100755 index 000000000..4ce8baeeb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptionsPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DashboardOptionsPb { + @JsonProperty("moved_to_trash_at") + private String movedToTrashAt; + + public DashboardOptionsPb setMovedToTrashAt(String movedToTrashAt) { + this.movedToTrashAt = movedToTrashAt; + return this; + } + + public String getMovedToTrashAt() { + return movedToTrashAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardOptionsPb that = (DashboardOptionsPb) o; + return Objects.equals(movedToTrashAt, that.movedToTrashAt); + } + + @Override + public int hashCode() { + return Objects.hash(movedToTrashAt); + } + + @Override + public String toString() { + return new ToStringer(DashboardOptionsPb.class) + .add("movedToTrashAt", movedToTrashAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPb.java new file mode 100755 index 000000000..c7ec7da05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPb.java @@ -0,0 +1,286 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A JSON representing a dashboard containing widgets of visualizations and text boxes. 
*/ +@Generated +class DashboardPb { + @JsonProperty("can_edit") + private Boolean canEdit; + + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("dashboard_filters_enabled") + private Boolean dashboardFiltersEnabled; + + @JsonProperty("id") + private String id; + + @JsonProperty("is_archived") + private Boolean isArchived; + + @JsonProperty("is_draft") + private Boolean isDraft; + + @JsonProperty("is_favorite") + private Boolean isFavorite; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private DashboardOptions options; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("permission_tier") + private PermissionLevel permissionTier; + + @JsonProperty("slug") + private String slug; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("updated_at") + private String updatedAt; + + @JsonProperty("user") + private User user; + + @JsonProperty("user_id") + private Long userId; + + @JsonProperty("widgets") + private Collection widgets; + + public DashboardPb setCanEdit(Boolean canEdit) { + this.canEdit = canEdit; + return this; + } + + public Boolean getCanEdit() { + return canEdit; + } + + public DashboardPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + } + + public String getCreatedAt() { + return createdAt; + } + + public DashboardPb setDashboardFiltersEnabled(Boolean dashboardFiltersEnabled) { + this.dashboardFiltersEnabled = dashboardFiltersEnabled; + return this; + } + + public Boolean getDashboardFiltersEnabled() { + return dashboardFiltersEnabled; + } + + public DashboardPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public DashboardPb setIsArchived(Boolean isArchived) { + this.isArchived = isArchived; + return this; + } + + public Boolean getIsArchived() { + return isArchived; + } + + public DashboardPb setIsDraft(Boolean isDraft) { + this.isDraft = isDraft; + return this; + } + + 
public Boolean getIsDraft() { + return isDraft; + } + + public DashboardPb setIsFavorite(Boolean isFavorite) { + this.isFavorite = isFavorite; + return this; + } + + public Boolean getIsFavorite() { + return isFavorite; + } + + public DashboardPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DashboardPb setOptions(DashboardOptions options) { + this.options = options; + return this; + } + + public DashboardOptions getOptions() { + return options; + } + + public DashboardPb setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public DashboardPb setPermissionTier(PermissionLevel permissionTier) { + this.permissionTier = permissionTier; + return this; + } + + public PermissionLevel getPermissionTier() { + return permissionTier; + } + + public DashboardPb setSlug(String slug) { + this.slug = slug; + return this; + } + + public String getSlug() { + return slug; + } + + public DashboardPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public DashboardPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + public DashboardPb setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + public DashboardPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + public DashboardPb setWidgets(Collection widgets) { + this.widgets = widgets; + return this; + } + + public Collection getWidgets() { + return widgets; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardPb that = (DashboardPb) o; + return Objects.equals(canEdit, that.canEdit) + && 
Objects.equals(createdAt, that.createdAt) + && Objects.equals(dashboardFiltersEnabled, that.dashboardFiltersEnabled) + && Objects.equals(id, that.id) + && Objects.equals(isArchived, that.isArchived) + && Objects.equals(isDraft, that.isDraft) + && Objects.equals(isFavorite, that.isFavorite) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(parent, that.parent) + && Objects.equals(permissionTier, that.permissionTier) + && Objects.equals(slug, that.slug) + && Objects.equals(tags, that.tags) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(user, that.user) + && Objects.equals(userId, that.userId) + && Objects.equals(widgets, that.widgets); + } + + @Override + public int hashCode() { + return Objects.hash( + canEdit, + createdAt, + dashboardFiltersEnabled, + id, + isArchived, + isDraft, + isFavorite, + name, + options, + parent, + permissionTier, + slug, + tags, + updatedAt, + user, + userId, + widgets); + } + + @Override + public String toString() { + return new ToStringer(DashboardPb.class) + .add("canEdit", canEdit) + .add("createdAt", createdAt) + .add("dashboardFiltersEnabled", dashboardFiltersEnabled) + .add("id", id) + .add("isArchived", isArchived) + .add("isDraft", isDraft) + .add("isFavorite", isFavorite) + .add("name", name) + .add("options", options) + .add("parent", parent) + .add("permissionTier", permissionTier) + .add("slug", slug) + .add("tags", tags) + .add("updatedAt", updatedAt) + .add("user", user) + .add("userId", userId) + .add("widgets", widgets) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContent.java index 941a45daa..6ccca6dfc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContent.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContent.java @@ -4,39 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DashboardPostContent.DashboardPostContentSerializer.class) +@JsonDeserialize(using = DashboardPostContent.DashboardPostContentDeserializer.class) public class DashboardPostContent { /** Indicates whether the dashboard filters are enabled */ - @JsonProperty("dashboard_filters_enabled") private Boolean dashboardFiltersEnabled; /** Indicates whether this dashboard object should appear in the current user's favorites list. */ - @JsonProperty("is_favorite") private Boolean isFavorite; /** * The title of this dashboard that appears in list views and at the top of the dashboard page. */ - @JsonProperty("name") private String name; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** * Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as * viewer" behavior) or `"owner"` (signifying "run as owner" behavior) */ - @JsonProperty("run_as_role") private RunAsRole runAsRole; /** */ - @JsonProperty("tags") private Collection tags; public DashboardPostContent setDashboardFiltersEnabled(Boolean dashboardFiltersEnabled) { @@ -122,4 +127,50 @@ public String toString() { .add("tags", tags) .toString(); } + + DashboardPostContentPb toPb() { + DashboardPostContentPb pb = new DashboardPostContentPb(); + pb.setDashboardFiltersEnabled(dashboardFiltersEnabled); + pb.setIsFavorite(isFavorite); + pb.setName(name); + pb.setParent(parent); + pb.setRunAsRole(runAsRole); + pb.setTags(tags); + + return pb; + } + + static DashboardPostContent fromPb(DashboardPostContentPb pb) { + DashboardPostContent model = new DashboardPostContent(); + model.setDashboardFiltersEnabled(pb.getDashboardFiltersEnabled()); + model.setIsFavorite(pb.getIsFavorite()); + model.setName(pb.getName()); + model.setParent(pb.getParent()); + model.setRunAsRole(pb.getRunAsRole()); + model.setTags(pb.getTags()); + + return model; + } + + public static class DashboardPostContentSerializer extends JsonSerializer { + @Override + public void serialize( + DashboardPostContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DashboardPostContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DashboardPostContentDeserializer + extends JsonDeserializer { + @Override + public DashboardPostContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DashboardPostContentPb pb = mapper.readValue(p, DashboardPostContentPb.class); + return DashboardPostContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContentPb.java new file mode 100755 index 000000000..1a3dcddf6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardPostContentPb.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DashboardPostContentPb { + @JsonProperty("dashboard_filters_enabled") + private Boolean dashboardFiltersEnabled; + + @JsonProperty("is_favorite") + private Boolean isFavorite; + + @JsonProperty("name") + private String name; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("run_as_role") + private RunAsRole runAsRole; + + @JsonProperty("tags") + private Collection tags; + + public DashboardPostContentPb setDashboardFiltersEnabled(Boolean dashboardFiltersEnabled) { + this.dashboardFiltersEnabled = dashboardFiltersEnabled; + return this; + } + + public Boolean getDashboardFiltersEnabled() { + return dashboardFiltersEnabled; + } + + public DashboardPostContentPb setIsFavorite(Boolean isFavorite) { + this.isFavorite = isFavorite; + return this; + } + + public Boolean getIsFavorite() { + return isFavorite; + } + + public DashboardPostContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DashboardPostContentPb setParent(String parent) { + this.parent = parent; + return 
this; + } + + public String getParent() { + return parent; + } + + public DashboardPostContentPb setRunAsRole(RunAsRole runAsRole) { + this.runAsRole = runAsRole; + return this; + } + + public RunAsRole getRunAsRole() { + return runAsRole; + } + + public DashboardPostContentPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardPostContentPb that = (DashboardPostContentPb) o; + return Objects.equals(dashboardFiltersEnabled, that.dashboardFiltersEnabled) + && Objects.equals(isFavorite, that.isFavorite) + && Objects.equals(name, that.name) + && Objects.equals(parent, that.parent) + && Objects.equals(runAsRole, that.runAsRole) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardFiltersEnabled, isFavorite, name, parent, runAsRole, tags); + } + + @Override + public String toString() { + return new ToStringer(DashboardPostContentPb.class) + .add("dashboardFiltersEnabled", dashboardFiltersEnabled) + .add("isFavorite", isFavorite) + .add("name", name) + .add("parent", parent) + .add("runAsRole", runAsRole) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java index 55287384f..11d8ee00e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java @@ -21,7 +21,7 @@ public Widget create(CreateWidget request) { String path = "/api/2.0/preview/sql/widgets"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Widget.class); @@ -35,7 +35,7 @@ public void delete(DeleteDashboardWidgetRequest request) { String path = String.format("/api/2.0/preview/sql/widgets/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public Widget update(CreateWidget request) { String path = String.format("/api/2.0/preview/sql/widgets/%s", request.getId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Widget.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java index fb45272bc..8b45f25bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java @@ -21,7 +21,7 @@ public Dashboard create(DashboardPostContent request) { String path = "/api/2.0/preview/sql/dashboards"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Dashboard.class); @@ -35,7 +35,7 @@ public void delete(DeleteDashboardRequest request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", 
request.getDashboardId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public Dashboard get(GetDashboardRequest request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", request.getDashboardId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Dashboard.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListResponse list(ListDashboardsRequest request) { String path = "/api/2.0/preview/sql/dashboards"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListResponse.class); } catch (IOException e) { @@ -75,7 +75,7 @@ public void restore(RestoreDashboardRequest request) { String.format("/api/2.0/preview/sql/dashboards/trash/%s", request.getDashboardId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, RestoreResponse.class); } catch (IOException e) { @@ -88,7 +88,7 @@ public Dashboard update(DashboardEditContent request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", request.getDashboardId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Dashboard.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSource.java index 3dfc9f064..e52a58735 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSource.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** A JSON object representing a DBSQL data source / SQL warehouse. */ @Generated +@JsonSerialize(using = DataSource.DataSourceSerializer.class) +@JsonDeserialize(using = DataSource.DataSourceDeserializer.class) public class DataSource { /** * Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -16,42 +27,33 @@ public class DataSource { * *

[Learn more]: https://docs.databricks.com/api/workspace/datasources/list */ - @JsonProperty("id") private String id; /** * The string name of this data source / SQL warehouse as it appears in the Databricks SQL web * application. */ - @JsonProperty("name") private String name; /** Reserved for internal use. */ - @JsonProperty("pause_reason") private String pauseReason; /** Reserved for internal use. */ - @JsonProperty("paused") private Long paused; /** Reserved for internal use. */ - @JsonProperty("supports_auto_limit") private Boolean supportsAutoLimit; /** Reserved for internal use. */ - @JsonProperty("syntax") private String syntax; /** The type of data source. For SQL warehouses, this will be `databricks_internal`. */ - @JsonProperty("type") private String typeValue; /** Reserved for internal use. */ - @JsonProperty("view_only") private Boolean viewOnly; /** The ID of the associated SQL warehouse, if this data source is backed by a SQL warehouse. */ - @JsonProperty("warehouse_id") private String warehouseId; public DataSource setId(String id) { @@ -171,4 +173,53 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + DataSourcePb toPb() { + DataSourcePb pb = new DataSourcePb(); + pb.setId(id); + pb.setName(name); + pb.setPauseReason(pauseReason); + pb.setPaused(paused); + pb.setSupportsAutoLimit(supportsAutoLimit); + pb.setSyntax(syntax); + pb.setType(typeValue); + pb.setViewOnly(viewOnly); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static DataSource fromPb(DataSourcePb pb) { + DataSource model = new DataSource(); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setPauseReason(pb.getPauseReason()); + model.setPaused(pb.getPaused()); + model.setSupportsAutoLimit(pb.getSupportsAutoLimit()); + model.setSyntax(pb.getSyntax()); + model.setType(pb.getType()); + model.setViewOnly(pb.getViewOnly()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class DataSourceSerializer 
extends JsonSerializer { + @Override + public void serialize(DataSource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DataSourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DataSourceDeserializer extends JsonDeserializer { + @Override + public DataSource deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DataSourcePb pb = mapper.readValue(p, DataSourcePb.class); + return DataSource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcePb.java new file mode 100755 index 000000000..f341ed71d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcePb.java @@ -0,0 +1,157 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A JSON object representing a DBSQL data source / SQL warehouse. 
*/ +@Generated +class DataSourcePb { + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("pause_reason") + private String pauseReason; + + @JsonProperty("paused") + private Long paused; + + @JsonProperty("supports_auto_limit") + private Boolean supportsAutoLimit; + + @JsonProperty("syntax") + private String syntax; + + @JsonProperty("type") + private String typeValue; + + @JsonProperty("view_only") + private Boolean viewOnly; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public DataSourcePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public DataSourcePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DataSourcePb setPauseReason(String pauseReason) { + this.pauseReason = pauseReason; + return this; + } + + public String getPauseReason() { + return pauseReason; + } + + public DataSourcePb setPaused(Long paused) { + this.paused = paused; + return this; + } + + public Long getPaused() { + return paused; + } + + public DataSourcePb setSupportsAutoLimit(Boolean supportsAutoLimit) { + this.supportsAutoLimit = supportsAutoLimit; + return this; + } + + public Boolean getSupportsAutoLimit() { + return supportsAutoLimit; + } + + public DataSourcePb setSyntax(String syntax) { + this.syntax = syntax; + return this; + } + + public String getSyntax() { + return syntax; + } + + public DataSourcePb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + public DataSourcePb setViewOnly(Boolean viewOnly) { + this.viewOnly = viewOnly; + return this; + } + + public Boolean getViewOnly() { + return viewOnly; + } + + public DataSourcePb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override 
+ public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataSourcePb that = (DataSourcePb) o; + return Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(pauseReason, that.pauseReason) + && Objects.equals(paused, that.paused) + && Objects.equals(supportsAutoLimit, that.supportsAutoLimit) + && Objects.equals(syntax, that.syntax) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(viewOnly, that.viewOnly) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + id, name, pauseReason, paused, supportsAutoLimit, syntax, typeValue, viewOnly, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DataSourcePb.class) + .add("id", id) + .add("name", name) + .add("pauseReason", pauseReason) + .add("paused", paused) + .add("supportsAutoLimit", supportsAutoLimit) + .add("syntax", syntax) + .add("typeValue", typeValue) + .add("viewOnly", viewOnly) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRange.java index 34da06295..b8397b468 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRange.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DateRange.DateRangeSerializer.class) +@JsonDeserialize(using = DateRange.DateRangeDeserializer.class) public class DateRange { /** */ - @JsonProperty("end") private String end; /** */ - @JsonProperty("start") private String start; public DateRange setEnd(String end) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(DateRange.class).add("end", end).add("start", start).toString(); } + + DateRangePb toPb() { + DateRangePb pb = new DateRangePb(); + pb.setEnd(end); + pb.setStart(start); + + return pb; + } + + static DateRange fromPb(DateRangePb pb) { + DateRange model = new DateRange(); + model.setEnd(pb.getEnd()); + model.setStart(pb.getStart()); + + return model; + } + + public static class DateRangeSerializer extends JsonSerializer { + @Override + public void serialize(DateRange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DateRangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DateRangeDeserializer extends JsonDeserializer { + @Override + public DateRange deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DateRangePb pb = mapper.readValue(p, DateRangePb.class); + return DateRange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangePb.java new file mode 100755 index 000000000..a9665c6a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DateRangePb { + @JsonProperty("end") + private String end; + + @JsonProperty("start") + private String start; + + public DateRangePb setEnd(String end) { + this.end = end; + return this; + } + + public String getEnd() { + return end; + } + + public DateRangePb setStart(String start) { + this.start = start; + return this; + } + + public String getStart() { + return start; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DateRangePb that = (DateRangePb) o; + return Objects.equals(end, that.end) && Objects.equals(start, that.start); + } + + @Override + public int hashCode() { + return Objects.hash(end, start); + } + + @Override + public String toString() { + return new ToStringer(DateRangePb.class).add("end", end).add("start", start).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValue.java index cd955201c..644f285f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValue.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValue.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DateRangeValue.DateRangeValueSerializer.class) +@JsonDeserialize(using = DateRangeValue.DateRangeValueDeserializer.class) public class DateRangeValue { /** Manually specified date-time range value. */ - @JsonProperty("date_range_value") private DateRange dateRangeValue; /** Dynamic date-time range value based on current date-time. */ - @JsonProperty("dynamic_date_range_value") private DateRangeValueDynamicDateRange dynamicDateRangeValue; /** * Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION * (YYYY-MM-DD). 
*/ - @JsonProperty("precision") private DatePrecision precision; /** */ - @JsonProperty("start_day_of_week") private Long startDayOfWeek; public DateRangeValue setDateRangeValue(DateRange dateRangeValue) { @@ -90,4 +97,44 @@ public String toString() { .add("startDayOfWeek", startDayOfWeek) .toString(); } + + DateRangeValuePb toPb() { + DateRangeValuePb pb = new DateRangeValuePb(); + pb.setDateRangeValue(dateRangeValue); + pb.setDynamicDateRangeValue(dynamicDateRangeValue); + pb.setPrecision(precision); + pb.setStartDayOfWeek(startDayOfWeek); + + return pb; + } + + static DateRangeValue fromPb(DateRangeValuePb pb) { + DateRangeValue model = new DateRangeValue(); + model.setDateRangeValue(pb.getDateRangeValue()); + model.setDynamicDateRangeValue(pb.getDynamicDateRangeValue()); + model.setPrecision(pb.getPrecision()); + model.setStartDayOfWeek(pb.getStartDayOfWeek()); + + return model; + } + + public static class DateRangeValueSerializer extends JsonSerializer { + @Override + public void serialize(DateRangeValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DateRangeValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DateRangeValueDeserializer extends JsonDeserializer { + @Override + public DateRangeValue deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DateRangeValuePb pb = mapper.readValue(p, DateRangeValuePb.class); + return DateRangeValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValuePb.java new file mode 100755 index 000000000..ac72c2fb2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateRangeValuePb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DateRangeValuePb { + @JsonProperty("date_range_value") + private DateRange dateRangeValue; + + @JsonProperty("dynamic_date_range_value") + private DateRangeValueDynamicDateRange dynamicDateRangeValue; + + @JsonProperty("precision") + private DatePrecision precision; + + @JsonProperty("start_day_of_week") + private Long startDayOfWeek; + + public DateRangeValuePb setDateRangeValue(DateRange dateRangeValue) { + this.dateRangeValue = dateRangeValue; + return this; + } + + public DateRange getDateRangeValue() { + return dateRangeValue; + } + + public DateRangeValuePb setDynamicDateRangeValue( + DateRangeValueDynamicDateRange dynamicDateRangeValue) { + this.dynamicDateRangeValue = dynamicDateRangeValue; + return this; + } + + public DateRangeValueDynamicDateRange getDynamicDateRangeValue() { + return dynamicDateRangeValue; + } + + public DateRangeValuePb setPrecision(DatePrecision precision) { + this.precision = precision; + return this; + } + + public DatePrecision getPrecision() { + return precision; + } + + public DateRangeValuePb setStartDayOfWeek(Long startDayOfWeek) { + this.startDayOfWeek = startDayOfWeek; + return this; + } + + 
public Long getStartDayOfWeek() { + return startDayOfWeek; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DateRangeValuePb that = (DateRangeValuePb) o; + return Objects.equals(dateRangeValue, that.dateRangeValue) + && Objects.equals(dynamicDateRangeValue, that.dynamicDateRangeValue) + && Objects.equals(precision, that.precision) + && Objects.equals(startDayOfWeek, that.startDayOfWeek); + } + + @Override + public int hashCode() { + return Objects.hash(dateRangeValue, dynamicDateRangeValue, precision, startDayOfWeek); + } + + @Override + public String toString() { + return new ToStringer(DateRangeValuePb.class) + .add("dateRangeValue", dateRangeValue) + .add("dynamicDateRangeValue", dynamicDateRangeValue) + .add("precision", precision) + .add("startDayOfWeek", startDayOfWeek) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValue.java index 4125832ec..8cd28c280 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValue.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; 
import java.util.Objects; @Generated +@JsonSerialize(using = DateValue.DateValueSerializer.class) +@JsonDeserialize(using = DateValue.DateValueDeserializer.class) public class DateValue { /** Manually specified date-time value. */ - @JsonProperty("date_value") private String dateValue; /** Dynamic date-time value based on current date-time. */ - @JsonProperty("dynamic_date_value") private DateValueDynamicDate dynamicDateValue; /** * Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION * (YYYY-MM-DD). */ - @JsonProperty("precision") private DatePrecision precision; public DateValue setDateValue(String dateValue) { @@ -74,4 +82,41 @@ public String toString() { .add("precision", precision) .toString(); } + + DateValuePb toPb() { + DateValuePb pb = new DateValuePb(); + pb.setDateValue(dateValue); + pb.setDynamicDateValue(dynamicDateValue); + pb.setPrecision(precision); + + return pb; + } + + static DateValue fromPb(DateValuePb pb) { + DateValue model = new DateValue(); + model.setDateValue(pb.getDateValue()); + model.setDynamicDateValue(pb.getDynamicDateValue()); + model.setPrecision(pb.getPrecision()); + + return model; + } + + public static class DateValueSerializer extends JsonSerializer { + @Override + public void serialize(DateValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DateValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DateValueDeserializer extends JsonDeserializer { + @Override + public DateValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DateValuePb pb = mapper.readValue(p, DateValuePb.class); + return DateValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValuePb.java new file mode 100755 index 000000000..3450bb803 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DateValuePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DateValuePb { + @JsonProperty("date_value") + private String dateValue; + + @JsonProperty("dynamic_date_value") + private DateValueDynamicDate dynamicDateValue; + + @JsonProperty("precision") + private DatePrecision precision; + + public DateValuePb setDateValue(String dateValue) { + this.dateValue = dateValue; + return this; + } + + public String getDateValue() { + return dateValue; + } + + public DateValuePb setDynamicDateValue(DateValueDynamicDate dynamicDateValue) { + this.dynamicDateValue = dynamicDateValue; + return this; + } + + public DateValueDynamicDate getDynamicDateValue() { + return dynamicDateValue; + } + + public DateValuePb setPrecision(DatePrecision precision) { + this.precision = precision; + return this; + } + + public DatePrecision getPrecision() { + return precision; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DateValuePb that = (DateValuePb) o; + return Objects.equals(dateValue, that.dateValue) + && Objects.equals(dynamicDateValue, that.dynamicDateValue) + && Objects.equals(precision, that.precision); + } + + @Override + public int 
hashCode() { + return Objects.hash(dateValue, dynamicDateValue, precision); + } + + @Override + public String toString() { + return new ToStringer(DateValuePb.class) + .add("dateValue", dateValue) + .add("dynamicDateValue", dynamicDateValue) + .add("precision", precision) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java index 858e01493..0e1f7736e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java @@ -24,7 +24,7 @@ public GetResponse get(GetDbsqlPermissionRequest request) { request.getObjectType(), request.getObjectId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetResponse.class); } catch (IOException e) { @@ -40,7 +40,7 @@ public SetResponse set(SetRequest request) { request.getObjectType(), request.getObjectId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, SetResponse.class); @@ -57,7 +57,7 @@ public Success transferOwnership(TransferOwnershipRequest request) { request.getObjectType(), request.getObjectId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Success.class); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequest.java index 7a6613aff..561f617a6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an alert */ @Generated +@JsonSerialize(using = DeleteAlertsLegacyRequest.DeleteAlertsLegacyRequestSerializer.class) +@JsonDeserialize(using = DeleteAlertsLegacyRequest.DeleteAlertsLegacyRequestDeserializer.class) public class DeleteAlertsLegacyRequest { /** */ - @JsonIgnore private String alertId; + private String alertId; public DeleteAlertsLegacyRequest setAlertId(String alertId) { this.alertId = alertId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAlertsLegacyRequest.class).add("alertId", alertId).toString(); } + + DeleteAlertsLegacyRequestPb toPb() { + DeleteAlertsLegacyRequestPb pb = new DeleteAlertsLegacyRequestPb(); + pb.setAlertId(alertId); + + return pb; + } + + static DeleteAlertsLegacyRequest fromPb(DeleteAlertsLegacyRequestPb pb) { + 
DeleteAlertsLegacyRequest model = new DeleteAlertsLegacyRequest(); + model.setAlertId(pb.getAlertId()); + + return model; + } + + public static class DeleteAlertsLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteAlertsLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAlertsLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAlertsLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteAlertsLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAlertsLegacyRequestPb pb = mapper.readValue(p, DeleteAlertsLegacyRequestPb.class); + return DeleteAlertsLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequestPb.java new file mode 100755 index 000000000..31d33534b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteAlertsLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an alert */ +@Generated +class DeleteAlertsLegacyRequestPb { + @JsonIgnore private String alertId; + + public DeleteAlertsLegacyRequestPb setAlertId(String alertId) { + this.alertId = alertId; + return this; + } + + public String getAlertId() { + return alertId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAlertsLegacyRequestPb that = (DeleteAlertsLegacyRequestPb) o; + return Objects.equals(alertId, that.alertId); + } + + @Override + public int hashCode() { + return Objects.hash(alertId); + } + + @Override + public String toString() { + return new ToStringer(DeleteAlertsLegacyRequestPb.class).add("alertId", alertId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequest.java index d58ac278b..6be867283 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Remove a dashboard */ @Generated +@JsonSerialize(using = DeleteDashboardRequest.DeleteDashboardRequestSerializer.class) +@JsonDeserialize(using = DeleteDashboardRequest.DeleteDashboardRequestDeserializer.class) public class DeleteDashboardRequest { /** */ - @JsonIgnore private String dashboardId; + private String dashboardId; public DeleteDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDashboardRequest.class).add("dashboardId", dashboardId).toString(); } + + DeleteDashboardRequestPb toPb() { + DeleteDashboardRequestPb pb = new DeleteDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static DeleteDashboardRequest fromPb(DeleteDashboardRequestPb pb) { + DeleteDashboardRequest model = new DeleteDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class DeleteDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDashboardRequestPb pb = mapper.readValue(p, DeleteDashboardRequestPb.class); + return DeleteDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequestPb.java new file mode 100755 index 000000000..ce344d993 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Remove a dashboard */ +@Generated +class DeleteDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public DeleteDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDashboardRequestPb that = (DeleteDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(DeleteDashboardRequestPb.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java index 83ae374d7..39ae06a76 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Remove widget */ @Generated +@JsonSerialize(using = DeleteDashboardWidgetRequest.DeleteDashboardWidgetRequestSerializer.class) +@JsonDeserialize( + using = DeleteDashboardWidgetRequest.DeleteDashboardWidgetRequestDeserializer.class) public class DeleteDashboardWidgetRequest { /** Widget ID returned by :method:dashboardwidgets/create */ - @JsonIgnore private String id; + private String id; public DeleteDashboardWidgetRequest setId(String id) { this.id = id; @@ -39,4 +51,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteDashboardWidgetRequest.class).add("id", id).toString(); } + + DeleteDashboardWidgetRequestPb toPb() { + DeleteDashboardWidgetRequestPb pb = new DeleteDashboardWidgetRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteDashboardWidgetRequest fromPb(DeleteDashboardWidgetRequestPb pb) { + DeleteDashboardWidgetRequest model = new DeleteDashboardWidgetRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteDashboardWidgetRequestSerializer + extends 
JsonSerializer { + @Override + public void serialize( + DeleteDashboardWidgetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDashboardWidgetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDashboardWidgetRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDashboardWidgetRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDashboardWidgetRequestPb pb = mapper.readValue(p, DeleteDashboardWidgetRequestPb.class); + return DeleteDashboardWidgetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequestPb.java new file mode 100755 index 000000000..029fea7ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Remove widget */ +@Generated +class DeleteDashboardWidgetRequestPb { + @JsonIgnore private String id; + + public DeleteDashboardWidgetRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDashboardWidgetRequestPb that = (DeleteDashboardWidgetRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteDashboardWidgetRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java index 5d73085bb..415972570 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a query */ @Generated +@JsonSerialize(using = DeleteQueriesLegacyRequest.DeleteQueriesLegacyRequestSerializer.class) +@JsonDeserialize(using = DeleteQueriesLegacyRequest.DeleteQueriesLegacyRequestDeserializer.class) public class DeleteQueriesLegacyRequest { /** */ - @JsonIgnore private String queryId; + private String queryId; public DeleteQueriesLegacyRequest setQueryId(String queryId) { this.queryId = queryId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteQueriesLegacyRequest.class).add("queryId", queryId).toString(); } + + DeleteQueriesLegacyRequestPb toPb() { + DeleteQueriesLegacyRequestPb pb = new DeleteQueriesLegacyRequestPb(); + pb.setQueryId(queryId); + + return pb; + } + + static DeleteQueriesLegacyRequest fromPb(DeleteQueriesLegacyRequestPb pb) { + DeleteQueriesLegacyRequest model = new DeleteQueriesLegacyRequest(); + model.setQueryId(pb.getQueryId()); + + return model; + } + + public static class DeleteQueriesLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteQueriesLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteQueriesLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteQueriesLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteQueriesLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteQueriesLegacyRequestPb pb = mapper.readValue(p, DeleteQueriesLegacyRequestPb.class); + return DeleteQueriesLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequestPb.java new file mode 100755 index 000000000..683fc56cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a query */ +@Generated +class DeleteQueriesLegacyRequestPb { + @JsonIgnore private String queryId; + + public DeleteQueriesLegacyRequestPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQueriesLegacyRequestPb that = (DeleteQueriesLegacyRequestPb) o; + return Objects.equals(queryId, that.queryId); + } + + @Override + public int hashCode() { + return Objects.hash(queryId); + } + + @Override + public String toString() { + return new ToStringer(DeleteQueriesLegacyRequestPb.class).add("queryId", queryId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java index 9607c7066..202c2b2e2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java @@ -4,14 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Remove visualization */ @Generated +@JsonSerialize( + using = + DeleteQueryVisualizationsLegacyRequest.DeleteQueryVisualizationsLegacyRequestSerializer + .class) +@JsonDeserialize( + using = + DeleteQueryVisualizationsLegacyRequest.DeleteQueryVisualizationsLegacyRequestDeserializer + .class) public class DeleteQueryVisualizationsLegacyRequest { /** Widget ID returned by :method:queryvizualisations/create */ - @JsonIgnore private String id; + private String id; public DeleteQueryVisualizationsLegacyRequest setId(String id) { this.id = id; @@ -39,4 +56,45 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteQueryVisualizationsLegacyRequest.class).add("id", id).toString(); } + + DeleteQueryVisualizationsLegacyRequestPb toPb() { + DeleteQueryVisualizationsLegacyRequestPb pb = new DeleteQueryVisualizationsLegacyRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteQueryVisualizationsLegacyRequest fromPb( + DeleteQueryVisualizationsLegacyRequestPb pb) { + 
DeleteQueryVisualizationsLegacyRequest model = new DeleteQueryVisualizationsLegacyRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteQueryVisualizationsLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteQueryVisualizationsLegacyRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + DeleteQueryVisualizationsLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteQueryVisualizationsLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteQueryVisualizationsLegacyRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteQueryVisualizationsLegacyRequestPb pb = + mapper.readValue(p, DeleteQueryVisualizationsLegacyRequestPb.class); + return DeleteQueryVisualizationsLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequestPb.java new file mode 100755 index 000000000..7de736642 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Remove visualization */ +@Generated +class DeleteQueryVisualizationsLegacyRequestPb { + @JsonIgnore private String id; + + public DeleteQueryVisualizationsLegacyRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQueryVisualizationsLegacyRequestPb that = (DeleteQueryVisualizationsLegacyRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteQueryVisualizationsLegacyRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java index 8395b3a12..b7e646a12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponsePb.java new file mode 100755 index 000000000..015d6f4a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java index f8d7fa8ac..0d51ad1a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Remove a visualization */ @Generated +@JsonSerialize(using = DeleteVisualizationRequest.DeleteVisualizationRequestSerializer.class) +@JsonDeserialize(using = DeleteVisualizationRequest.DeleteVisualizationRequestDeserializer.class) public class 
DeleteVisualizationRequest { /** */ - @JsonIgnore private String id; + private String id; public DeleteVisualizationRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteVisualizationRequest.class).add("id", id).toString(); } + + DeleteVisualizationRequestPb toPb() { + DeleteVisualizationRequestPb pb = new DeleteVisualizationRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteVisualizationRequest fromPb(DeleteVisualizationRequestPb pb) { + DeleteVisualizationRequest model = new DeleteVisualizationRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteVisualizationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteVisualizationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteVisualizationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteVisualizationRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteVisualizationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteVisualizationRequestPb pb = mapper.readValue(p, DeleteVisualizationRequestPb.class); + return DeleteVisualizationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequestPb.java new file mode 100755 index 000000000..2c24ae3cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Remove a visualization */ +@Generated +class DeleteVisualizationRequestPb { + @JsonIgnore private String id; + + public DeleteVisualizationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteVisualizationRequestPb that = (DeleteVisualizationRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteVisualizationRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java index c3f6a1d61..cc963bad8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a warehouse */ @Generated +@JsonSerialize(using = DeleteWarehouseRequest.DeleteWarehouseRequestSerializer.class) +@JsonDeserialize(using = DeleteWarehouseRequest.DeleteWarehouseRequestDeserializer.class) public class DeleteWarehouseRequest { /** Required. Id of the SQL warehouse. */ - @JsonIgnore private String id; + private String id; public DeleteWarehouseRequest setId(String id) { this.id = id; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWarehouseRequest.class).add("id", id).toString(); } + + DeleteWarehouseRequestPb toPb() { + DeleteWarehouseRequestPb pb = new DeleteWarehouseRequestPb(); + pb.setId(id); + + return pb; + } + + static DeleteWarehouseRequest fromPb(DeleteWarehouseRequestPb pb) { + DeleteWarehouseRequest model = new DeleteWarehouseRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class DeleteWarehouseRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWarehouseRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWarehouseRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWarehouseRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteWarehouseRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWarehouseRequestPb pb = mapper.readValue(p, DeleteWarehouseRequestPb.class); + return DeleteWarehouseRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequestPb.java new file mode 100755 index 000000000..6923d0847 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a warehouse */ +@Generated +class DeleteWarehouseRequestPb { + @JsonIgnore private String id; + + public DeleteWarehouseRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWarehouseRequestPb that = (DeleteWarehouseRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteWarehouseRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java index 17a6c84e4..61220cdf2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteWarehouseResponse.DeleteWarehouseResponseSerializer.class) +@JsonDeserialize(using = DeleteWarehouseResponse.DeleteWarehouseResponseDeserializer.class) public class DeleteWarehouseResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteWarehouseResponse.class).toString(); } + + DeleteWarehouseResponsePb toPb() { + DeleteWarehouseResponsePb pb = new DeleteWarehouseResponsePb(); + + return pb; + } + + static DeleteWarehouseResponse fromPb(DeleteWarehouseResponsePb pb) { + DeleteWarehouseResponse model = new DeleteWarehouseResponse(); + + return model; + } + + public static class DeleteWarehouseResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by 
us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteWarehouseResponsePb pb = mapper.readValue(p, DeleteWarehouseResponsePb.class); + return DeleteWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponsePb.java new file mode 100755 index 000000000..2b7846f11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteWarehouseResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteWarehouseResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java index db610f662..62c177dc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditAlert.EditAlertSerializer.class) +@JsonDeserialize(using = EditAlert.EditAlertDeserializer.class) public class EditAlert { /** */ - @JsonIgnore private String alertId; + private String alertId; /** Name of the alert. */ - @JsonProperty("name") private String name; /** Alert configuration options. */ - @JsonProperty("options") private AlertOptions options; /** Query ID. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds after being triggered before the alert rearms itself and can be triggered * again. If `null`, alert will never be triggered again. 
*/ - @JsonProperty("rearm") private Long rearm; public EditAlert setAlertId(String alertId) { @@ -104,4 +110,45 @@ public String toString() { .add("rearm", rearm) .toString(); } + + EditAlertPb toPb() { + EditAlertPb pb = new EditAlertPb(); + pb.setAlertId(alertId); + pb.setName(name); + pb.setOptions(options); + pb.setQueryId(queryId); + pb.setRearm(rearm); + + return pb; + } + + static EditAlert fromPb(EditAlertPb pb) { + EditAlert model = new EditAlert(); + model.setAlertId(pb.getAlertId()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setQueryId(pb.getQueryId()); + model.setRearm(pb.getRearm()); + + return model; + } + + public static class EditAlertSerializer extends JsonSerializer { + @Override + public void serialize(EditAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditAlertDeserializer extends JsonDeserializer { + @Override + public EditAlert deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditAlertPb pb = mapper.readValue(p, EditAlertPb.class); + return EditAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlertPb.java new file mode 100755 index 000000000..b27621e9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlertPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EditAlertPb { + @JsonIgnore private String alertId; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private AlertOptions options; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("rearm") + private Long rearm; + + public EditAlertPb setAlertId(String alertId) { + this.alertId = alertId; + return this; + } + + public String getAlertId() { + return alertId; + } + + public EditAlertPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EditAlertPb setOptions(AlertOptions options) { + this.options = options; + return this; + } + + public AlertOptions getOptions() { + return options; + } + + public EditAlertPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public EditAlertPb setRearm(Long rearm) { + this.rearm = rearm; + return this; + } + + public Long getRearm() { + return rearm; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditAlertPb that = (EditAlertPb) o; + return Objects.equals(alertId, that.alertId) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(queryId, that.queryId) + && Objects.equals(rearm, that.rearm); + } + + @Override + public int hashCode() { + return Objects.hash(alertId, name, options, queryId, rearm); + } + + @Override + public String toString() { + return new ToStringer(EditAlertPb.class) + .add("alertId", alertId) + .add("name", name) + .add("options", options) + .add("queryId", queryId) + 
.add("rearm", rearm) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java index 726c96cd6..90bcc8fdd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java @@ -4,11 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditWarehouseRequest.EditWarehouseRequestSerializer.class) +@JsonDeserialize(using = EditWarehouseRequest.EditWarehouseRequestDeserializer.class) public class EditWarehouseRequest { /** * The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) @@ -18,11 +28,9 @@ public class EditWarehouseRequest { * *

Defaults to 120 mins */ - @JsonProperty("auto_stop_mins") private Long autoStopMins; /** Channel Details */ - @JsonProperty("channel") private Channel channel; /** @@ -33,11 +41,9 @@ public class EditWarehouseRequest { *

Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - * 3X-Large - 4X-Large */ - @JsonProperty("cluster_size") private String clusterSize; /** warehouse creator name */ - @JsonProperty("creator_name") private String creatorName; /** @@ -45,18 +51,15 @@ public class EditWarehouseRequest { * *

Defaults to false. */ - @JsonProperty("enable_photon") private Boolean enablePhoton; /** Configures whether the warehouse should use serverless compute. */ - @JsonProperty("enable_serverless_compute") private Boolean enableServerlessCompute; /** Required. Id of the warehouse to configure. */ - @JsonIgnore private String id; + private String id; /** Deprecated. Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** @@ -66,7 +69,6 @@ public class EditWarehouseRequest { * *

Defaults to min_clusters if unset. */ - @JsonProperty("max_num_clusters") private Long maxNumClusters; /** @@ -79,7 +81,6 @@ public class EditWarehouseRequest { * *

Defaults to 1 */ - @JsonProperty("min_num_clusters") private Long minNumClusters; /** @@ -87,11 +88,9 @@ public class EditWarehouseRequest { * *

Supported values: - Must be unique within an org. - Must be less than 100 characters. */ - @JsonProperty("name") private String name; /** Configurations whether the warehouse should use spot instances. */ - @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; /** @@ -100,14 +99,12 @@ public class EditWarehouseRequest { * *

Supported values: - Number of tags < 45. */ - @JsonProperty("tags") private EndpointTags tags; /** * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to * `PRO` and also set the field `enable_serverless_compute` to `true`. */ - @JsonProperty("warehouse_type") private EditWarehouseRequestWarehouseType warehouseType; public EditWarehouseRequest setAutoStopMins(Long autoStopMins) { @@ -295,4 +292,66 @@ public String toString() { .add("warehouseType", warehouseType) .toString(); } + + EditWarehouseRequestPb toPb() { + EditWarehouseRequestPb pb = new EditWarehouseRequestPb(); + pb.setAutoStopMins(autoStopMins); + pb.setChannel(channel); + pb.setClusterSize(clusterSize); + pb.setCreatorName(creatorName); + pb.setEnablePhoton(enablePhoton); + pb.setEnableServerlessCompute(enableServerlessCompute); + pb.setId(id); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setMaxNumClusters(maxNumClusters); + pb.setMinNumClusters(minNumClusters); + pb.setName(name); + pb.setSpotInstancePolicy(spotInstancePolicy); + pb.setTags(tags); + pb.setWarehouseType(warehouseType); + + return pb; + } + + static EditWarehouseRequest fromPb(EditWarehouseRequestPb pb) { + EditWarehouseRequest model = new EditWarehouseRequest(); + model.setAutoStopMins(pb.getAutoStopMins()); + model.setChannel(pb.getChannel()); + model.setClusterSize(pb.getClusterSize()); + model.setCreatorName(pb.getCreatorName()); + model.setEnablePhoton(pb.getEnablePhoton()); + model.setEnableServerlessCompute(pb.getEnableServerlessCompute()); + model.setId(pb.getId()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setMaxNumClusters(pb.getMaxNumClusters()); + model.setMinNumClusters(pb.getMinNumClusters()); + model.setName(pb.getName()); + model.setSpotInstancePolicy(pb.getSpotInstancePolicy()); + model.setTags(pb.getTags()); + model.setWarehouseType(pb.getWarehouseType()); + + return model; + } + + public static class EditWarehouseRequestSerializer extends 
JsonSerializer { + @Override + public void serialize( + EditWarehouseRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditWarehouseRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EditWarehouseRequestDeserializer + extends JsonDeserializer { + @Override + public EditWarehouseRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditWarehouseRequestPb pb = mapper.readValue(p, EditWarehouseRequestPb.class); + return EditWarehouseRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestPb.java new file mode 100755 index 000000000..d799fc87f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestPb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EditWarehouseRequestPb { + @JsonProperty("auto_stop_mins") + private Long autoStopMins; + + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("cluster_size") + private String clusterSize; + + @JsonProperty("creator_name") + private String creatorName; + + @JsonProperty("enable_photon") + private Boolean enablePhoton; + + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + + @JsonIgnore private String id; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("max_num_clusters") + private Long maxNumClusters; + + @JsonProperty("min_num_clusters") + private Long minNumClusters; + + @JsonProperty("name") + private String name; + + @JsonProperty("spot_instance_policy") + private SpotInstancePolicy spotInstancePolicy; + + @JsonProperty("tags") + private EndpointTags tags; + + @JsonProperty("warehouse_type") + private EditWarehouseRequestWarehouseType warehouseType; + + public EditWarehouseRequestPb setAutoStopMins(Long autoStopMins) { + this.autoStopMins = autoStopMins; + return this; + } + + public Long getAutoStopMins() { + return autoStopMins; + } + + public EditWarehouseRequestPb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public EditWarehouseRequestPb setClusterSize(String clusterSize) { + this.clusterSize = clusterSize; + return this; + } + + public String getClusterSize() { + return clusterSize; + } + + public EditWarehouseRequestPb setCreatorName(String creatorName) { + this.creatorName = creatorName; + return this; + } + + public String getCreatorName() { + return creatorName; + } + + 
public EditWarehouseRequestPb setEnablePhoton(Boolean enablePhoton) { + this.enablePhoton = enablePhoton; + return this; + } + + public Boolean getEnablePhoton() { + return enablePhoton; + } + + public EditWarehouseRequestPb setEnableServerlessCompute(Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + + public EditWarehouseRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public EditWarehouseRequestPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public EditWarehouseRequestPb setMaxNumClusters(Long maxNumClusters) { + this.maxNumClusters = maxNumClusters; + return this; + } + + public Long getMaxNumClusters() { + return maxNumClusters; + } + + public EditWarehouseRequestPb setMinNumClusters(Long minNumClusters) { + this.minNumClusters = minNumClusters; + return this; + } + + public Long getMinNumClusters() { + return minNumClusters; + } + + public EditWarehouseRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EditWarehouseRequestPb setSpotInstancePolicy(SpotInstancePolicy spotInstancePolicy) { + this.spotInstancePolicy = spotInstancePolicy; + return this; + } + + public SpotInstancePolicy getSpotInstancePolicy() { + return spotInstancePolicy; + } + + public EditWarehouseRequestPb setTags(EndpointTags tags) { + this.tags = tags; + return this; + } + + public EndpointTags getTags() { + return tags; + } + + public EditWarehouseRequestPb setWarehouseType(EditWarehouseRequestWarehouseType warehouseType) { + this.warehouseType = warehouseType; + return this; + } + + public EditWarehouseRequestWarehouseType getWarehouseType() { + 
return warehouseType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EditWarehouseRequestPb that = (EditWarehouseRequestPb) o; + return Objects.equals(autoStopMins, that.autoStopMins) + && Objects.equals(channel, that.channel) + && Objects.equals(clusterSize, that.clusterSize) + && Objects.equals(creatorName, that.creatorName) + && Objects.equals(enablePhoton, that.enablePhoton) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) + && Objects.equals(id, that.id) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(maxNumClusters, that.maxNumClusters) + && Objects.equals(minNumClusters, that.minNumClusters) + && Objects.equals(name, that.name) + && Objects.equals(spotInstancePolicy, that.spotInstancePolicy) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseType, that.warehouseType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoStopMins, + channel, + clusterSize, + creatorName, + enablePhoton, + enableServerlessCompute, + id, + instanceProfileArn, + maxNumClusters, + minNumClusters, + name, + spotInstancePolicy, + tags, + warehouseType); + } + + @Override + public String toString() { + return new ToStringer(EditWarehouseRequestPb.class) + .add("autoStopMins", autoStopMins) + .add("channel", channel) + .add("clusterSize", clusterSize) + .add("creatorName", creatorName) + .add("enablePhoton", enablePhoton) + .add("enableServerlessCompute", enableServerlessCompute) + .add("id", id) + .add("instanceProfileArn", instanceProfileArn) + .add("maxNumClusters", maxNumClusters) + .add("minNumClusters", minNumClusters) + .add("name", name) + .add("spotInstancePolicy", spotInstancePolicy) + .add("tags", tags) + .add("warehouseType", warehouseType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java index 0a222ec8a..6a33fbc1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EditWarehouseResponse.EditWarehouseResponseSerializer.class) +@JsonDeserialize(using = EditWarehouseResponse.EditWarehouseResponseDeserializer.class) public class EditWarehouseResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(EditWarehouseResponse.class).toString(); } + + EditWarehouseResponsePb toPb() { + EditWarehouseResponsePb pb = new EditWarehouseResponsePb(); + + return pb; + } + + static EditWarehouseResponse fromPb(EditWarehouseResponsePb pb) { + EditWarehouseResponse model = new EditWarehouseResponse(); + + return model; + } + + public static class EditWarehouseResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + EditWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EditWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
EditWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public EditWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EditWarehouseResponsePb pb = mapper.readValue(p, EditWarehouseResponsePb.class); + return EditWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponsePb.java new file mode 100755 index 000000000..a0a06351a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class EditWarehouseResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EditWarehouseResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java index 1edb28c43..b43c6143b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java @@ -4,6 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -11,6 +21,8 @@ * right now. */ @Generated +@JsonSerialize(using = Empty.EmptySerializer.class) +@JsonDeserialize(using = Empty.EmptyDeserializer.class) public class Empty { @Override @@ -29,4 +41,35 @@ public int hashCode() { public String toString() { return new ToStringer(Empty.class).toString(); } + + EmptyPb toPb() { + EmptyPb pb = new EmptyPb(); + + return pb; + } + + static Empty fromPb(EmptyPb pb) { + Empty model = new Empty(); + + return model; + } + + public static class EmptySerializer extends JsonSerializer { + @Override + public void serialize(Empty value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmptyPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmptyDeserializer extends JsonDeserializer { + @Override + public Empty deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmptyPb pb = mapper.readValue(p, EmptyPb.class); + return Empty.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EmptyPb.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EmptyPb.java index 8714d62a6..dc5852cbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EmptyPb.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -11,7 +11,7 @@ * right now. 
*/ @Generated -public class Empty { +class EmptyPb { @Override public boolean equals(Object o) { @@ -27,6 +27,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(Empty.class).toString(); + return new ToStringer(EmptyPb.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java index 5373b6508..3c4cfe8d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointConfPair.EndpointConfPairSerializer.class) +@JsonDeserialize(using = EndpointConfPair.EndpointConfPairDeserializer.class) public class EndpointConfPair { /** */ - @JsonProperty("key") private String key; /** */ - @JsonProperty("value") private String value; public EndpointConfPair setKey(String key) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(EndpointConfPair.class).add("key", key).add("value", value).toString(); } + + EndpointConfPairPb toPb() { + EndpointConfPairPb pb = new 
EndpointConfPairPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static EndpointConfPair fromPb(EndpointConfPairPb pb) { + EndpointConfPair model = new EndpointConfPair(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class EndpointConfPairSerializer extends JsonSerializer { + @Override + public void serialize(EndpointConfPair value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointConfPairPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointConfPairDeserializer extends JsonDeserializer { + @Override + public EndpointConfPair deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointConfPairPb pb = mapper.readValue(p, EndpointConfPairPb.class); + return EndpointConfPair.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPairPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPairPb.java new file mode 100755 index 000000000..af361e78c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPairPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointConfPairPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public EndpointConfPairPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public EndpointConfPairPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointConfPairPb that = (EndpointConfPairPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(EndpointConfPairPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java index ad4444a8a..c71539ac8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java @@ -4,32 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointHealth.EndpointHealthSerializer.class) +@JsonDeserialize(using = EndpointHealth.EndpointHealthDeserializer.class) public class EndpointHealth { /** Details about errors that are causing current degraded/failed status. */ - @JsonProperty("details") private String details; /** * The reason for failure to bring up clusters for this warehouse. This is available when status * is 'FAILED' and sometimes when it is DEGRADED. */ - @JsonProperty("failure_reason") private TerminationReason failureReason; /** Deprecated. split into summary and details for security */ - @JsonProperty("message") private String message; /** Health status of the warehouse. */ - @JsonProperty("status") private Status status; /** A short summary of the health status in case of degraded/failed warehouses. 
*/ - @JsonProperty("summary") private String summary; public EndpointHealth setDetails(String details) { @@ -104,4 +110,46 @@ public String toString() { .add("summary", summary) .toString(); } + + EndpointHealthPb toPb() { + EndpointHealthPb pb = new EndpointHealthPb(); + pb.setDetails(details); + pb.setFailureReason(failureReason); + pb.setMessage(message); + pb.setStatus(status); + pb.setSummary(summary); + + return pb; + } + + static EndpointHealth fromPb(EndpointHealthPb pb) { + EndpointHealth model = new EndpointHealth(); + model.setDetails(pb.getDetails()); + model.setFailureReason(pb.getFailureReason()); + model.setMessage(pb.getMessage()); + model.setStatus(pb.getStatus()); + model.setSummary(pb.getSummary()); + + return model; + } + + public static class EndpointHealthSerializer extends JsonSerializer { + @Override + public void serialize(EndpointHealth value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointHealthPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointHealthDeserializer extends JsonDeserializer { + @Override + public EndpointHealth deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointHealthPb pb = mapper.readValue(p, EndpointHealthPb.class); + return EndpointHealth.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealthPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealthPb.java new file mode 100755 index 000000000..47d6b4f3c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealthPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointHealthPb { + @JsonProperty("details") + private String details; + + @JsonProperty("failure_reason") + private TerminationReason failureReason; + + @JsonProperty("message") + private String message; + + @JsonProperty("status") + private Status status; + + @JsonProperty("summary") + private String summary; + + public EndpointHealthPb setDetails(String details) { + this.details = details; + return this; + } + + public String getDetails() { + return details; + } + + public EndpointHealthPb setFailureReason(TerminationReason failureReason) { + this.failureReason = failureReason; + return this; + } + + public TerminationReason getFailureReason() { + return failureReason; + } + + public EndpointHealthPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public EndpointHealthPb setStatus(Status status) { + this.status = status; + return this; + } + + public Status getStatus() { + return status; + } + + public EndpointHealthPb setSummary(String summary) { + this.summary = summary; + return this; + } + + public String getSummary() { + return summary; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointHealthPb that = (EndpointHealthPb) o; + return Objects.equals(details, that.details) + && Objects.equals(failureReason, that.failureReason) + && Objects.equals(message, that.message) + && Objects.equals(status, that.status) + && Objects.equals(summary, that.summary); + } + + @Override + public int hashCode() { + return Objects.hash(details, failureReason, message, status, summary); + } + + @Override + public String toString() { + return new 
ToStringer(EndpointHealthPb.class) + .add("details", details) + .add("failureReason", failureReason) + .add("message", message) + .add("status", status) + .add("summary", summary) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java index bd56e2472..9d8f97790 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointInfo.EndpointInfoSerializer.class) +@JsonDeserialize(using = EndpointInfo.EndpointInfoDeserializer.class) public class EndpointInfo { /** * The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) @@ -17,11 +28,9 @@ public class EndpointInfo { * *

Defaults to 120 mins */ - @JsonProperty("auto_stop_mins") private Long autoStopMins; /** Channel Details */ - @JsonProperty("channel") private Channel channel; /** @@ -32,11 +41,9 @@ public class EndpointInfo { *

Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - * 3X-Large - 4X-Large */ - @JsonProperty("cluster_size") private String clusterSize; /** warehouse creator name */ - @JsonProperty("creator_name") private String creatorName; /** @@ -44,27 +51,21 @@ public class EndpointInfo { * *

Defaults to false. */ - @JsonProperty("enable_photon") private Boolean enablePhoton; /** Configures whether the warehouse should use serverless compute */ - @JsonProperty("enable_serverless_compute") private Boolean enableServerlessCompute; /** Optional health status. Assume the warehouse is healthy if this field is not set. */ - @JsonProperty("health") private EndpointHealth health; /** unique identifier for warehouse */ - @JsonProperty("id") private String id; /** Deprecated. Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** the jdbc connection string for this warehouse */ - @JsonProperty("jdbc_url") private String jdbcUrl; /** @@ -74,7 +75,6 @@ public class EndpointInfo { * *

Defaults to min_clusters if unset. */ - @JsonProperty("max_num_clusters") private Long maxNumClusters; /** @@ -87,7 +87,6 @@ public class EndpointInfo { * *

Defaults to 1 */ - @JsonProperty("min_num_clusters") private Long minNumClusters; /** @@ -95,27 +94,21 @@ public class EndpointInfo { * *

Supported values: - Must be unique within an org. - Must be less than 100 characters. */ - @JsonProperty("name") private String name; /** Deprecated. current number of active sessions for the warehouse */ - @JsonProperty("num_active_sessions") private Long numActiveSessions; /** current number of clusters running for the service */ - @JsonProperty("num_clusters") private Long numClusters; /** ODBC parameters for the SQL warehouse */ - @JsonProperty("odbc_params") private OdbcParams odbcParams; /** Configurations whether the warehouse should use spot instances. */ - @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; /** State of the warehouse */ - @JsonProperty("state") private State state; /** @@ -124,14 +117,12 @@ public class EndpointInfo { * *

Supported values: - Number of tags < 45. */ - @JsonProperty("tags") private EndpointTags tags; /** * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to * `PRO` and also set the field `enable_serverless_compute` to `true`. */ - @JsonProperty("warehouse_type") private EndpointInfoWarehouseType warehouseType; public EndpointInfo setAutoStopMins(Long autoStopMins) { @@ -391,4 +382,75 @@ public String toString() { .add("warehouseType", warehouseType) .toString(); } + + EndpointInfoPb toPb() { + EndpointInfoPb pb = new EndpointInfoPb(); + pb.setAutoStopMins(autoStopMins); + pb.setChannel(channel); + pb.setClusterSize(clusterSize); + pb.setCreatorName(creatorName); + pb.setEnablePhoton(enablePhoton); + pb.setEnableServerlessCompute(enableServerlessCompute); + pb.setHealth(health); + pb.setId(id); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setJdbcUrl(jdbcUrl); + pb.setMaxNumClusters(maxNumClusters); + pb.setMinNumClusters(minNumClusters); + pb.setName(name); + pb.setNumActiveSessions(numActiveSessions); + pb.setNumClusters(numClusters); + pb.setOdbcParams(odbcParams); + pb.setSpotInstancePolicy(spotInstancePolicy); + pb.setState(state); + pb.setTags(tags); + pb.setWarehouseType(warehouseType); + + return pb; + } + + static EndpointInfo fromPb(EndpointInfoPb pb) { + EndpointInfo model = new EndpointInfo(); + model.setAutoStopMins(pb.getAutoStopMins()); + model.setChannel(pb.getChannel()); + model.setClusterSize(pb.getClusterSize()); + model.setCreatorName(pb.getCreatorName()); + model.setEnablePhoton(pb.getEnablePhoton()); + model.setEnableServerlessCompute(pb.getEnableServerlessCompute()); + model.setHealth(pb.getHealth()); + model.setId(pb.getId()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setJdbcUrl(pb.getJdbcUrl()); + model.setMaxNumClusters(pb.getMaxNumClusters()); + model.setMinNumClusters(pb.getMinNumClusters()); + model.setName(pb.getName()); + 
model.setNumActiveSessions(pb.getNumActiveSessions()); + model.setNumClusters(pb.getNumClusters()); + model.setOdbcParams(pb.getOdbcParams()); + model.setSpotInstancePolicy(pb.getSpotInstancePolicy()); + model.setState(pb.getState()); + model.setTags(pb.getTags()); + model.setWarehouseType(pb.getWarehouseType()); + + return model; + } + + public static class EndpointInfoSerializer extends JsonSerializer { + @Override + public void serialize(EndpointInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointInfoDeserializer extends JsonDeserializer { + @Override + public EndpointInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointInfoPb pb = mapper.readValue(p, EndpointInfoPb.class); + return EndpointInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoPb.java new file mode 100755 index 000000000..96bc26976 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoPb.java @@ -0,0 +1,329 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointInfoPb { + @JsonProperty("auto_stop_mins") + private Long autoStopMins; + + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("cluster_size") + private String clusterSize; + + @JsonProperty("creator_name") + private String creatorName; + + @JsonProperty("enable_photon") + private Boolean enablePhoton; + + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + + @JsonProperty("health") + private EndpointHealth health; + + @JsonProperty("id") + private String id; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("jdbc_url") + private String jdbcUrl; + + @JsonProperty("max_num_clusters") + private Long maxNumClusters; + + @JsonProperty("min_num_clusters") + private Long minNumClusters; + + @JsonProperty("name") + private String name; + + @JsonProperty("num_active_sessions") + private Long numActiveSessions; + + @JsonProperty("num_clusters") + private Long numClusters; + + @JsonProperty("odbc_params") + private OdbcParams odbcParams; + + @JsonProperty("spot_instance_policy") + private SpotInstancePolicy spotInstancePolicy; + + @JsonProperty("state") + private State state; + + @JsonProperty("tags") + private EndpointTags tags; + + @JsonProperty("warehouse_type") + private EndpointInfoWarehouseType warehouseType; + + public EndpointInfoPb setAutoStopMins(Long autoStopMins) { + this.autoStopMins = autoStopMins; + return this; + } + + public Long getAutoStopMins() { + return autoStopMins; + } + + public EndpointInfoPb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public EndpointInfoPb setClusterSize(String clusterSize) { + this.clusterSize = 
clusterSize; + return this; + } + + public String getClusterSize() { + return clusterSize; + } + + public EndpointInfoPb setCreatorName(String creatorName) { + this.creatorName = creatorName; + return this; + } + + public String getCreatorName() { + return creatorName; + } + + public EndpointInfoPb setEnablePhoton(Boolean enablePhoton) { + this.enablePhoton = enablePhoton; + return this; + } + + public Boolean getEnablePhoton() { + return enablePhoton; + } + + public EndpointInfoPb setEnableServerlessCompute(Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + + public EndpointInfoPb setHealth(EndpointHealth health) { + this.health = health; + return this; + } + + public EndpointHealth getHealth() { + return health; + } + + public EndpointInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public EndpointInfoPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public EndpointInfoPb setJdbcUrl(String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + return this; + } + + public String getJdbcUrl() { + return jdbcUrl; + } + + public EndpointInfoPb setMaxNumClusters(Long maxNumClusters) { + this.maxNumClusters = maxNumClusters; + return this; + } + + public Long getMaxNumClusters() { + return maxNumClusters; + } + + public EndpointInfoPb setMinNumClusters(Long minNumClusters) { + this.minNumClusters = minNumClusters; + return this; + } + + public Long getMinNumClusters() { + return minNumClusters; + } + + public EndpointInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EndpointInfoPb setNumActiveSessions(Long numActiveSessions) { + 
this.numActiveSessions = numActiveSessions; + return this; + } + + public Long getNumActiveSessions() { + return numActiveSessions; + } + + public EndpointInfoPb setNumClusters(Long numClusters) { + this.numClusters = numClusters; + return this; + } + + public Long getNumClusters() { + return numClusters; + } + + public EndpointInfoPb setOdbcParams(OdbcParams odbcParams) { + this.odbcParams = odbcParams; + return this; + } + + public OdbcParams getOdbcParams() { + return odbcParams; + } + + public EndpointInfoPb setSpotInstancePolicy(SpotInstancePolicy spotInstancePolicy) { + this.spotInstancePolicy = spotInstancePolicy; + return this; + } + + public SpotInstancePolicy getSpotInstancePolicy() { + return spotInstancePolicy; + } + + public EndpointInfoPb setState(State state) { + this.state = state; + return this; + } + + public State getState() { + return state; + } + + public EndpointInfoPb setTags(EndpointTags tags) { + this.tags = tags; + return this; + } + + public EndpointTags getTags() { + return tags; + } + + public EndpointInfoPb setWarehouseType(EndpointInfoWarehouseType warehouseType) { + this.warehouseType = warehouseType; + return this; + } + + public EndpointInfoWarehouseType getWarehouseType() { + return warehouseType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointInfoPb that = (EndpointInfoPb) o; + return Objects.equals(autoStopMins, that.autoStopMins) + && Objects.equals(channel, that.channel) + && Objects.equals(clusterSize, that.clusterSize) + && Objects.equals(creatorName, that.creatorName) + && Objects.equals(enablePhoton, that.enablePhoton) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) + && Objects.equals(health, that.health) + && Objects.equals(id, that.id) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(jdbcUrl, that.jdbcUrl) + && Objects.equals(maxNumClusters, 
that.maxNumClusters) + && Objects.equals(minNumClusters, that.minNumClusters) + && Objects.equals(name, that.name) + && Objects.equals(numActiveSessions, that.numActiveSessions) + && Objects.equals(numClusters, that.numClusters) + && Objects.equals(odbcParams, that.odbcParams) + && Objects.equals(spotInstancePolicy, that.spotInstancePolicy) + && Objects.equals(state, that.state) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseType, that.warehouseType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoStopMins, + channel, + clusterSize, + creatorName, + enablePhoton, + enableServerlessCompute, + health, + id, + instanceProfileArn, + jdbcUrl, + maxNumClusters, + minNumClusters, + name, + numActiveSessions, + numClusters, + odbcParams, + spotInstancePolicy, + state, + tags, + warehouseType); + } + + @Override + public String toString() { + return new ToStringer(EndpointInfoPb.class) + .add("autoStopMins", autoStopMins) + .add("channel", channel) + .add("clusterSize", clusterSize) + .add("creatorName", creatorName) + .add("enablePhoton", enablePhoton) + .add("enableServerlessCompute", enableServerlessCompute) + .add("health", health) + .add("id", id) + .add("instanceProfileArn", instanceProfileArn) + .add("jdbcUrl", jdbcUrl) + .add("maxNumClusters", maxNumClusters) + .add("minNumClusters", minNumClusters) + .add("name", name) + .add("numActiveSessions", numActiveSessions) + .add("numClusters", numClusters) + .add("odbcParams", odbcParams) + .add("spotInstancePolicy", spotInstancePolicy) + .add("state", state) + .add("tags", tags) + .add("warehouseType", warehouseType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPair.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPair.java index 115249962..a08a90b17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPair.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPair.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointTagPair.EndpointTagPairSerializer.class) +@JsonDeserialize(using = EndpointTagPair.EndpointTagPairDeserializer.class) public class EndpointTagPair { /** */ - @JsonProperty("key") private String key; /** */ - @JsonProperty("value") private String value; public EndpointTagPair setKey(String key) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(EndpointTagPair.class).add("key", key).add("value", value).toString(); } + + EndpointTagPairPb toPb() { + EndpointTagPairPb pb = new EndpointTagPairPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static EndpointTagPair fromPb(EndpointTagPairPb pb) { + EndpointTagPair model = new EndpointTagPair(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class EndpointTagPairSerializer extends JsonSerializer { + @Override + public void serialize(EndpointTagPair value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointTagPairPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
EndpointTagPairDeserializer extends JsonDeserializer { + @Override + public EndpointTagPair deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointTagPairPb pb = mapper.readValue(p, EndpointTagPairPb.class); + return EndpointTagPair.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPairPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPairPb.java new file mode 100755 index 000000000..00fe37ea2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagPairPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EndpointTagPairPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public EndpointTagPairPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public EndpointTagPairPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointTagPairPb that = (EndpointTagPairPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(EndpointTagPairPb.class).add("key", key).add("value", value).toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTags.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTags.java index 9c49de20b..90ae7b881 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTags.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTags.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointTags.EndpointTagsSerializer.class) +@JsonDeserialize(using = EndpointTags.EndpointTagsDeserializer.class) public class EndpointTags { /** */ - @JsonProperty("custom_tags") private Collection customTags; public EndpointTags setCustomTags(Collection customTags) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(EndpointTags.class).add("customTags", customTags).toString(); } + + EndpointTagsPb toPb() { + EndpointTagsPb pb = new EndpointTagsPb(); + pb.setCustomTags(customTags); + + return pb; + } + + static EndpointTags fromPb(EndpointTagsPb pb) { + EndpointTags model = new EndpointTags(); + model.setCustomTags(pb.getCustomTags()); + + return model; + } + + public static class EndpointTagsSerializer extends JsonSerializer { + @Override + public void 
serialize(EndpointTags value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointTagsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointTagsDeserializer extends JsonDeserializer { + @Override + public EndpointTags deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointTagsPb pb = mapper.readValue(p, EndpointTagsPb.class); + return EndpointTags.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagsPb.java new file mode 100755 index 000000000..85056cbca --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointTagsPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointTagsPb { + @JsonProperty("custom_tags") + private Collection customTags; + + public EndpointTagsPb setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointTagsPb that = (EndpointTagsPb) o; + return Objects.equals(customTags, that.customTags); + } + + @Override + public int hashCode() { + return Objects.hash(customTags); + } + + @Override + public String toString() { + return new ToStringer(EndpointTagsPb.class).add("customTags", customTags).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValue.java index 569ac8e25..60dae33ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValue.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EnumValue.EnumValueSerializer.class) +@JsonDeserialize(using = EnumValue.EnumValueDeserializer.class) public class EnumValue { /** List of valid query parameter values, newline delimited. */ - @JsonProperty("enum_options") private String enumOptions; /** If specified, allows multiple values to be selected for this parameter. */ - @JsonProperty("multi_values_options") private MultiValuesOptions multiValuesOptions; /** List of selected query parameter values. */ - @JsonProperty("values") private Collection values; public EnumValue setEnumOptions(String enumOptions) { @@ -72,4 +80,41 @@ public String toString() { .add("values", values) .toString(); } + + EnumValuePb toPb() { + EnumValuePb pb = new EnumValuePb(); + pb.setEnumOptions(enumOptions); + pb.setMultiValuesOptions(multiValuesOptions); + pb.setValues(values); + + return pb; + } + + static EnumValue fromPb(EnumValuePb pb) { + EnumValue model = new EnumValue(); + model.setEnumOptions(pb.getEnumOptions()); + model.setMultiValuesOptions(pb.getMultiValuesOptions()); + model.setValues(pb.getValues()); + + return model; + } + + public static class EnumValueSerializer extends JsonSerializer { + @Override + public void serialize(EnumValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EnumValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EnumValueDeserializer extends JsonDeserializer { + @Override + public EnumValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EnumValuePb pb = mapper.readValue(p, EnumValuePb.class); + return EnumValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValuePb.java new file mode 100755 index 000000000..655915e0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValuePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EnumValuePb { + @JsonProperty("enum_options") + private String enumOptions; + + @JsonProperty("multi_values_options") + private MultiValuesOptions multiValuesOptions; + + @JsonProperty("values") + private Collection values; + + public EnumValuePb setEnumOptions(String enumOptions) { + this.enumOptions = enumOptions; + return this; + } + + public String getEnumOptions() { + return enumOptions; + } + + public EnumValuePb setMultiValuesOptions(MultiValuesOptions multiValuesOptions) { + this.multiValuesOptions = multiValuesOptions; + return this; + } + + public MultiValuesOptions getMultiValuesOptions() { + return multiValuesOptions; + } + + public EnumValuePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnumValuePb that = (EnumValuePb) o; + return Objects.equals(enumOptions, that.enumOptions) + && Objects.equals(multiValuesOptions, that.multiValuesOptions) + && Objects.equals(values, that.values); + } + + 
@Override + public int hashCode() { + return Objects.hash(enumOptions, multiValuesOptions, values); + } + + @Override + public String toString() { + return new ToStringer(EnumValuePb.class) + .add("enumOptions", enumOptions) + .add("multiValuesOptions", multiValuesOptions) + .add("values", values) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java index de8ea5aec..91aa4ac83 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ExecuteStatementRequest.ExecuteStatementRequestSerializer.class) +@JsonDeserialize(using = ExecuteStatementRequest.ExecuteStatementRequestDeserializer.class) public class ExecuteStatementRequest { /** * Applies the given byte limit to the statement's result size. 
Byte counts are based on internal @@ -17,7 +28,6 @@ public class ExecuteStatementRequest { * When using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if * `byte_limit` is not explcitly set. */ - @JsonProperty("byte_limit") private Long byteLimit; /** @@ -26,11 +36,9 @@ public class ExecuteStatementRequest { *

[`USE CATALOG`]: * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html */ - @JsonProperty("catalog") private String catalog; /** */ - @JsonProperty("disposition") private Disposition disposition; /** @@ -66,7 +74,6 @@ public class ExecuteStatementRequest { * https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: * https://www.rfc-editor.org/rfc/rfc4180 */ - @JsonProperty("format") private Format format; /** @@ -77,7 +84,6 @@ public class ExecuteStatementRequest { * :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is * canceled and the call returns with a `CANCELED` state. */ - @JsonProperty("on_wait_timeout") private ExecuteStatementRequestOnWaitTimeout onWaitTimeout; /** @@ -110,7 +116,6 @@ public class ExecuteStatementRequest { * https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` * function]: https://docs.databricks.com/sql/language-manual/functions/cast.html */ - @JsonProperty("parameters") private Collection parameters; /** @@ -118,7 +123,6 @@ public class ExecuteStatementRequest { * SQL, it also sets the `truncated` field in the response to indicate whether the result was * trimmed due to the limit or not. */ - @JsonProperty("row_limit") private Long rowLimit; /** @@ -127,13 +131,11 @@ public class ExecuteStatementRequest { *

[`USE SCHEMA`]: * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html */ - @JsonProperty("schema") private String schema; /** * The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. */ - @JsonProperty("statement") private String statement; /** @@ -150,7 +152,6 @@ public class ExecuteStatementRequest { * execution error). If the statement takes longer to execute, `on_wait_timeout` determines what * should happen after the timeout is reached. */ - @JsonProperty("wait_timeout") private String waitTimeout; /** @@ -158,7 +159,6 @@ public class ExecuteStatementRequest { * *

[What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html */ - @JsonProperty("warehouse_id") private String warehouseId; public ExecuteStatementRequest setByteLimit(Long byteLimit) { @@ -311,4 +311,61 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + ExecuteStatementRequestPb toPb() { + ExecuteStatementRequestPb pb = new ExecuteStatementRequestPb(); + pb.setByteLimit(byteLimit); + pb.setCatalog(catalog); + pb.setDisposition(disposition); + pb.setFormat(format); + pb.setOnWaitTimeout(onWaitTimeout); + pb.setParameters(parameters); + pb.setRowLimit(rowLimit); + pb.setSchema(schema); + pb.setStatement(statement); + pb.setWaitTimeout(waitTimeout); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static ExecuteStatementRequest fromPb(ExecuteStatementRequestPb pb) { + ExecuteStatementRequest model = new ExecuteStatementRequest(); + model.setByteLimit(pb.getByteLimit()); + model.setCatalog(pb.getCatalog()); + model.setDisposition(pb.getDisposition()); + model.setFormat(pb.getFormat()); + model.setOnWaitTimeout(pb.getOnWaitTimeout()); + model.setParameters(pb.getParameters()); + model.setRowLimit(pb.getRowLimit()); + model.setSchema(pb.getSchema()); + model.setStatement(pb.getStatement()); + model.setWaitTimeout(pb.getWaitTimeout()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class ExecuteStatementRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ExecuteStatementRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExecuteStatementRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExecuteStatementRequestDeserializer + extends JsonDeserializer { + @Override + public ExecuteStatementRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExecuteStatementRequestPb pb = mapper.readValue(p, ExecuteStatementRequestPb.class); + return ExecuteStatementRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequestPb.java new file mode 100755 index 000000000..717fbb82b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequestPb.java @@ -0,0 +1,197 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ExecuteStatementRequestPb { + @JsonProperty("byte_limit") + private Long byteLimit; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("disposition") + private Disposition disposition; + + @JsonProperty("format") + private Format format; + + @JsonProperty("on_wait_timeout") + private ExecuteStatementRequestOnWaitTimeout onWaitTimeout; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("row_limit") + private Long rowLimit; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("statement") + private String statement; + + @JsonProperty("wait_timeout") + private String waitTimeout; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public ExecuteStatementRequestPb setByteLimit(Long byteLimit) { + this.byteLimit = byteLimit; + return this; + } + + public Long getByteLimit() { + return byteLimit; + } + + public ExecuteStatementRequestPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + 
public ExecuteStatementRequestPb setDisposition(Disposition disposition) { + this.disposition = disposition; + return this; + } + + public Disposition getDisposition() { + return disposition; + } + + public ExecuteStatementRequestPb setFormat(Format format) { + this.format = format; + return this; + } + + public Format getFormat() { + return format; + } + + public ExecuteStatementRequestPb setOnWaitTimeout( + ExecuteStatementRequestOnWaitTimeout onWaitTimeout) { + this.onWaitTimeout = onWaitTimeout; + return this; + } + + public ExecuteStatementRequestOnWaitTimeout getOnWaitTimeout() { + return onWaitTimeout; + } + + public ExecuteStatementRequestPb setParameters( + Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public ExecuteStatementRequestPb setRowLimit(Long rowLimit) { + this.rowLimit = rowLimit; + return this; + } + + public Long getRowLimit() { + return rowLimit; + } + + public ExecuteStatementRequestPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public ExecuteStatementRequestPb setStatement(String statement) { + this.statement = statement; + return this; + } + + public String getStatement() { + return statement; + } + + public ExecuteStatementRequestPb setWaitTimeout(String waitTimeout) { + this.waitTimeout = waitTimeout; + return this; + } + + public String getWaitTimeout() { + return waitTimeout; + } + + public ExecuteStatementRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExecuteStatementRequestPb that = (ExecuteStatementRequestPb) o; + return Objects.equals(byteLimit, that.byteLimit) + && Objects.equals(catalog, 
that.catalog) + && Objects.equals(disposition, that.disposition) + && Objects.equals(format, that.format) + && Objects.equals(onWaitTimeout, that.onWaitTimeout) + && Objects.equals(parameters, that.parameters) + && Objects.equals(rowLimit, that.rowLimit) + && Objects.equals(schema, that.schema) + && Objects.equals(statement, that.statement) + && Objects.equals(waitTimeout, that.waitTimeout) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + byteLimit, + catalog, + disposition, + format, + onWaitTimeout, + parameters, + rowLimit, + schema, + statement, + waitTimeout, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(ExecuteStatementRequestPb.class) + .add("byteLimit", byteLimit) + .add("catalog", catalog) + .add("disposition", disposition) + .add("format", format) + .add("onWaitTimeout", onWaitTimeout) + .add("parameters", parameters) + .add("rowLimit", rowLimit) + .add("schema", schema) + .add("statement", statement) + .add("waitTimeout", waitTimeout) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java index 1b88216f2..95fdbe0b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java @@ -4,32 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalLink.ExternalLinkSerializer.class) +@JsonDeserialize(using = ExternalLink.ExternalLinkDeserializer.class) public class ExternalLink { /** * The number of bytes in the result chunk. This field is not available when using `INLINE` * disposition. */ - @JsonProperty("byte_count") private Long byteCount; /** The position within the sequence of result set chunks. */ - @JsonProperty("chunk_index") private Long chunkIndex; /** * Indicates the date-time that the given external link will expire and becomes invalid, after * which point a new `external_link` must be requested. */ - @JsonProperty("expiration") private String expiration; /** */ - @JsonProperty("external_link") private String externalLink; /** @@ -38,7 +45,6 @@ public class ExternalLink { * external service. The values of these headers should be considered sensitive and the client * should not expose these values in a log. */ - @JsonProperty("http_headers") private Map httpHeaders; /** @@ -46,7 +52,6 @@ public class ExternalLink { * no more chunks. The next chunk can be fetched with a * :method:statementexecution/getStatementResultChunkN request. */ - @JsonProperty("next_chunk_index") private Long nextChunkIndex; /** @@ -54,15 +59,12 @@ public class ExternalLink { * more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and * should be treated as an opaque link. This is an alternative to using `next_chunk_index`. */ - @JsonProperty("next_chunk_internal_link") private String nextChunkInternalLink; /** The number of rows within the result chunk. 
*/ - @JsonProperty("row_count") private Long rowCount; /** The starting row offset within the result set. */ - @JsonProperty("row_offset") private Long rowOffset; public ExternalLink setByteCount(Long byteCount) { @@ -190,4 +192,53 @@ public String toString() { .add("rowOffset", rowOffset) .toString(); } + + ExternalLinkPb toPb() { + ExternalLinkPb pb = new ExternalLinkPb(); + pb.setByteCount(byteCount); + pb.setChunkIndex(chunkIndex); + pb.setExpiration(expiration); + pb.setExternalLink(externalLink); + pb.setHttpHeaders(httpHeaders); + pb.setNextChunkIndex(nextChunkIndex); + pb.setNextChunkInternalLink(nextChunkInternalLink); + pb.setRowCount(rowCount); + pb.setRowOffset(rowOffset); + + return pb; + } + + static ExternalLink fromPb(ExternalLinkPb pb) { + ExternalLink model = new ExternalLink(); + model.setByteCount(pb.getByteCount()); + model.setChunkIndex(pb.getChunkIndex()); + model.setExpiration(pb.getExpiration()); + model.setExternalLink(pb.getExternalLink()); + model.setHttpHeaders(pb.getHttpHeaders()); + model.setNextChunkIndex(pb.getNextChunkIndex()); + model.setNextChunkInternalLink(pb.getNextChunkInternalLink()); + model.setRowCount(pb.getRowCount()); + model.setRowOffset(pb.getRowOffset()); + + return model; + } + + public static class ExternalLinkSerializer extends JsonSerializer { + @Override + public void serialize(ExternalLink value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalLinkPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalLinkDeserializer extends JsonDeserializer { + @Override + public ExternalLink deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalLinkPb pb = mapper.readValue(p, ExternalLinkPb.class); + return ExternalLink.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLinkPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLinkPb.java new file mode 100755 index 000000000..21b82c71c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLinkPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class ExternalLinkPb { + @JsonProperty("byte_count") + private Long byteCount; + + @JsonProperty("chunk_index") + private Long chunkIndex; + + @JsonProperty("expiration") + private String expiration; + + @JsonProperty("external_link") + private String externalLink; + + @JsonProperty("http_headers") + private Map httpHeaders; + + @JsonProperty("next_chunk_index") + private Long nextChunkIndex; + + @JsonProperty("next_chunk_internal_link") + private String nextChunkInternalLink; + + @JsonProperty("row_count") + private Long rowCount; + + @JsonProperty("row_offset") + private Long rowOffset; + + public ExternalLinkPb setByteCount(Long byteCount) { + this.byteCount = byteCount; + return this; + } + + public Long getByteCount() { + return byteCount; + } + + public ExternalLinkPb setChunkIndex(Long chunkIndex) { + this.chunkIndex = chunkIndex; + return this; + } + + public Long getChunkIndex() { + return chunkIndex; + } + + public ExternalLinkPb setExpiration(String expiration) { + this.expiration = expiration; + return this; + } + + public String getExpiration() { + return expiration; + } + + public ExternalLinkPb 
setExternalLink(String externalLink) { + this.externalLink = externalLink; + return this; + } + + public String getExternalLink() { + return externalLink; + } + + public ExternalLinkPb setHttpHeaders(Map httpHeaders) { + this.httpHeaders = httpHeaders; + return this; + } + + public Map getHttpHeaders() { + return httpHeaders; + } + + public ExternalLinkPb setNextChunkIndex(Long nextChunkIndex) { + this.nextChunkIndex = nextChunkIndex; + return this; + } + + public Long getNextChunkIndex() { + return nextChunkIndex; + } + + public ExternalLinkPb setNextChunkInternalLink(String nextChunkInternalLink) { + this.nextChunkInternalLink = nextChunkInternalLink; + return this; + } + + public String getNextChunkInternalLink() { + return nextChunkInternalLink; + } + + public ExternalLinkPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + public ExternalLinkPb setRowOffset(Long rowOffset) { + this.rowOffset = rowOffset; + return this; + } + + public Long getRowOffset() { + return rowOffset; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLinkPb that = (ExternalLinkPb) o; + return Objects.equals(byteCount, that.byteCount) + && Objects.equals(chunkIndex, that.chunkIndex) + && Objects.equals(expiration, that.expiration) + && Objects.equals(externalLink, that.externalLink) + && Objects.equals(httpHeaders, that.httpHeaders) + && Objects.equals(nextChunkIndex, that.nextChunkIndex) + && Objects.equals(nextChunkInternalLink, that.nextChunkInternalLink) + && Objects.equals(rowCount, that.rowCount) + && Objects.equals(rowOffset, that.rowOffset); + } + + @Override + public int hashCode() { + return Objects.hash( + byteCount, + chunkIndex, + expiration, + externalLink, + httpHeaders, + nextChunkIndex, + nextChunkInternalLink, + rowCount, + rowOffset); + } + + @Override + public String toString() { + 
return new ToStringer(ExternalLinkPb.class) + .add("byteCount", byteCount) + .add("chunkIndex", chunkIndex) + .add("expiration", expiration) + .add("externalLink", externalLink) + .add("httpHeaders", httpHeaders) + .add("nextChunkIndex", nextChunkIndex) + .add("nextChunkInternalLink", nextChunkInternalLink) + .add("rowCount", rowCount) + .add("rowOffset", rowOffset) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java index 762b2f47a..6ce0475c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalQuerySource.ExternalQuerySourceSerializer.class) +@JsonDeserialize(using = ExternalQuerySource.ExternalQuerySourceDeserializer.class) public class ExternalQuerySource { /** The canonical identifier for this SQL alert */ - @JsonProperty("alert_id") private String alertId; /** The canonical identifier for this Lakeview dashboard */ - @JsonProperty("dashboard_id") private String dashboardId; /** The canonical 
identifier for this Genie space */ - @JsonProperty("genie_space_id") private String genieSpaceId; /** */ - @JsonProperty("job_info") private ExternalQuerySourceJobInfo jobInfo; /** The canonical identifier for this legacy dashboard */ - @JsonProperty("legacy_dashboard_id") private String legacyDashboardId; /** The canonical identifier for this notebook */ - @JsonProperty("notebook_id") private String notebookId; /** The canonical identifier for this SQL query */ - @JsonProperty("sql_query_id") private String sqlQueryId; public ExternalQuerySource setAlertId(String alertId) { @@ -132,4 +136,51 @@ public String toString() { .add("sqlQueryId", sqlQueryId) .toString(); } + + ExternalQuerySourcePb toPb() { + ExternalQuerySourcePb pb = new ExternalQuerySourcePb(); + pb.setAlertId(alertId); + pb.setDashboardId(dashboardId); + pb.setGenieSpaceId(genieSpaceId); + pb.setJobInfo(jobInfo); + pb.setLegacyDashboardId(legacyDashboardId); + pb.setNotebookId(notebookId); + pb.setSqlQueryId(sqlQueryId); + + return pb; + } + + static ExternalQuerySource fromPb(ExternalQuerySourcePb pb) { + ExternalQuerySource model = new ExternalQuerySource(); + model.setAlertId(pb.getAlertId()); + model.setDashboardId(pb.getDashboardId()); + model.setGenieSpaceId(pb.getGenieSpaceId()); + model.setJobInfo(pb.getJobInfo()); + model.setLegacyDashboardId(pb.getLegacyDashboardId()); + model.setNotebookId(pb.getNotebookId()); + model.setSqlQueryId(pb.getSqlQueryId()); + + return model; + } + + public static class ExternalQuerySourceSerializer extends JsonSerializer { + @Override + public void serialize(ExternalQuerySource value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalQuerySourcePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalQuerySourceDeserializer + extends JsonDeserializer { + @Override + public ExternalQuerySource deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec 
is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalQuerySourcePb pb = mapper.readValue(p, ExternalQuerySourcePb.class); + return ExternalQuerySource.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java index 92a351a14..630a54977 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ExternalQuerySourceJobInfo.ExternalQuerySourceJobInfoSerializer.class) +@JsonDeserialize(using = ExternalQuerySourceJobInfo.ExternalQuerySourceJobInfoDeserializer.class) public class ExternalQuerySourceJobInfo { /** The canonical identifier for this job. */ - @JsonProperty("job_id") private String jobId; /** The canonical identifier of the run. This ID is unique across all runs of all jobs. */ - @JsonProperty("job_run_id") private String jobRunId; /** The canonical identifier of the task run. 
*/ - @JsonProperty("job_task_run_id") private String jobTaskRunId; public ExternalQuerySourceJobInfo setJobId(String jobId) { @@ -71,4 +79,45 @@ public String toString() { .add("jobTaskRunId", jobTaskRunId) .toString(); } + + ExternalQuerySourceJobInfoPb toPb() { + ExternalQuerySourceJobInfoPb pb = new ExternalQuerySourceJobInfoPb(); + pb.setJobId(jobId); + pb.setJobRunId(jobRunId); + pb.setJobTaskRunId(jobTaskRunId); + + return pb; + } + + static ExternalQuerySourceJobInfo fromPb(ExternalQuerySourceJobInfoPb pb) { + ExternalQuerySourceJobInfo model = new ExternalQuerySourceJobInfo(); + model.setJobId(pb.getJobId()); + model.setJobRunId(pb.getJobRunId()); + model.setJobTaskRunId(pb.getJobTaskRunId()); + + return model; + } + + public static class ExternalQuerySourceJobInfoSerializer + extends JsonSerializer { + @Override + public void serialize( + ExternalQuerySourceJobInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExternalQuerySourceJobInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExternalQuerySourceJobInfoDeserializer + extends JsonDeserializer { + @Override + public ExternalQuerySourceJobInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExternalQuerySourceJobInfoPb pb = mapper.readValue(p, ExternalQuerySourceJobInfoPb.class); + return ExternalQuerySourceJobInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfoPb.java new file mode 100755 index 000000000..0265dda52 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfoPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalQuerySourceJobInfoPb { + @JsonProperty("job_id") + private String jobId; + + @JsonProperty("job_run_id") + private String jobRunId; + + @JsonProperty("job_task_run_id") + private String jobTaskRunId; + + public ExternalQuerySourceJobInfoPb setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public String getJobId() { + return jobId; + } + + public ExternalQuerySourceJobInfoPb setJobRunId(String jobRunId) { + this.jobRunId = jobRunId; + return this; + } + + public String getJobRunId() { + return jobRunId; + } + + public ExternalQuerySourceJobInfoPb setJobTaskRunId(String jobTaskRunId) { + this.jobTaskRunId = jobTaskRunId; + return this; + } + + public String getJobTaskRunId() { + return jobTaskRunId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalQuerySourceJobInfoPb that = (ExternalQuerySourceJobInfoPb) o; + return Objects.equals(jobId, that.jobId) + && Objects.equals(jobRunId, that.jobRunId) + && Objects.equals(jobTaskRunId, 
that.jobTaskRunId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, jobRunId, jobTaskRunId); + } + + @Override + public String toString() { + return new ToStringer(ExternalQuerySourceJobInfoPb.class) + .add("jobId", jobId) + .add("jobRunId", jobRunId) + .add("jobTaskRunId", jobTaskRunId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourcePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourcePb.java new file mode 100755 index 000000000..79fc87faf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourcePb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ExternalQuerySourcePb { + @JsonProperty("alert_id") + private String alertId; + + @JsonProperty("dashboard_id") + private String dashboardId; + + @JsonProperty("genie_space_id") + private String genieSpaceId; + + @JsonProperty("job_info") + private ExternalQuerySourceJobInfo jobInfo; + + @JsonProperty("legacy_dashboard_id") + private String legacyDashboardId; + + @JsonProperty("notebook_id") + private String notebookId; + + @JsonProperty("sql_query_id") + private String sqlQueryId; + + public ExternalQuerySourcePb setAlertId(String alertId) { + this.alertId = alertId; + return this; + } + + public String getAlertId() { + return alertId; + } + + public ExternalQuerySourcePb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public ExternalQuerySourcePb setGenieSpaceId(String genieSpaceId) { + this.genieSpaceId = genieSpaceId; + return this; + } + + 
public String getGenieSpaceId() { + return genieSpaceId; + } + + public ExternalQuerySourcePb setJobInfo(ExternalQuerySourceJobInfo jobInfo) { + this.jobInfo = jobInfo; + return this; + } + + public ExternalQuerySourceJobInfo getJobInfo() { + return jobInfo; + } + + public ExternalQuerySourcePb setLegacyDashboardId(String legacyDashboardId) { + this.legacyDashboardId = legacyDashboardId; + return this; + } + + public String getLegacyDashboardId() { + return legacyDashboardId; + } + + public ExternalQuerySourcePb setNotebookId(String notebookId) { + this.notebookId = notebookId; + return this; + } + + public String getNotebookId() { + return notebookId; + } + + public ExternalQuerySourcePb setSqlQueryId(String sqlQueryId) { + this.sqlQueryId = sqlQueryId; + return this; + } + + public String getSqlQueryId() { + return sqlQueryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalQuerySourcePb that = (ExternalQuerySourcePb) o; + return Objects.equals(alertId, that.alertId) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(genieSpaceId, that.genieSpaceId) + && Objects.equals(jobInfo, that.jobInfo) + && Objects.equals(legacyDashboardId, that.legacyDashboardId) + && Objects.equals(notebookId, that.notebookId) + && Objects.equals(sqlQueryId, that.sqlQueryId); + } + + @Override + public int hashCode() { + return Objects.hash( + alertId, dashboardId, genieSpaceId, jobInfo, legacyDashboardId, notebookId, sqlQueryId); + } + + @Override + public String toString() { + return new ToStringer(ExternalQuerySourcePb.class) + .add("alertId", alertId) + .add("dashboardId", dashboardId) + .add("genieSpaceId", genieSpaceId) + .add("jobInfo", jobInfo) + .add("legacyDashboardId", legacyDashboardId) + .add("notebookId", notebookId) + .add("sqlQueryId", sqlQueryId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java index 2686c8252..0ec2a1fdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an alert */ @Generated +@JsonSerialize(using = GetAlertRequest.GetAlertRequestSerializer.class) +@JsonDeserialize(using = GetAlertRequest.GetAlertRequestDeserializer.class) public class GetAlertRequest { /** */ - @JsonIgnore private String id; + private String id; public GetAlertRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetAlertRequest.class).add("id", id).toString(); } + + GetAlertRequestPb toPb() { + GetAlertRequestPb pb = new GetAlertRequestPb(); + pb.setId(id); + + return pb; + } + + static GetAlertRequest fromPb(GetAlertRequestPb pb) { + GetAlertRequest model = new GetAlertRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetAlertRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetAlertRequest 
value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAlertRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAlertRequestDeserializer extends JsonDeserializer { + @Override + public GetAlertRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAlertRequestPb pb = mapper.readValue(p, GetAlertRequestPb.class); + return GetAlertRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequestPb.java new file mode 100755 index 000000000..edf5f22dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an alert */ +@Generated +class GetAlertRequestPb { + @JsonIgnore private String id; + + public GetAlertRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAlertRequestPb that = (GetAlertRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetAlertRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java index b5c4f94f8..1caa7f80d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; /** Get an alert */ @Generated +@JsonSerialize(using = GetAlertV2Request.GetAlertV2RequestSerializer.class) +@JsonDeserialize(using = GetAlertV2Request.GetAlertV2RequestDeserializer.class) public class GetAlertV2Request { /** */ - @JsonIgnore private String id; + private String id; public GetAlertV2Request setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetAlertV2Request.class).add("id", id).toString(); } + + GetAlertV2RequestPb toPb() { + GetAlertV2RequestPb pb = new GetAlertV2RequestPb(); + pb.setId(id); + + return pb; + } + + static GetAlertV2Request fromPb(GetAlertV2RequestPb pb) { + GetAlertV2Request model = new GetAlertV2Request(); + model.setId(pb.getId()); + + return model; + } + + public static class GetAlertV2RequestSerializer extends JsonSerializer { + @Override + public void serialize(GetAlertV2Request value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAlertV2RequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAlertV2RequestDeserializer extends JsonDeserializer { + @Override + public GetAlertV2Request deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAlertV2RequestPb pb = mapper.readValue(p, GetAlertV2RequestPb.class); + return GetAlertV2Request.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2RequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2RequestPb.java new file mode 100755 index 000000000..7cb32a734 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2RequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an alert */ +@Generated +class GetAlertV2RequestPb { + @JsonIgnore private String id; + + public GetAlertV2RequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAlertV2RequestPb that = (GetAlertV2RequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetAlertV2RequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java index a9651407e..a305f1b71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; /** Get an alert */ @Generated +@JsonSerialize(using = GetAlertsLegacyRequest.GetAlertsLegacyRequestSerializer.class) +@JsonDeserialize(using = GetAlertsLegacyRequest.GetAlertsLegacyRequestDeserializer.class) public class GetAlertsLegacyRequest { /** */ - @JsonIgnore private String alertId; + private String alertId; public GetAlertsLegacyRequest setAlertId(String alertId) { this.alertId = alertId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetAlertsLegacyRequest.class).add("alertId", alertId).toString(); } + + GetAlertsLegacyRequestPb toPb() { + GetAlertsLegacyRequestPb pb = new GetAlertsLegacyRequestPb(); + pb.setAlertId(alertId); + + return pb; + } + + static GetAlertsLegacyRequest fromPb(GetAlertsLegacyRequestPb pb) { + GetAlertsLegacyRequest model = new GetAlertsLegacyRequest(); + model.setAlertId(pb.getAlertId()); + + return model; + } + + public static class GetAlertsLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetAlertsLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAlertsLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAlertsLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public GetAlertsLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAlertsLegacyRequestPb pb = mapper.readValue(p, GetAlertsLegacyRequestPb.class); + return GetAlertsLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequestPb.java new file mode 100755 index 000000000..4bbecd1ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an alert */ +@Generated +class GetAlertsLegacyRequestPb { + @JsonIgnore private String alertId; + + public GetAlertsLegacyRequestPb setAlertId(String alertId) { + this.alertId = alertId; + return this; + } + + public String getAlertId() { + return alertId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAlertsLegacyRequestPb that = (GetAlertsLegacyRequestPb) o; + return Objects.equals(alertId, that.alertId); + } + + @Override + public int hashCode() { + return Objects.hash(alertId); + } + + @Override + public String toString() { + return new ToStringer(GetAlertsLegacyRequestPb.class).add("alertId", alertId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequest.java index d1b95ad82..3e0512c66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Retrieve a definition */ @Generated +@JsonSerialize(using = GetDashboardRequest.GetDashboardRequestSerializer.class) +@JsonDeserialize(using = GetDashboardRequest.GetDashboardRequestDeserializer.class) public class GetDashboardRequest { /** */ - @JsonIgnore private String dashboardId; + private String dashboardId; public GetDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetDashboardRequest.class).add("dashboardId", dashboardId).toString(); } + + GetDashboardRequestPb toPb() { + GetDashboardRequestPb pb = new GetDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static GetDashboardRequest fromPb(GetDashboardRequestPb pb) { + GetDashboardRequest model = new GetDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class GetDashboardRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDashboardRequestPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public GetDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDashboardRequestPb pb = mapper.readValue(p, GetDashboardRequestPb.class); + return GetDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequestPb.java new file mode 100755 index 000000000..0791bdaf2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve a definition */ +@Generated +class GetDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public GetDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDashboardRequestPb that = (GetDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(GetDashboardRequestPb.class).add("dashboardId", dashboardId).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequest.java index c667495c9..2b8f95991 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get object ACL */ @Generated +@JsonSerialize(using = GetDbsqlPermissionRequest.GetDbsqlPermissionRequestSerializer.class) +@JsonDeserialize(using = GetDbsqlPermissionRequest.GetDbsqlPermissionRequestDeserializer.class) public class GetDbsqlPermissionRequest { /** Object ID. An ACL is returned for the object with this UUID. */ - @JsonIgnore private String objectId; + private String objectId; /** The type of object permissions to check. 
*/ - @JsonIgnore private ObjectTypePlural objectType; + private ObjectTypePlural objectType; public GetDbsqlPermissionRequest setObjectId(String objectId) { this.objectId = objectId; @@ -54,4 +65,43 @@ public String toString() { .add("objectType", objectType) .toString(); } + + GetDbsqlPermissionRequestPb toPb() { + GetDbsqlPermissionRequestPb pb = new GetDbsqlPermissionRequestPb(); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static GetDbsqlPermissionRequest fromPb(GetDbsqlPermissionRequestPb pb) { + GetDbsqlPermissionRequest model = new GetDbsqlPermissionRequest(); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class GetDbsqlPermissionRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetDbsqlPermissionRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetDbsqlPermissionRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetDbsqlPermissionRequestDeserializer + extends JsonDeserializer { + @Override + public GetDbsqlPermissionRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetDbsqlPermissionRequestPb pb = mapper.readValue(p, GetDbsqlPermissionRequestPb.class); + return GetDbsqlPermissionRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequestPb.java new file mode 100755 index 000000000..7235dd99a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequestPb.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get object ACL */ +@Generated +class GetDbsqlPermissionRequestPb { + @JsonIgnore private String objectId; + + @JsonIgnore private ObjectTypePlural objectType; + + public GetDbsqlPermissionRequestPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetDbsqlPermissionRequestPb setObjectType(ObjectTypePlural objectType) { + this.objectType = objectType; + return this; + } + + public ObjectTypePlural getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDbsqlPermissionRequestPb that = (GetDbsqlPermissionRequestPb) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetDbsqlPermissionRequestPb.class) + .add("objectId", objectId) + .add("objectType", 
objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequest.java index ee55a591e..ab6808169 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a query definition. 
*/ @Generated +@JsonSerialize(using = GetQueriesLegacyRequest.GetQueriesLegacyRequestSerializer.class) +@JsonDeserialize(using = GetQueriesLegacyRequest.GetQueriesLegacyRequestDeserializer.class) public class GetQueriesLegacyRequest { /** */ - @JsonIgnore private String queryId; + private String queryId; public GetQueriesLegacyRequest setQueryId(String queryId) { this.queryId = queryId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetQueriesLegacyRequest.class).add("queryId", queryId).toString(); } + + GetQueriesLegacyRequestPb toPb() { + GetQueriesLegacyRequestPb pb = new GetQueriesLegacyRequestPb(); + pb.setQueryId(queryId); + + return pb; + } + + static GetQueriesLegacyRequest fromPb(GetQueriesLegacyRequestPb pb) { + GetQueriesLegacyRequest model = new GetQueriesLegacyRequest(); + model.setQueryId(pb.getQueryId()); + + return model; + } + + public static class GetQueriesLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetQueriesLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQueriesLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQueriesLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public GetQueriesLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQueriesLegacyRequestPb pb = mapper.readValue(p, GetQueriesLegacyRequestPb.class); + return GetQueriesLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequestPb.java new file mode 100755 index 000000000..2165a7076 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueriesLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a query definition. */ +@Generated +class GetQueriesLegacyRequestPb { + @JsonIgnore private String queryId; + + public GetQueriesLegacyRequestPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQueriesLegacyRequestPb that = (GetQueriesLegacyRequestPb) o; + return Objects.equals(queryId, that.queryId); + } + + @Override + public int hashCode() { + return Objects.hash(queryId); + } + + @Override + public String toString() { + return new ToStringer(GetQueriesLegacyRequestPb.class).add("queryId", queryId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequest.java index 57203d90c..1b118edeb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a query */ @Generated +@JsonSerialize(using = GetQueryRequest.GetQueryRequestSerializer.class) +@JsonDeserialize(using = GetQueryRequest.GetQueryRequestDeserializer.class) public class GetQueryRequest { /** */ - @JsonIgnore private String id; + private String id; public GetQueryRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetQueryRequest.class).add("id", id).toString(); } + + GetQueryRequestPb toPb() { + GetQueryRequestPb pb = new GetQueryRequestPb(); + pb.setId(id); + + return pb; + } + + static GetQueryRequest fromPb(GetQueryRequestPb pb) { + GetQueryRequest model = new GetQueryRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetQueryRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetQueryRequestDeserializer extends JsonDeserializer { + @Override + public GetQueryRequest deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetQueryRequestPb pb = mapper.readValue(p, GetQueryRequestPb.class); + return GetQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequestPb.java new file mode 100755 index 000000000..ef3d68e69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetQueryRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a query */ +@Generated +class GetQueryRequestPb { + @JsonIgnore private String id; + + public GetQueryRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQueryRequestPb that = (GetQueryRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetQueryRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponse.java index 4b52529ce..874429e70 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponse.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = GetResponse.GetResponseSerializer.class) +@JsonDeserialize(using = GetResponse.GetResponseDeserializer.class) public class GetResponse { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** An object's type and UUID, separated by a forward slash (/) character. */ - @JsonProperty("object_id") private String objectId; /** A singular noun object type. 
*/ - @JsonProperty("object_type") private ObjectType objectType; public GetResponse setAccessControlList(Collection accessControlList) { @@ -72,4 +80,41 @@ public String toString() { .add("objectType", objectType) .toString(); } + + GetResponsePb toPb() { + GetResponsePb pb = new GetResponsePb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static GetResponse fromPb(GetResponsePb pb) { + GetResponse model = new GetResponse(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class GetResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetResponseDeserializer extends JsonDeserializer { + @Override + public GetResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetResponsePb pb = mapper.readValue(p, GetResponsePb.class); + return GetResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponsePb.java new file mode 100755 index 000000000..c1f8010c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetResponsePb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private ObjectType objectType; + + public GetResponsePb setAccessControlList(Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public GetResponsePb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetResponsePb setObjectType(ObjectType objectType) { + this.objectType = objectType; + return this; + } + + public ObjectType getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetResponsePb that = (GetResponsePb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetResponsePb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequest.java index dee9724be..d4a4541ff 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequest.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get status, manifest, and result first chunk */ @Generated +@JsonSerialize(using = GetStatementRequest.GetStatementRequestSerializer.class) +@JsonDeserialize(using = GetStatementRequest.GetStatementRequestDeserializer.class) public class GetStatementRequest { /** * The statement ID is returned upon successfully submitting a SQL statement, and is a required * reference for all subsequent calls. 
*/ - @JsonIgnore private String statementId; + private String statementId; public GetStatementRequest setStatementId(String statementId) { this.statementId = statementId; @@ -42,4 +53,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetStatementRequest.class).add("statementId", statementId).toString(); } + + GetStatementRequestPb toPb() { + GetStatementRequestPb pb = new GetStatementRequestPb(); + pb.setStatementId(statementId); + + return pb; + } + + static GetStatementRequest fromPb(GetStatementRequestPb pb) { + GetStatementRequest model = new GetStatementRequest(); + model.setStatementId(pb.getStatementId()); + + return model; + } + + public static class GetStatementRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetStatementRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStatementRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStatementRequestDeserializer + extends JsonDeserializer { + @Override + public GetStatementRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStatementRequestPb pb = mapper.readValue(p, GetStatementRequestPb.class); + return GetStatementRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequestPb.java new file mode 100755 index 000000000..9b9dbfda0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get status, manifest, and result first chunk */ +@Generated +class GetStatementRequestPb { + @JsonIgnore private String statementId; + + public GetStatementRequestPb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStatementRequestPb that = (GetStatementRequestPb) o; + return Objects.equals(statementId, that.statementId); + } + + @Override + public int hashCode() { + return Objects.hash(statementId); + } + + @Override + public String toString() { + return new ToStringer(GetStatementRequestPb.class).add("statementId", statementId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequest.java index bebbc06b7..8be57c596 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequest.java @@ -4,20 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get result chunk by index */ @Generated +@JsonSerialize( + using = GetStatementResultChunkNRequest.GetStatementResultChunkNRequestSerializer.class) +@JsonDeserialize( + using = GetStatementResultChunkNRequest.GetStatementResultChunkNRequestDeserializer.class) public class GetStatementResultChunkNRequest { /** */ - @JsonIgnore private Long chunkIndex; + private Long chunkIndex; /** * The statement ID is returned upon successfully submitting a SQL statement, and is a required * reference for all subsequent calls. */ - @JsonIgnore private String statementId; + private String statementId; public GetStatementResultChunkNRequest setChunkIndex(Long chunkIndex) { this.chunkIndex = chunkIndex; @@ -58,4 +71,44 @@ public String toString() { .add("statementId", statementId) .toString(); } + + GetStatementResultChunkNRequestPb toPb() { + GetStatementResultChunkNRequestPb pb = new GetStatementResultChunkNRequestPb(); + pb.setChunkIndex(chunkIndex); + pb.setStatementId(statementId); + + return pb; + } + + static GetStatementResultChunkNRequest fromPb(GetStatementResultChunkNRequestPb pb) { + GetStatementResultChunkNRequest model = new GetStatementResultChunkNRequest(); + model.setChunkIndex(pb.getChunkIndex()); + model.setStatementId(pb.getStatementId()); + + return model; + } + + public static class GetStatementResultChunkNRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetStatementResultChunkNRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStatementResultChunkNRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStatementResultChunkNRequestDeserializer + extends JsonDeserializer { + @Override + public 
GetStatementResultChunkNRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStatementResultChunkNRequestPb pb = + mapper.readValue(p, GetStatementResultChunkNRequestPb.class); + return GetStatementResultChunkNRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequestPb.java new file mode 100755 index 000000000..6f8f667e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetStatementResultChunkNRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get result chunk by index */ +@Generated +class GetStatementResultChunkNRequestPb { + @JsonIgnore private Long chunkIndex; + + @JsonIgnore private String statementId; + + public GetStatementResultChunkNRequestPb setChunkIndex(Long chunkIndex) { + this.chunkIndex = chunkIndex; + return this; + } + + public Long getChunkIndex() { + return chunkIndex; + } + + public GetStatementResultChunkNRequestPb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStatementResultChunkNRequestPb that = (GetStatementResultChunkNRequestPb) o; + return Objects.equals(chunkIndex, that.chunkIndex) + && Objects.equals(statementId, 
that.statementId); + } + + @Override + public int hashCode() { + return Objects.hash(chunkIndex, statementId); + } + + @Override + public String toString() { + return new ToStringer(GetStatementResultChunkNRequestPb.class) + .add("chunkIndex", chunkIndex) + .add("statementId", statementId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequest.java index 6e88fc62c..e72a3e9bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequest.java @@ -4,14 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get SQL warehouse permission levels */ @Generated +@JsonSerialize( + using = GetWarehousePermissionLevelsRequest.GetWarehousePermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = + GetWarehousePermissionLevelsRequest.GetWarehousePermissionLevelsRequestDeserializer.class) public class GetWarehousePermissionLevelsRequest { /** The SQL warehouse for which to get or manage permissions. 
*/ - @JsonIgnore private String warehouseId; + private String warehouseId; public GetWarehousePermissionLevelsRequest setWarehouseId(String warehouseId) { this.warehouseId = warehouseId; @@ -41,4 +55,42 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + GetWarehousePermissionLevelsRequestPb toPb() { + GetWarehousePermissionLevelsRequestPb pb = new GetWarehousePermissionLevelsRequestPb(); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static GetWarehousePermissionLevelsRequest fromPb(GetWarehousePermissionLevelsRequestPb pb) { + GetWarehousePermissionLevelsRequest model = new GetWarehousePermissionLevelsRequest(); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class GetWarehousePermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWarehousePermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWarehousePermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWarehousePermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetWarehousePermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWarehousePermissionLevelsRequestPb pb = + mapper.readValue(p, GetWarehousePermissionLevelsRequestPb.class); + return GetWarehousePermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequestPb.java new file mode 100755 index 000000000..22892f535 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get SQL warehouse permission levels */ +@Generated +class GetWarehousePermissionLevelsRequestPb { + @JsonIgnore private String warehouseId; + + public GetWarehousePermissionLevelsRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWarehousePermissionLevelsRequestPb that = (GetWarehousePermissionLevelsRequestPb) o; + return Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(warehouseId); + } + + @Override + public String toString() { + return new ToStringer(GetWarehousePermissionLevelsRequestPb.class) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponse.java index ca890fbfc..c664f4846 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponse.java @@ -4,14 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetWarehousePermissionLevelsResponse.GetWarehousePermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetWarehousePermissionLevelsResponse.GetWarehousePermissionLevelsResponseDeserializer.class) public class GetWarehousePermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetWarehousePermissionLevelsResponse setPermissionLevels( @@ -43,4 +57,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetWarehousePermissionLevelsResponsePb toPb() { + GetWarehousePermissionLevelsResponsePb pb = new GetWarehousePermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetWarehousePermissionLevelsResponse 
fromPb(GetWarehousePermissionLevelsResponsePb pb) { + GetWarehousePermissionLevelsResponse model = new GetWarehousePermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetWarehousePermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWarehousePermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWarehousePermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWarehousePermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetWarehousePermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWarehousePermissionLevelsResponsePb pb = + mapper.readValue(p, GetWarehousePermissionLevelsResponsePb.class); + return GetWarehousePermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponsePb.java new file mode 100755 index 000000000..7ce6c3c2f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetWarehousePermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetWarehousePermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWarehousePermissionLevelsResponsePb that = (GetWarehousePermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetWarehousePermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequest.java index e5973ba12..2a91819d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get SQL warehouse permissions */ @Generated +@JsonSerialize( + using = GetWarehousePermissionsRequest.GetWarehousePermissionsRequestSerializer.class) +@JsonDeserialize( + using = GetWarehousePermissionsRequest.GetWarehousePermissionsRequestDeserializer.class) public class GetWarehousePermissionsRequest { /** The SQL warehouse for which to get or manage permissions. */ - @JsonIgnore private String warehouseId; + private String warehouseId; public GetWarehousePermissionsRequest setWarehouseId(String warehouseId) { this.warehouseId = warehouseId; @@ -41,4 +54,42 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + GetWarehousePermissionsRequestPb toPb() { + GetWarehousePermissionsRequestPb pb = new GetWarehousePermissionsRequestPb(); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static GetWarehousePermissionsRequest fromPb(GetWarehousePermissionsRequestPb pb) { + GetWarehousePermissionsRequest model = new GetWarehousePermissionsRequest(); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class GetWarehousePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWarehousePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWarehousePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWarehousePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetWarehousePermissionsRequest deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWarehousePermissionsRequestPb pb = + mapper.readValue(p, GetWarehousePermissionsRequestPb.class); + return GetWarehousePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequestPb.java new file mode 100755 index 000000000..b12242fc3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehousePermissionsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get SQL warehouse permissions */ +@Generated +class GetWarehousePermissionsRequestPb { + @JsonIgnore private String warehouseId; + + public GetWarehousePermissionsRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWarehousePermissionsRequestPb that = (GetWarehousePermissionsRequestPb) o; + return Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(warehouseId); + } + + @Override + public String toString() { + return new ToStringer(GetWarehousePermissionsRequestPb.class) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequest.java index b37f5204c..fba5467a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get warehouse info */ @Generated +@JsonSerialize(using = GetWarehouseRequest.GetWarehouseRequestSerializer.class) +@JsonDeserialize(using = GetWarehouseRequest.GetWarehouseRequestDeserializer.class) public class GetWarehouseRequest { /** Required. Id of the SQL warehouse. 
*/ - @JsonIgnore private String id; + private String id; public GetWarehouseRequest setId(String id) { this.id = id; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(GetWarehouseRequest.class).add("id", id).toString(); } + + GetWarehouseRequestPb toPb() { + GetWarehouseRequestPb pb = new GetWarehouseRequestPb(); + pb.setId(id); + + return pb; + } + + static GetWarehouseRequest fromPb(GetWarehouseRequestPb pb) { + GetWarehouseRequest model = new GetWarehouseRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class GetWarehouseRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetWarehouseRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWarehouseRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWarehouseRequestDeserializer + extends JsonDeserializer { + @Override + public GetWarehouseRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWarehouseRequestPb pb = mapper.readValue(p, GetWarehouseRequestPb.class); + return GetWarehouseRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequestPb.java new file mode 100755 index 000000000..7ecef020b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get warehouse info */ +@Generated +class GetWarehouseRequestPb { + @JsonIgnore private String id; + + public GetWarehouseRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWarehouseRequestPb that = (GetWarehouseRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetWarehouseRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java index e0a414c5b..40f33996d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetWarehouseResponse.GetWarehouseResponseSerializer.class) +@JsonDeserialize(using = GetWarehouseResponse.GetWarehouseResponseDeserializer.class) public class GetWarehouseResponse { /** * The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) @@ -17,11 +28,9 @@ public class GetWarehouseResponse { * *

Defaults to 120 mins */ - @JsonProperty("auto_stop_mins") private Long autoStopMins; /** Channel Details */ - @JsonProperty("channel") private Channel channel; /** @@ -32,11 +41,9 @@ public class GetWarehouseResponse { *

Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - * 3X-Large - 4X-Large */ - @JsonProperty("cluster_size") private String clusterSize; /** warehouse creator name */ - @JsonProperty("creator_name") private String creatorName; /** @@ -44,27 +51,21 @@ public class GetWarehouseResponse { * *

Defaults to false. */ - @JsonProperty("enable_photon") private Boolean enablePhoton; /** Configures whether the warehouse should use serverless compute */ - @JsonProperty("enable_serverless_compute") private Boolean enableServerlessCompute; /** Optional health status. Assume the warehouse is healthy if this field is not set. */ - @JsonProperty("health") private EndpointHealth health; /** unique identifier for warehouse */ - @JsonProperty("id") private String id; /** Deprecated. Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** the jdbc connection string for this warehouse */ - @JsonProperty("jdbc_url") private String jdbcUrl; /** @@ -74,7 +75,6 @@ public class GetWarehouseResponse { * *

Defaults to min_clusters if unset. */ - @JsonProperty("max_num_clusters") private Long maxNumClusters; /** @@ -87,7 +87,6 @@ public class GetWarehouseResponse { * *

Defaults to 1 */ - @JsonProperty("min_num_clusters") private Long minNumClusters; /** @@ -95,27 +94,21 @@ public class GetWarehouseResponse { * *

Supported values: - Must be unique within an org. - Must be less than 100 characters. */ - @JsonProperty("name") private String name; /** Deprecated. current number of active sessions for the warehouse */ - @JsonProperty("num_active_sessions") private Long numActiveSessions; /** current number of clusters running for the service */ - @JsonProperty("num_clusters") private Long numClusters; /** ODBC parameters for the SQL warehouse */ - @JsonProperty("odbc_params") private OdbcParams odbcParams; /** Configurations whether the warehouse should use spot instances. */ - @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; /** State of the warehouse */ - @JsonProperty("state") private State state; /** @@ -124,14 +117,12 @@ public class GetWarehouseResponse { * *

Supported values: - Number of tags < 45. */ - @JsonProperty("tags") private EndpointTags tags; /** * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to * `PRO` and also set the field `enable_serverless_compute` to `true`. */ - @JsonProperty("warehouse_type") private GetWarehouseResponseWarehouseType warehouseType; public GetWarehouseResponse setAutoStopMins(Long autoStopMins) { @@ -391,4 +382,78 @@ public String toString() { .add("warehouseType", warehouseType) .toString(); } + + GetWarehouseResponsePb toPb() { + GetWarehouseResponsePb pb = new GetWarehouseResponsePb(); + pb.setAutoStopMins(autoStopMins); + pb.setChannel(channel); + pb.setClusterSize(clusterSize); + pb.setCreatorName(creatorName); + pb.setEnablePhoton(enablePhoton); + pb.setEnableServerlessCompute(enableServerlessCompute); + pb.setHealth(health); + pb.setId(id); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setJdbcUrl(jdbcUrl); + pb.setMaxNumClusters(maxNumClusters); + pb.setMinNumClusters(minNumClusters); + pb.setName(name); + pb.setNumActiveSessions(numActiveSessions); + pb.setNumClusters(numClusters); + pb.setOdbcParams(odbcParams); + pb.setSpotInstancePolicy(spotInstancePolicy); + pb.setState(state); + pb.setTags(tags); + pb.setWarehouseType(warehouseType); + + return pb; + } + + static GetWarehouseResponse fromPb(GetWarehouseResponsePb pb) { + GetWarehouseResponse model = new GetWarehouseResponse(); + model.setAutoStopMins(pb.getAutoStopMins()); + model.setChannel(pb.getChannel()); + model.setClusterSize(pb.getClusterSize()); + model.setCreatorName(pb.getCreatorName()); + model.setEnablePhoton(pb.getEnablePhoton()); + model.setEnableServerlessCompute(pb.getEnableServerlessCompute()); + model.setHealth(pb.getHealth()); + model.setId(pb.getId()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setJdbcUrl(pb.getJdbcUrl()); + model.setMaxNumClusters(pb.getMaxNumClusters()); + model.setMinNumClusters(pb.getMinNumClusters()); + 
model.setName(pb.getName()); + model.setNumActiveSessions(pb.getNumActiveSessions()); + model.setNumClusters(pb.getNumClusters()); + model.setOdbcParams(pb.getOdbcParams()); + model.setSpotInstancePolicy(pb.getSpotInstancePolicy()); + model.setState(pb.getState()); + model.setTags(pb.getTags()); + model.setWarehouseType(pb.getWarehouseType()); + + return model; + } + + public static class GetWarehouseResponseSerializer extends JsonSerializer { + @Override + public void serialize( + GetWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public GetWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWarehouseResponsePb pb = mapper.readValue(p, GetWarehouseResponsePb.class); + return GetWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponsePb.java new file mode 100755 index 000000000..3fb8177e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponsePb.java @@ -0,0 +1,329 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetWarehouseResponsePb { + @JsonProperty("auto_stop_mins") + private Long autoStopMins; + + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("cluster_size") + private String clusterSize; + + @JsonProperty("creator_name") + private String creatorName; + + @JsonProperty("enable_photon") + private Boolean enablePhoton; + + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + + @JsonProperty("health") + private EndpointHealth health; + + @JsonProperty("id") + private String id; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("jdbc_url") + private String jdbcUrl; + + @JsonProperty("max_num_clusters") + private Long maxNumClusters; + + @JsonProperty("min_num_clusters") + private Long minNumClusters; + + @JsonProperty("name") + private String name; + + @JsonProperty("num_active_sessions") + private Long numActiveSessions; + + @JsonProperty("num_clusters") + private Long numClusters; + + @JsonProperty("odbc_params") + private OdbcParams odbcParams; + + @JsonProperty("spot_instance_policy") + private SpotInstancePolicy spotInstancePolicy; + + @JsonProperty("state") + private State state; + + @JsonProperty("tags") + private EndpointTags tags; + + @JsonProperty("warehouse_type") + private GetWarehouseResponseWarehouseType warehouseType; + + public GetWarehouseResponsePb setAutoStopMins(Long autoStopMins) { + this.autoStopMins = autoStopMins; + return this; + } + + public Long getAutoStopMins() { + return autoStopMins; + } + + public GetWarehouseResponsePb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public GetWarehouseResponsePb 
setClusterSize(String clusterSize) { + this.clusterSize = clusterSize; + return this; + } + + public String getClusterSize() { + return clusterSize; + } + + public GetWarehouseResponsePb setCreatorName(String creatorName) { + this.creatorName = creatorName; + return this; + } + + public String getCreatorName() { + return creatorName; + } + + public GetWarehouseResponsePb setEnablePhoton(Boolean enablePhoton) { + this.enablePhoton = enablePhoton; + return this; + } + + public Boolean getEnablePhoton() { + return enablePhoton; + } + + public GetWarehouseResponsePb setEnableServerlessCompute(Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + + public GetWarehouseResponsePb setHealth(EndpointHealth health) { + this.health = health; + return this; + } + + public EndpointHealth getHealth() { + return health; + } + + public GetWarehouseResponsePb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GetWarehouseResponsePb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public GetWarehouseResponsePb setJdbcUrl(String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + return this; + } + + public String getJdbcUrl() { + return jdbcUrl; + } + + public GetWarehouseResponsePb setMaxNumClusters(Long maxNumClusters) { + this.maxNumClusters = maxNumClusters; + return this; + } + + public Long getMaxNumClusters() { + return maxNumClusters; + } + + public GetWarehouseResponsePb setMinNumClusters(Long minNumClusters) { + this.minNumClusters = minNumClusters; + return this; + } + + public Long getMinNumClusters() { + return minNumClusters; + } + + public GetWarehouseResponsePb setName(String name) { + this.name = name; + return this; + } + + 
public String getName() { + return name; + } + + public GetWarehouseResponsePb setNumActiveSessions(Long numActiveSessions) { + this.numActiveSessions = numActiveSessions; + return this; + } + + public Long getNumActiveSessions() { + return numActiveSessions; + } + + public GetWarehouseResponsePb setNumClusters(Long numClusters) { + this.numClusters = numClusters; + return this; + } + + public Long getNumClusters() { + return numClusters; + } + + public GetWarehouseResponsePb setOdbcParams(OdbcParams odbcParams) { + this.odbcParams = odbcParams; + return this; + } + + public OdbcParams getOdbcParams() { + return odbcParams; + } + + public GetWarehouseResponsePb setSpotInstancePolicy(SpotInstancePolicy spotInstancePolicy) { + this.spotInstancePolicy = spotInstancePolicy; + return this; + } + + public SpotInstancePolicy getSpotInstancePolicy() { + return spotInstancePolicy; + } + + public GetWarehouseResponsePb setState(State state) { + this.state = state; + return this; + } + + public State getState() { + return state; + } + + public GetWarehouseResponsePb setTags(EndpointTags tags) { + this.tags = tags; + return this; + } + + public EndpointTags getTags() { + return tags; + } + + public GetWarehouseResponsePb setWarehouseType(GetWarehouseResponseWarehouseType warehouseType) { + this.warehouseType = warehouseType; + return this; + } + + public GetWarehouseResponseWarehouseType getWarehouseType() { + return warehouseType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWarehouseResponsePb that = (GetWarehouseResponsePb) o; + return Objects.equals(autoStopMins, that.autoStopMins) + && Objects.equals(channel, that.channel) + && Objects.equals(clusterSize, that.clusterSize) + && Objects.equals(creatorName, that.creatorName) + && Objects.equals(enablePhoton, that.enablePhoton) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) + && 
Objects.equals(health, that.health) + && Objects.equals(id, that.id) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(jdbcUrl, that.jdbcUrl) + && Objects.equals(maxNumClusters, that.maxNumClusters) + && Objects.equals(minNumClusters, that.minNumClusters) + && Objects.equals(name, that.name) + && Objects.equals(numActiveSessions, that.numActiveSessions) + && Objects.equals(numClusters, that.numClusters) + && Objects.equals(odbcParams, that.odbcParams) + && Objects.equals(spotInstancePolicy, that.spotInstancePolicy) + && Objects.equals(state, that.state) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseType, that.warehouseType); + } + + @Override + public int hashCode() { + return Objects.hash( + autoStopMins, + channel, + clusterSize, + creatorName, + enablePhoton, + enableServerlessCompute, + health, + id, + instanceProfileArn, + jdbcUrl, + maxNumClusters, + minNumClusters, + name, + numActiveSessions, + numClusters, + odbcParams, + spotInstancePolicy, + state, + tags, + warehouseType); + } + + @Override + public String toString() { + return new ToStringer(GetWarehouseResponsePb.class) + .add("autoStopMins", autoStopMins) + .add("channel", channel) + .add("clusterSize", clusterSize) + .add("creatorName", creatorName) + .add("enablePhoton", enablePhoton) + .add("enableServerlessCompute", enableServerlessCompute) + .add("health", health) + .add("id", id) + .add("instanceProfileArn", instanceProfileArn) + .add("jdbcUrl", jdbcUrl) + .add("maxNumClusters", maxNumClusters) + .add("minNumClusters", minNumClusters) + .add("name", name) + .add("numActiveSessions", numActiveSessions) + .add("numClusters", numClusters) + .add("odbcParams", odbcParams) + .add("spotInstancePolicy", spotInstancePolicy) + .add("state", state) + .add("tags", tags) + .add("warehouseType", warehouseType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java index 17524f5d2..6b6588994 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java @@ -4,25 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetWorkspaceWarehouseConfigResponse.GetWorkspaceWarehouseConfigResponseSerializer.class) +@JsonDeserialize( + using = + GetWorkspaceWarehouseConfigResponse.GetWorkspaceWarehouseConfigResponseDeserializer.class) public class GetWorkspaceWarehouseConfigResponse { /** Optional: Channel selection details */ - @JsonProperty("channel") private Channel channel; /** Deprecated: Use sql_configuration_parameters */ - @JsonProperty("config_param") private RepeatedEndpointConfPairs configParam; /** * Spark confs for external hive metastore configuration JSON serialized size must be less than <= * 512K */ - @JsonProperty("data_access_config") private Collection dataAccessConfig; /** @@ -32,27 +43,21 @@ public class GetWorkspaceWarehouseConfigResponse { * to be converted to another type. 
Used by frontend to save specific type availability in the * warehouse create and edit form UI. */ - @JsonProperty("enabled_warehouse_types") private Collection enabledWarehouseTypes; /** Deprecated: Use sql_configuration_parameters */ - @JsonProperty("global_param") private RepeatedEndpointConfPairs globalParam; /** GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage */ - @JsonProperty("google_service_account") private String googleServiceAccount; /** AWS Only: Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** Security policy for warehouses */ - @JsonProperty("security_policy") private GetWorkspaceWarehouseConfigResponseSecurityPolicy securityPolicy; /** SQL configuration parameters */ - @JsonProperty("sql_configuration_parameters") private RepeatedEndpointConfPairs sqlConfigurationParameters; public GetWorkspaceWarehouseConfigResponse setChannel(Channel channel) { @@ -184,4 +189,58 @@ public String toString() { .add("sqlConfigurationParameters", sqlConfigurationParameters) .toString(); } + + GetWorkspaceWarehouseConfigResponsePb toPb() { + GetWorkspaceWarehouseConfigResponsePb pb = new GetWorkspaceWarehouseConfigResponsePb(); + pb.setChannel(channel); + pb.setConfigParam(configParam); + pb.setDataAccessConfig(dataAccessConfig); + pb.setEnabledWarehouseTypes(enabledWarehouseTypes); + pb.setGlobalParam(globalParam); + pb.setGoogleServiceAccount(googleServiceAccount); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setSecurityPolicy(securityPolicy); + pb.setSqlConfigurationParameters(sqlConfigurationParameters); + + return pb; + } + + static GetWorkspaceWarehouseConfigResponse fromPb(GetWorkspaceWarehouseConfigResponsePb pb) { + GetWorkspaceWarehouseConfigResponse model = new GetWorkspaceWarehouseConfigResponse(); + model.setChannel(pb.getChannel()); + model.setConfigParam(pb.getConfigParam()); + 
model.setDataAccessConfig(pb.getDataAccessConfig()); + model.setEnabledWarehouseTypes(pb.getEnabledWarehouseTypes()); + model.setGlobalParam(pb.getGlobalParam()); + model.setGoogleServiceAccount(pb.getGoogleServiceAccount()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setSecurityPolicy(pb.getSecurityPolicy()); + model.setSqlConfigurationParameters(pb.getSqlConfigurationParameters()); + + return model; + } + + public static class GetWorkspaceWarehouseConfigResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceWarehouseConfigResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceWarehouseConfigResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceWarehouseConfigResponseDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceWarehouseConfigResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceWarehouseConfigResponsePb pb = + mapper.readValue(p, GetWorkspaceWarehouseConfigResponsePb.class); + return GetWorkspaceWarehouseConfigResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponsePb.java new file mode 100755 index 000000000..6f97660c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponsePb.java @@ -0,0 +1,172 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetWorkspaceWarehouseConfigResponsePb { + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("config_param") + private RepeatedEndpointConfPairs configParam; + + @JsonProperty("data_access_config") + private Collection dataAccessConfig; + + @JsonProperty("enabled_warehouse_types") + private Collection enabledWarehouseTypes; + + @JsonProperty("global_param") + private RepeatedEndpointConfPairs globalParam; + + @JsonProperty("google_service_account") + private String googleServiceAccount; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("security_policy") + private GetWorkspaceWarehouseConfigResponseSecurityPolicy securityPolicy; + + @JsonProperty("sql_configuration_parameters") + private RepeatedEndpointConfPairs sqlConfigurationParameters; + + public GetWorkspaceWarehouseConfigResponsePb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public GetWorkspaceWarehouseConfigResponsePb setConfigParam( + RepeatedEndpointConfPairs configParam) { + this.configParam = configParam; + return this; + } + + public RepeatedEndpointConfPairs getConfigParam() { + return configParam; + } + + public GetWorkspaceWarehouseConfigResponsePb setDataAccessConfig( + Collection dataAccessConfig) { + this.dataAccessConfig = dataAccessConfig; + return this; + } + + public Collection getDataAccessConfig() { + return dataAccessConfig; + } + + public GetWorkspaceWarehouseConfigResponsePb setEnabledWarehouseTypes( + Collection enabledWarehouseTypes) { + this.enabledWarehouseTypes = enabledWarehouseTypes; + return this; + } + + public Collection getEnabledWarehouseTypes() { + return 
enabledWarehouseTypes; + } + + public GetWorkspaceWarehouseConfigResponsePb setGlobalParam( + RepeatedEndpointConfPairs globalParam) { + this.globalParam = globalParam; + return this; + } + + public RepeatedEndpointConfPairs getGlobalParam() { + return globalParam; + } + + public GetWorkspaceWarehouseConfigResponsePb setGoogleServiceAccount( + String googleServiceAccount) { + this.googleServiceAccount = googleServiceAccount; + return this; + } + + public String getGoogleServiceAccount() { + return googleServiceAccount; + } + + public GetWorkspaceWarehouseConfigResponsePb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public GetWorkspaceWarehouseConfigResponsePb setSecurityPolicy( + GetWorkspaceWarehouseConfigResponseSecurityPolicy securityPolicy) { + this.securityPolicy = securityPolicy; + return this; + } + + public GetWorkspaceWarehouseConfigResponseSecurityPolicy getSecurityPolicy() { + return securityPolicy; + } + + public GetWorkspaceWarehouseConfigResponsePb setSqlConfigurationParameters( + RepeatedEndpointConfPairs sqlConfigurationParameters) { + this.sqlConfigurationParameters = sqlConfigurationParameters; + return this; + } + + public RepeatedEndpointConfPairs getSqlConfigurationParameters() { + return sqlConfigurationParameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceWarehouseConfigResponsePb that = (GetWorkspaceWarehouseConfigResponsePb) o; + return Objects.equals(channel, that.channel) + && Objects.equals(configParam, that.configParam) + && Objects.equals(dataAccessConfig, that.dataAccessConfig) + && Objects.equals(enabledWarehouseTypes, that.enabledWarehouseTypes) + && Objects.equals(globalParam, that.globalParam) + && Objects.equals(googleServiceAccount, that.googleServiceAccount) + 
&& Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(securityPolicy, that.securityPolicy) + && Objects.equals(sqlConfigurationParameters, that.sqlConfigurationParameters); + } + + @Override + public int hashCode() { + return Objects.hash( + channel, + configParam, + dataAccessConfig, + enabledWarehouseTypes, + globalParam, + googleServiceAccount, + instanceProfileArn, + securityPolicy, + sqlConfigurationParameters); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceWarehouseConfigResponsePb.class) + .add("channel", channel) + .add("configParam", configParam) + .add("dataAccessConfig", dataAccessConfig) + .add("enabledWarehouseTypes", enabledWarehouseTypes) + .add("globalParam", globalParam) + .add("googleServiceAccount", googleServiceAccount) + .add("instanceProfileArn", instanceProfileArn) + .add("securityPolicy", securityPolicy) + .add("sqlConfigurationParameters", sqlConfigurationParameters) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlert.java index ebb5709e2..f8a488020 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlert.java @@ -4,44 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = LegacyAlert.LegacyAlertSerializer.class) +@JsonDeserialize(using = LegacyAlert.LegacyAlertDeserializer.class) public class LegacyAlert { /** Timestamp when the alert was created. */ - @JsonProperty("created_at") private String createdAt; /** Alert ID. */ - @JsonProperty("id") private String id; /** Timestamp when the alert was last triggered. */ - @JsonProperty("last_triggered_at") private String lastTriggeredAt; /** Name of the alert. */ - @JsonProperty("name") private String name; /** Alert configuration options. */ - @JsonProperty("options") private AlertOptions options; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** */ - @JsonProperty("query") private AlertQuery query; /** * Number of seconds after being triggered before the alert rearms itself and can be triggered * again. If `null`, alert will never be triggered again. */ - @JsonProperty("rearm") private Long rearm; /** @@ -49,15 +52,12 @@ public class LegacyAlert { * (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger * conditions). */ - @JsonProperty("state") private LegacyAlertState state; /** Timestamp when the alert was last updated. 
*/ - @JsonProperty("updated_at") private String updatedAt; /** */ - @JsonProperty("user") private User user; public LegacyAlert setCreatedAt(String createdAt) { @@ -209,4 +209,57 @@ public String toString() { .add("user", user) .toString(); } + + LegacyAlertPb toPb() { + LegacyAlertPb pb = new LegacyAlertPb(); + pb.setCreatedAt(createdAt); + pb.setId(id); + pb.setLastTriggeredAt(lastTriggeredAt); + pb.setName(name); + pb.setOptions(options); + pb.setParent(parent); + pb.setQuery(query); + pb.setRearm(rearm); + pb.setState(state); + pb.setUpdatedAt(updatedAt); + pb.setUser(user); + + return pb; + } + + static LegacyAlert fromPb(LegacyAlertPb pb) { + LegacyAlert model = new LegacyAlert(); + model.setCreatedAt(pb.getCreatedAt()); + model.setId(pb.getId()); + model.setLastTriggeredAt(pb.getLastTriggeredAt()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setParent(pb.getParent()); + model.setQuery(pb.getQuery()); + model.setRearm(pb.getRearm()); + model.setState(pb.getState()); + model.setUpdatedAt(pb.getUpdatedAt()); + model.setUser(pb.getUser()); + + return model; + } + + public static class LegacyAlertSerializer extends JsonSerializer { + @Override + public void serialize(LegacyAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LegacyAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LegacyAlertDeserializer extends JsonDeserializer { + @Override + public LegacyAlert deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LegacyAlertPb pb = mapper.readValue(p, LegacyAlertPb.class); + return LegacyAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlertPb.java new file mode 100755 index 000000000..aafa86e24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyAlertPb.java @@ -0,0 +1,194 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class LegacyAlertPb { + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_triggered_at") + private String lastTriggeredAt; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private AlertOptions options; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("query") + private AlertQuery query; + + @JsonProperty("rearm") + private Long rearm; + + @JsonProperty("state") + private LegacyAlertState state; + + @JsonProperty("updated_at") + private String updatedAt; + + @JsonProperty("user") + private User user; + + public LegacyAlertPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + } + + public String getCreatedAt() { + return createdAt; + } + + public LegacyAlertPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public LegacyAlertPb setLastTriggeredAt(String lastTriggeredAt) { + this.lastTriggeredAt = lastTriggeredAt; + return this; + } + + public String getLastTriggeredAt() { + return lastTriggeredAt; + } + + public LegacyAlertPb setName(String name) { 
+ this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LegacyAlertPb setOptions(AlertOptions options) { + this.options = options; + return this; + } + + public AlertOptions getOptions() { + return options; + } + + public LegacyAlertPb setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public LegacyAlertPb setQuery(AlertQuery query) { + this.query = query; + return this; + } + + public AlertQuery getQuery() { + return query; + } + + public LegacyAlertPb setRearm(Long rearm) { + this.rearm = rearm; + return this; + } + + public Long getRearm() { + return rearm; + } + + public LegacyAlertPb setState(LegacyAlertState state) { + this.state = state; + return this; + } + + public LegacyAlertState getState() { + return state; + } + + public LegacyAlertPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + public LegacyAlertPb setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LegacyAlertPb that = (LegacyAlertPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(id, that.id) + && Objects.equals(lastTriggeredAt, that.lastTriggeredAt) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(parent, that.parent) + && Objects.equals(query, that.query) + && Objects.equals(rearm, that.rearm) + && Objects.equals(state, that.state) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, + id, + lastTriggeredAt, + name, + options, + parent, + query, + rearm, + state, + updatedAt, + user); + } + + @Override 
+ public String toString() { + return new ToStringer(LegacyAlertPb.class) + .add("createdAt", createdAt) + .add("id", id) + .add("lastTriggeredAt", lastTriggeredAt) + .add("name", name) + .add("options", options) + .add("parent", parent) + .add("query", query) + .add("rearm", rearm) + .add("state", state) + .add("updatedAt", updatedAt) + .add("user", user) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQuery.java index db6b097a6..9472298dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQuery.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = LegacyQuery.LegacyQuerySerializer.class) +@JsonDeserialize(using = LegacyQuery.LegacyQueryDeserializer.class) public class LegacyQuery { /** Describes whether the authenticated user is allowed to edit the definition of this query. */ - @JsonProperty("can_edit") private Boolean canEdit; /** The timestamp when this query was created. 
*/ - @JsonProperty("created_at") private String createdAt; /** @@ -24,17 +33,14 @@ public class LegacyQuery { * *

[Learn more]: https://docs.databricks.com/api/workspace/datasources/list */ - @JsonProperty("data_source_id") private String dataSourceId; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** Query ID. */ - @JsonProperty("id") private String id; /** @@ -42,21 +48,18 @@ public class LegacyQuery { * in search results. If this boolean is `true`, the `options` property for this query includes a * `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days. */ - @JsonProperty("is_archived") private Boolean isArchived; /** * Whether the query is a draft. Draft queries only appear in list views for their owners. * Visualizations from draft queries cannot appear on dashboards. */ - @JsonProperty("is_draft") private Boolean isDraft; /** * Whether this query object appears in the current user's favorites list. This flag determines * whether the star icon for favorites is selected. */ - @JsonProperty("is_favorite") private Boolean isFavorite; /** @@ -64,76 +67,60 @@ public class LegacyQuery { * Boolean parameter to `true` if a query either does not use any text type parameters or uses a * data source type where text type parameters are handled safely. */ - @JsonProperty("is_safe") private Boolean isSafe; /** */ - @JsonProperty("last_modified_by") private User lastModifiedBy; /** The ID of the user who last saved changes to this query. */ - @JsonProperty("last_modified_by_id") private Long lastModifiedById; /** * If there is a cached result for this query and user, this field includes the query result ID. * If this query uses parameters, this field is always null. */ - @JsonProperty("latest_query_data_id") private String latestQueryDataId; /** The title of this query that appears in list views, widget headings, and on the query page. 
*/ - @JsonProperty("name") private String name; /** */ - @JsonProperty("options") private QueryOptions options; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the * query * `CAN_MANAGE`: Can manage the query */ - @JsonProperty("permission_tier") private PermissionLevel permissionTier; /** The text of the query to be run. */ - @JsonProperty("query") private String query; /** A SHA-256 hash of the query text along with the authenticated user ID. */ - @JsonProperty("query_hash") private String queryHash; /** * Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as * viewer" behavior) or `"owner"` (signifying "run as owner" behavior) */ - @JsonProperty("run_as_role") private RunAsRole runAsRole; /** */ - @JsonProperty("tags") private Collection tags; /** The timestamp at which this query was last updated. */ - @JsonProperty("updated_at") private String updatedAt; /** */ - @JsonProperty("user") private User user; /** The ID of the user who owns the query. 
*/ - @JsonProperty("user_id") private Long userId; /** */ - @JsonProperty("visualizations") private Collection visualizations; public LegacyQuery setCanEdit(Boolean canEdit) { @@ -441,4 +428,83 @@ public String toString() { .add("visualizations", visualizations) .toString(); } + + LegacyQueryPb toPb() { + LegacyQueryPb pb = new LegacyQueryPb(); + pb.setCanEdit(canEdit); + pb.setCreatedAt(createdAt); + pb.setDataSourceId(dataSourceId); + pb.setDescription(description); + pb.setId(id); + pb.setIsArchived(isArchived); + pb.setIsDraft(isDraft); + pb.setIsFavorite(isFavorite); + pb.setIsSafe(isSafe); + pb.setLastModifiedBy(lastModifiedBy); + pb.setLastModifiedById(lastModifiedById); + pb.setLatestQueryDataId(latestQueryDataId); + pb.setName(name); + pb.setOptions(options); + pb.setParent(parent); + pb.setPermissionTier(permissionTier); + pb.setQuery(query); + pb.setQueryHash(queryHash); + pb.setRunAsRole(runAsRole); + pb.setTags(tags); + pb.setUpdatedAt(updatedAt); + pb.setUser(user); + pb.setUserId(userId); + pb.setVisualizations(visualizations); + + return pb; + } + + static LegacyQuery fromPb(LegacyQueryPb pb) { + LegacyQuery model = new LegacyQuery(); + model.setCanEdit(pb.getCanEdit()); + model.setCreatedAt(pb.getCreatedAt()); + model.setDataSourceId(pb.getDataSourceId()); + model.setDescription(pb.getDescription()); + model.setId(pb.getId()); + model.setIsArchived(pb.getIsArchived()); + model.setIsDraft(pb.getIsDraft()); + model.setIsFavorite(pb.getIsFavorite()); + model.setIsSafe(pb.getIsSafe()); + model.setLastModifiedBy(pb.getLastModifiedBy()); + model.setLastModifiedById(pb.getLastModifiedById()); + model.setLatestQueryDataId(pb.getLatestQueryDataId()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setParent(pb.getParent()); + model.setPermissionTier(pb.getPermissionTier()); + model.setQuery(pb.getQuery()); + model.setQueryHash(pb.getQueryHash()); + model.setRunAsRole(pb.getRunAsRole()); + model.setTags(pb.getTags()); + 
model.setUpdatedAt(pb.getUpdatedAt()); + model.setUser(pb.getUser()); + model.setUserId(pb.getUserId()); + model.setVisualizations(pb.getVisualizations()); + + return model; + } + + public static class LegacyQuerySerializer extends JsonSerializer { + @Override + public void serialize(LegacyQuery value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LegacyQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class LegacyQueryDeserializer extends JsonDeserializer { + @Override + public LegacyQuery deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LegacyQueryPb pb = mapper.readValue(p, LegacyQueryPb.class); + return LegacyQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQueryPb.java new file mode 100755 index 000000000..1042b705b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyQueryPb.java @@ -0,0 +1,390 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class LegacyQueryPb { + @JsonProperty("can_edit") + private Boolean canEdit; + + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("data_source_id") + private String dataSourceId; + + @JsonProperty("description") + private String description; + + @JsonProperty("id") + private String id; + + @JsonProperty("is_archived") + private Boolean isArchived; + + @JsonProperty("is_draft") + private Boolean isDraft; + + @JsonProperty("is_favorite") + private Boolean isFavorite; + + @JsonProperty("is_safe") + private Boolean isSafe; + + @JsonProperty("last_modified_by") + private User lastModifiedBy; + + @JsonProperty("last_modified_by_id") + private Long lastModifiedById; + + @JsonProperty("latest_query_data_id") + private String latestQueryDataId; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private QueryOptions options; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("permission_tier") + private PermissionLevel permissionTier; + + @JsonProperty("query") + private String query; + + @JsonProperty("query_hash") + private String queryHash; + + @JsonProperty("run_as_role") + private RunAsRole runAsRole; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("updated_at") + private String updatedAt; + + @JsonProperty("user") + private User user; + + @JsonProperty("user_id") + private Long userId; + + @JsonProperty("visualizations") + private Collection visualizations; + + public LegacyQueryPb setCanEdit(Boolean canEdit) { + this.canEdit = canEdit; + return this; + } + + public Boolean getCanEdit() { + return canEdit; + } + + public LegacyQueryPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + 
} + + public String getCreatedAt() { + return createdAt; + } + + public LegacyQueryPb setDataSourceId(String dataSourceId) { + this.dataSourceId = dataSourceId; + return this; + } + + public String getDataSourceId() { + return dataSourceId; + } + + public LegacyQueryPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public LegacyQueryPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public LegacyQueryPb setIsArchived(Boolean isArchived) { + this.isArchived = isArchived; + return this; + } + + public Boolean getIsArchived() { + return isArchived; + } + + public LegacyQueryPb setIsDraft(Boolean isDraft) { + this.isDraft = isDraft; + return this; + } + + public Boolean getIsDraft() { + return isDraft; + } + + public LegacyQueryPb setIsFavorite(Boolean isFavorite) { + this.isFavorite = isFavorite; + return this; + } + + public Boolean getIsFavorite() { + return isFavorite; + } + + public LegacyQueryPb setIsSafe(Boolean isSafe) { + this.isSafe = isSafe; + return this; + } + + public Boolean getIsSafe() { + return isSafe; + } + + public LegacyQueryPb setLastModifiedBy(User lastModifiedBy) { + this.lastModifiedBy = lastModifiedBy; + return this; + } + + public User getLastModifiedBy() { + return lastModifiedBy; + } + + public LegacyQueryPb setLastModifiedById(Long lastModifiedById) { + this.lastModifiedById = lastModifiedById; + return this; + } + + public Long getLastModifiedById() { + return lastModifiedById; + } + + public LegacyQueryPb setLatestQueryDataId(String latestQueryDataId) { + this.latestQueryDataId = latestQueryDataId; + return this; + } + + public String getLatestQueryDataId() { + return latestQueryDataId; + } + + public LegacyQueryPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LegacyQueryPb setOptions(QueryOptions 
options) { + this.options = options; + return this; + } + + public QueryOptions getOptions() { + return options; + } + + public LegacyQueryPb setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public LegacyQueryPb setPermissionTier(PermissionLevel permissionTier) { + this.permissionTier = permissionTier; + return this; + } + + public PermissionLevel getPermissionTier() { + return permissionTier; + } + + public LegacyQueryPb setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public LegacyQueryPb setQueryHash(String queryHash) { + this.queryHash = queryHash; + return this; + } + + public String getQueryHash() { + return queryHash; + } + + public LegacyQueryPb setRunAsRole(RunAsRole runAsRole) { + this.runAsRole = runAsRole; + return this; + } + + public RunAsRole getRunAsRole() { + return runAsRole; + } + + public LegacyQueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public LegacyQueryPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + public LegacyQueryPb setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + public LegacyQueryPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + public LegacyQueryPb setVisualizations(Collection visualizations) { + this.visualizations = visualizations; + return this; + } + + public Collection getVisualizations() { + return visualizations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LegacyQueryPb that = (LegacyQueryPb) o; + return Objects.equals(canEdit, that.canEdit) + && 
Objects.equals(createdAt, that.createdAt) + && Objects.equals(dataSourceId, that.dataSourceId) + && Objects.equals(description, that.description) + && Objects.equals(id, that.id) + && Objects.equals(isArchived, that.isArchived) + && Objects.equals(isDraft, that.isDraft) + && Objects.equals(isFavorite, that.isFavorite) + && Objects.equals(isSafe, that.isSafe) + && Objects.equals(lastModifiedBy, that.lastModifiedBy) + && Objects.equals(lastModifiedById, that.lastModifiedById) + && Objects.equals(latestQueryDataId, that.latestQueryDataId) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(parent, that.parent) + && Objects.equals(permissionTier, that.permissionTier) + && Objects.equals(query, that.query) + && Objects.equals(queryHash, that.queryHash) + && Objects.equals(runAsRole, that.runAsRole) + && Objects.equals(tags, that.tags) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(user, that.user) + && Objects.equals(userId, that.userId) + && Objects.equals(visualizations, that.visualizations); + } + + @Override + public int hashCode() { + return Objects.hash( + canEdit, + createdAt, + dataSourceId, + description, + id, + isArchived, + isDraft, + isFavorite, + isSafe, + lastModifiedBy, + lastModifiedById, + latestQueryDataId, + name, + options, + parent, + permissionTier, + query, + queryHash, + runAsRole, + tags, + updatedAt, + user, + userId, + visualizations); + } + + @Override + public String toString() { + return new ToStringer(LegacyQueryPb.class) + .add("canEdit", canEdit) + .add("createdAt", createdAt) + .add("dataSourceId", dataSourceId) + .add("description", description) + .add("id", id) + .add("isArchived", isArchived) + .add("isDraft", isDraft) + .add("isFavorite", isFavorite) + .add("isSafe", isSafe) + .add("lastModifiedBy", lastModifiedBy) + .add("lastModifiedById", lastModifiedById) + .add("latestQueryDataId", latestQueryDataId) + .add("name", name) + .add("options", options) + 
.add("parent", parent) + .add("permissionTier", permissionTier) + .add("query", query) + .add("queryHash", queryHash) + .add("runAsRole", runAsRole) + .add("tags", tags) + .add("updatedAt", updatedAt) + .add("user", user) + .add("userId", userId) + .add("visualizations", visualizations) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualization.java index 2e8701e6b..83a5f76f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualization.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualization.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -14,40 +23,34 @@ * constructing ad-hoc visualizations entirely in JSON. */ @Generated +@JsonSerialize(using = LegacyVisualization.LegacyVisualizationSerializer.class) +@JsonDeserialize(using = LegacyVisualization.LegacyVisualizationDeserializer.class) public class LegacyVisualization { /** */ - @JsonProperty("created_at") private String createdAt; /** A short description of this visualization. This is not displayed in the UI. 
*/ - @JsonProperty("description") private String description; /** The UUID for this visualization. */ - @JsonProperty("id") private String id; /** The name of the visualization that appears on dashboards and the query screen. */ - @JsonProperty("name") private String name; /** * The options object varies widely from one visualization type to the next and is unsupported. * Databricks does not recommend modifying visualization settings in JSON. */ - @JsonProperty("options") private Object options; /** */ - @JsonProperty("query") private LegacyQuery query; /** The type of visualization: chart, table, pivot table, and so on. */ - @JsonProperty("type") private String typeValue; /** */ - @JsonProperty("updated_at") private String updatedAt; public LegacyVisualization setCreatedAt(String createdAt) { @@ -155,4 +158,53 @@ public String toString() { .add("updatedAt", updatedAt) .toString(); } + + LegacyVisualizationPb toPb() { + LegacyVisualizationPb pb = new LegacyVisualizationPb(); + pb.setCreatedAt(createdAt); + pb.setDescription(description); + pb.setId(id); + pb.setName(name); + pb.setOptions(options); + pb.setQuery(query); + pb.setType(typeValue); + pb.setUpdatedAt(updatedAt); + + return pb; + } + + static LegacyVisualization fromPb(LegacyVisualizationPb pb) { + LegacyVisualization model = new LegacyVisualization(); + model.setCreatedAt(pb.getCreatedAt()); + model.setDescription(pb.getDescription()); + model.setId(pb.getId()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setQuery(pb.getQuery()); + model.setType(pb.getType()); + model.setUpdatedAt(pb.getUpdatedAt()); + + return model; + } + + public static class LegacyVisualizationSerializer extends JsonSerializer { + @Override + public void serialize(LegacyVisualization value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + LegacyVisualizationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
LegacyVisualizationDeserializer + extends JsonDeserializer { + @Override + public LegacyVisualization deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + LegacyVisualizationPb pb = mapper.readValue(p, LegacyVisualizationPb.class); + return LegacyVisualization.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualizationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualizationPb.java new file mode 100755 index 000000000..1a762c2fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LegacyVisualizationPb.java @@ -0,0 +1,147 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The visualization description API changes frequently and is unsupported. You can duplicate a + * visualization by copying description objects received _from the API_ and then using them to + * create a new one with a POST request to the same endpoint. Databricks does not recommend + * constructing ad-hoc visualizations entirely in JSON. 
+ */ +@Generated +class LegacyVisualizationPb { + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("description") + private String description; + + @JsonProperty("id") + private String id; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Object options; + + @JsonProperty("query") + private LegacyQuery query; + + @JsonProperty("type") + private String typeValue; + + @JsonProperty("updated_at") + private String updatedAt; + + public LegacyVisualizationPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + } + + public String getCreatedAt() { + return createdAt; + } + + public LegacyVisualizationPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public LegacyVisualizationPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public LegacyVisualizationPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LegacyVisualizationPb setOptions(Object options) { + this.options = options; + return this; + } + + public Object getOptions() { + return options; + } + + public LegacyVisualizationPb setQuery(LegacyQuery query) { + this.query = query; + return this; + } + + public LegacyQuery getQuery() { + return query; + } + + public LegacyVisualizationPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + public LegacyVisualizationPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LegacyVisualizationPb that = (LegacyVisualizationPb) o; + return 
Objects.equals(createdAt, that.createdAt) + && Objects.equals(description, that.description) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(query, that.query) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash(createdAt, description, id, name, options, query, typeValue, updatedAt); + } + + @Override + public String toString() { + return new ToStringer(LegacyVisualizationPb.class) + .add("createdAt", createdAt) + .add("description", description) + .add("id", id) + .add("name", name) + .add("options", options) + .add("query", query) + .add("typeValue", typeValue) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequest.java index f5784da65..3f62c61c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; 
+import java.io.IOException; import java.util.Objects; /** List alerts */ @Generated +@JsonSerialize(using = ListAlertsRequest.ListAlertsRequestSerializer.class) +@JsonDeserialize(using = ListAlertsRequest.ListAlertsRequestDeserializer.class) public class ListAlertsRequest { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAlertsRequest setPageSize(Long pageSize) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListAlertsRequestPb toPb() { + ListAlertsRequestPb pb = new ListAlertsRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAlertsRequest fromPb(ListAlertsRequestPb pb) { + ListAlertsRequest model = new ListAlertsRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAlertsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListAlertsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAlertsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAlertsRequestDeserializer extends JsonDeserializer { + @Override + public ListAlertsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAlertsRequestPb pb = mapper.readValue(p, ListAlertsRequestPb.class); + return ListAlertsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequestPb.java new file mode 100755 index 000000000..883651b85 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List alerts */ +@Generated +class ListAlertsRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAlertsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAlertsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAlertsRequestPb that = (ListAlertsRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAlertsRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponse.java index ef9cfa016..97df0c6ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAlertsResponse.ListAlertsResponseSerializer.class) +@JsonDeserialize(using = ListAlertsResponse.ListAlertsResponseDeserializer.class) public class ListAlertsResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("results") private Collection results; public ListAlertsResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,40 @@ public String toString() { .add("results", results) .toString(); } + + ListAlertsResponsePb toPb() { + ListAlertsResponsePb pb = new ListAlertsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setResults(results); + + return pb; + } + + static ListAlertsResponse fromPb(ListAlertsResponsePb pb) { + ListAlertsResponse model = new ListAlertsResponse(); + 
model.setNextPageToken(pb.getNextPageToken()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListAlertsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListAlertsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAlertsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAlertsResponseDeserializer extends JsonDeserializer { + @Override + public ListAlertsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAlertsResponsePb pb = mapper.readValue(p, ListAlertsResponsePb.class); + return ListAlertsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java index 9489f808c..8a71ae53d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ListAlertsResponseAlert.ListAlertsResponseAlertSerializer.class) +@JsonDeserialize(using = ListAlertsResponseAlert.ListAlertsResponseAlertDeserializer.class) public class ListAlertsResponseAlert { /** Trigger conditions of the alert. */ - @JsonProperty("condition") private AlertCondition condition; /** The timestamp indicating when the alert was created. */ - @JsonProperty("create_time") private String createTime; /** @@ -22,7 +31,6 @@ public class ListAlertsResponseAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_body") private String customBody; /** @@ -31,53 +39,42 @@ public class ListAlertsResponseAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_subject") private String customSubject; /** The display name of the alert. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying the alert. */ - @JsonProperty("id") private String id; /** The workspace state of the alert. Used for tracking trashed status. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** Whether to notify alert subscribers when alert returns back to normal. */ - @JsonProperty("notify_on_ok") private Boolean notifyOnOk; /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** UUID of the query attached to the alert. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it * can be triggered again. If 0 or not specified, the alert will not be triggered again. */ - @JsonProperty("seconds_to_retrigger") private Long secondsToRetrigger; /** * Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not * yet been evaluated or ran into an error during the last evaluation. */ - @JsonProperty("state") private AlertState state; /** Timestamp when the alert was last triggered, if the alert has been triggered before. */ - @JsonProperty("trigger_time") private String triggerTime; /** The timestamp indicating when the alert was updated. 
*/ - @JsonProperty("update_time") private String updateTime; public ListAlertsResponseAlert setCondition(AlertCondition condition) { @@ -265,4 +262,67 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + ListAlertsResponseAlertPb toPb() { + ListAlertsResponseAlertPb pb = new ListAlertsResponseAlertPb(); + pb.setCondition(condition); + pb.setCreateTime(createTime); + pb.setCustomBody(customBody); + pb.setCustomSubject(customSubject); + pb.setDisplayName(displayName); + pb.setId(id); + pb.setLifecycleState(lifecycleState); + pb.setNotifyOnOk(notifyOnOk); + pb.setOwnerUserName(ownerUserName); + pb.setQueryId(queryId); + pb.setSecondsToRetrigger(secondsToRetrigger); + pb.setState(state); + pb.setTriggerTime(triggerTime); + pb.setUpdateTime(updateTime); + + return pb; + } + + static ListAlertsResponseAlert fromPb(ListAlertsResponseAlertPb pb) { + ListAlertsResponseAlert model = new ListAlertsResponseAlert(); + model.setCondition(pb.getCondition()); + model.setCreateTime(pb.getCreateTime()); + model.setCustomBody(pb.getCustomBody()); + model.setCustomSubject(pb.getCustomSubject()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + model.setLifecycleState(pb.getLifecycleState()); + model.setNotifyOnOk(pb.getNotifyOnOk()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setQueryId(pb.getQueryId()); + model.setSecondsToRetrigger(pb.getSecondsToRetrigger()); + model.setState(pb.getState()); + model.setTriggerTime(pb.getTriggerTime()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class ListAlertsResponseAlertSerializer + extends JsonSerializer { + @Override + public void serialize( + ListAlertsResponseAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAlertsResponseAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAlertsResponseAlertDeserializer + extends JsonDeserializer { + @Override 
+ public ListAlertsResponseAlert deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAlertsResponseAlertPb pb = mapper.readValue(p, ListAlertsResponseAlertPb.class); + return ListAlertsResponseAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlertPb.java new file mode 100755 index 000000000..b0d5a1a44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlertPb.java @@ -0,0 +1,239 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ListAlertsResponseAlertPb { + @JsonProperty("condition") + private AlertCondition condition; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("custom_body") + private String customBody; + + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("seconds_to_retrigger") + private Long secondsToRetrigger; + + @JsonProperty("state") + private AlertState state; + + @JsonProperty("trigger_time") + private String triggerTime; + + @JsonProperty("update_time") + private 
String updateTime; + + public ListAlertsResponseAlertPb setCondition(AlertCondition condition) { + this.condition = condition; + return this; + } + + public AlertCondition getCondition() { + return condition; + } + + public ListAlertsResponseAlertPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public ListAlertsResponseAlertPb setCustomBody(String customBody) { + this.customBody = customBody; + return this; + } + + public String getCustomBody() { + return customBody; + } + + public ListAlertsResponseAlertPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public ListAlertsResponseAlertPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ListAlertsResponseAlertPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListAlertsResponseAlertPb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public ListAlertsResponseAlertPb setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + + public ListAlertsResponseAlertPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public ListAlertsResponseAlertPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public ListAlertsResponseAlertPb setSecondsToRetrigger(Long secondsToRetrigger) { + this.secondsToRetrigger 
= secondsToRetrigger; + return this; + } + + public Long getSecondsToRetrigger() { + return secondsToRetrigger; + } + + public ListAlertsResponseAlertPb setState(AlertState state) { + this.state = state; + return this; + } + + public AlertState getState() { + return state; + } + + public ListAlertsResponseAlertPb setTriggerTime(String triggerTime) { + this.triggerTime = triggerTime; + return this; + } + + public String getTriggerTime() { + return triggerTime; + } + + public ListAlertsResponseAlertPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAlertsResponseAlertPb that = (ListAlertsResponseAlertPb) o; + return Objects.equals(condition, that.condition) + && Objects.equals(createTime, that.createTime) + && Objects.equals(customBody, that.customBody) + && Objects.equals(customSubject, that.customSubject) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(notifyOnOk, that.notifyOnOk) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(queryId, that.queryId) + && Objects.equals(secondsToRetrigger, that.secondsToRetrigger) + && Objects.equals(state, that.state) + && Objects.equals(triggerTime, that.triggerTime) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + condition, + createTime, + customBody, + customSubject, + displayName, + id, + lifecycleState, + notifyOnOk, + ownerUserName, + queryId, + secondsToRetrigger, + state, + triggerTime, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(ListAlertsResponseAlertPb.class) + .add("condition", condition) + .add("createTime", createTime) + 
.add("customBody", customBody) + .add("customSubject", customSubject) + .add("displayName", displayName) + .add("id", id) + .add("lifecycleState", lifecycleState) + .add("notifyOnOk", notifyOnOk) + .add("ownerUserName", ownerUserName) + .add("queryId", queryId) + .add("secondsToRetrigger", secondsToRetrigger) + .add("state", state) + .add("triggerTime", triggerTime) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponsePb.java new file mode 100755 index 000000000..738c3a1e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAlertsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("results") + private Collection results; + + public ListAlertsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListAlertsResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAlertsResponsePb that = (ListAlertsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(results, that.results); + } + + @Override + public int 
hashCode() { + return Objects.hash(nextPageToken, results); + } + + @Override + public String toString() { + return new ToStringer(ListAlertsResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java index dc51f962e..1ba4dc3f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List alerts */ @Generated +@JsonSerialize(using = ListAlertsV2Request.ListAlertsV2RequestSerializer.class) +@JsonDeserialize(using = ListAlertsV2Request.ListAlertsV2RequestDeserializer.class) public class ListAlertsV2Request { /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListAlertsV2Request setPageSize(Long pageSize) { @@ -59,4 +65,41 @@ public String toString() { .add("pageToken", pageToken) 
.toString(); } + + ListAlertsV2RequestPb toPb() { + ListAlertsV2RequestPb pb = new ListAlertsV2RequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListAlertsV2Request fromPb(ListAlertsV2RequestPb pb) { + ListAlertsV2Request model = new ListAlertsV2Request(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListAlertsV2RequestSerializer extends JsonSerializer { + @Override + public void serialize(ListAlertsV2Request value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAlertsV2RequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAlertsV2RequestDeserializer + extends JsonDeserializer { + @Override + public ListAlertsV2Request deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAlertsV2RequestPb pb = mapper.readValue(p, ListAlertsV2RequestPb.class); + return ListAlertsV2Request.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2RequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2RequestPb.java new file mode 100755 index 000000000..71f14265d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2RequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List alerts */ +@Generated +class ListAlertsV2RequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListAlertsV2RequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAlertsV2RequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAlertsV2RequestPb that = (ListAlertsV2RequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAlertsV2RequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java index d9f0f142b..932226608 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAlertsV2Response.ListAlertsV2ResponseSerializer.class) +@JsonDeserialize(using = ListAlertsV2Response.ListAlertsV2ResponseDeserializer.class) public class ListAlertsV2Response { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("results") private Collection results; public ListAlertsV2Response setNextPageToken(String nextPageToken) { @@ -57,4 +66,42 @@ public String toString() { .add("results", results) .toString(); } + + ListAlertsV2ResponsePb toPb() { + ListAlertsV2ResponsePb pb = new ListAlertsV2ResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setResults(results); + + return pb; + } + + static ListAlertsV2Response fromPb(ListAlertsV2ResponsePb pb) { + ListAlertsV2Response model = new ListAlertsV2Response(); + model.setNextPageToken(pb.getNextPageToken()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListAlertsV2ResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListAlertsV2Response value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAlertsV2ResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAlertsV2ResponseDeserializer + extends JsonDeserializer { + @Override + public ListAlertsV2Response deserialize(JsonParser p, 
DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAlertsV2ResponsePb pb = mapper.readValue(p, ListAlertsV2ResponsePb.class); + return ListAlertsV2Response.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponsePb.java new file mode 100755 index 000000000..9ace16feb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAlertsV2ResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("results") + private Collection results; + + public ListAlertsV2ResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListAlertsV2ResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAlertsV2ResponsePb that = (ListAlertsV2ResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, results); + } + + @Override + public String 
toString() { + return new ToStringer(ListAlertsV2ResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequest.java index 9ec702ba6..0061c6e9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequest.java @@ -3,32 +3,34 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get dashboard objects */ @Generated +@JsonSerialize(using = ListDashboardsRequest.ListDashboardsRequestSerializer.class) +@JsonDeserialize(using = ListDashboardsRequest.ListDashboardsRequestDeserializer.class) public class ListDashboardsRequest { /** Name of dashboard attribute to order by. */ - @JsonIgnore - @QueryParam("order") private ListOrder order; /** Page number to retrieve. */ - @JsonIgnore - @QueryParam("page") private Long page; /** Number of dashboards to return per page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Full text search term. 
*/ - @JsonIgnore - @QueryParam("q") private String q; public ListDashboardsRequest setOrder(ListOrder order) { @@ -92,4 +94,47 @@ public String toString() { .add("q", q) .toString(); } + + ListDashboardsRequestPb toPb() { + ListDashboardsRequestPb pb = new ListDashboardsRequestPb(); + pb.setOrder(order); + pb.setPage(page); + pb.setPageSize(pageSize); + pb.setQ(q); + + return pb; + } + + static ListDashboardsRequest fromPb(ListDashboardsRequestPb pb) { + ListDashboardsRequest model = new ListDashboardsRequest(); + model.setOrder(pb.getOrder()); + model.setPage(pb.getPage()); + model.setPageSize(pb.getPageSize()); + model.setQ(pb.getQ()); + + return model; + } + + public static class ListDashboardsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListDashboardsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListDashboardsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListDashboardsRequestDeserializer + extends JsonDeserializer { + @Override + public ListDashboardsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListDashboardsRequestPb pb = mapper.readValue(p, ListDashboardsRequestPb.class); + return ListDashboardsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequestPb.java new file mode 100755 index 000000000..6b81af92d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get dashboard objects */ +@Generated +class ListDashboardsRequestPb { + @JsonIgnore + @QueryParam("order") + private ListOrder order; + + @JsonIgnore + @QueryParam("page") + private Long page; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("q") + private String q; + + public ListDashboardsRequestPb setOrder(ListOrder order) { + this.order = order; + return this; + } + + public ListOrder getOrder() { + return order; + } + + public ListDashboardsRequestPb setPage(Long page) { + this.page = page; + return this; + } + + public Long getPage() { + return page; + } + + public ListDashboardsRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDashboardsRequestPb setQ(String q) { + this.q = q; + return this; + } + + public String getQ() { + return q; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDashboardsRequestPb that = (ListDashboardsRequestPb) o; + return Objects.equals(order, that.order) + && Objects.equals(page, that.page) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(q, that.q); + } + + @Override + public int hashCode() { + return Objects.hash(order, page, pageSize, q); + } + + @Override + public String toString() { + return new ToStringer(ListDashboardsRequestPb.class) + .add("order", order) + .add("page", page) + .add("pageSize", pageSize) + .add("q", q) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequest.java index ef5047cc8..6a283081e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a list of queries */ @Generated +@JsonSerialize(using = ListQueriesLegacyRequest.ListQueriesLegacyRequestSerializer.class) +@JsonDeserialize(using = ListQueriesLegacyRequest.ListQueriesLegacyRequestDeserializer.class) public class ListQueriesLegacyRequest { /** * Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to @@ -26,23 +36,15 @@ public class ListQueriesLegacyRequest { * *

- `created_by`: The user name of the user that created the query. */ - @JsonIgnore - @QueryParam("order") private String order; /** Page number to retrieve. */ - @JsonIgnore - @QueryParam("page") private Long page; /** Number of queries to return per page. */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** Full text search term */ - @JsonIgnore - @QueryParam("q") private String q; public ListQueriesLegacyRequest setOrder(String order) { @@ -106,4 +108,47 @@ public String toString() { .add("q", q) .toString(); } + + ListQueriesLegacyRequestPb toPb() { + ListQueriesLegacyRequestPb pb = new ListQueriesLegacyRequestPb(); + pb.setOrder(order); + pb.setPage(page); + pb.setPageSize(pageSize); + pb.setQ(q); + + return pb; + } + + static ListQueriesLegacyRequest fromPb(ListQueriesLegacyRequestPb pb) { + ListQueriesLegacyRequest model = new ListQueriesLegacyRequest(); + model.setOrder(pb.getOrder()); + model.setPage(pb.getPage()); + model.setPageSize(pb.getPageSize()); + model.setQ(pb.getQ()); + + return model; + } + + public static class ListQueriesLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListQueriesLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQueriesLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQueriesLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public ListQueriesLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueriesLegacyRequestPb pb = mapper.readValue(p, ListQueriesLegacyRequestPb.class); + return ListQueriesLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequestPb.java new file mode 100755 index 000000000..4fadca372 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a list of queries */ +@Generated +class ListQueriesLegacyRequestPb { + @JsonIgnore + @QueryParam("order") + private String order; + + @JsonIgnore + @QueryParam("page") + private Long page; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("q") + private String q; + + public ListQueriesLegacyRequestPb setOrder(String order) { + this.order = order; + return this; + } + + public String getOrder() { + return order; + } + + public ListQueriesLegacyRequestPb setPage(Long page) { + this.page = page; + return this; + } + + public Long getPage() { + return page; + } + + public ListQueriesLegacyRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListQueriesLegacyRequestPb setQ(String q) { + this.q = q; + return this; + } + + public String getQ() { + return q; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ListQueriesLegacyRequestPb that = (ListQueriesLegacyRequestPb) o; + return Objects.equals(order, that.order) + && Objects.equals(page, that.page) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(q, that.q); + } + + @Override + public int hashCode() { + return Objects.hash(order, page, pageSize, q); + } + + @Override + public String toString() { + return new ToStringer(ListQueriesLegacyRequestPb.class) + .add("order", order) + .add("page", page) + .add("pageSize", pageSize) + .add("q", q) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequest.java index e70416db7..5581b60ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List queries */ @Generated +@JsonSerialize(using = ListQueriesRequest.ListQueriesRequestSerializer.class) +@JsonDeserialize(using = ListQueriesRequest.ListQueriesRequestDeserializer.class) public class ListQueriesRequest { /** 
*/ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListQueriesRequest setPageSize(Long pageSize) { @@ -59,4 +65,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListQueriesRequestPb toPb() { + ListQueriesRequestPb pb = new ListQueriesRequestPb(); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static ListQueriesRequest fromPb(ListQueriesRequestPb pb) { + ListQueriesRequest model = new ListQueriesRequest(); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListQueriesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListQueriesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQueriesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQueriesRequestDeserializer extends JsonDeserializer { + @Override + public ListQueriesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueriesRequestPb pb = mapper.readValue(p, ListQueriesRequestPb.class); + return ListQueriesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequestPb.java new file mode 100755 index 000000000..462f22655 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List queries */ +@Generated +class ListQueriesRequestPb { + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListQueriesRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListQueriesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQueriesRequestPb that = (ListQueriesRequestPb) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListQueriesRequestPb.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponse.java index 84516eacb..9673d593b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponse.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; 
+import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListQueriesResponse.ListQueriesResponseSerializer.class) +@JsonDeserialize(using = ListQueriesResponse.ListQueriesResponseDeserializer.class) public class ListQueriesResponse { /** Whether there is another page of results. */ - @JsonProperty("has_next_page") private Boolean hasNextPage; /** A token that can be used to get the next page of results. */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("res") private Collection res; public ListQueriesResponse setHasNextPage(Boolean hasNextPage) { @@ -72,4 +80,43 @@ public String toString() { .add("res", res) .toString(); } + + ListQueriesResponsePb toPb() { + ListQueriesResponsePb pb = new ListQueriesResponsePb(); + pb.setHasNextPage(hasNextPage); + pb.setNextPageToken(nextPageToken); + pb.setRes(res); + + return pb; + } + + static ListQueriesResponse fromPb(ListQueriesResponsePb pb) { + ListQueriesResponse model = new ListQueriesResponse(); + model.setHasNextPage(pb.getHasNextPage()); + model.setNextPageToken(pb.getNextPageToken()); + model.setRes(pb.getRes()); + + return model; + } + + public static class ListQueriesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListQueriesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQueriesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static 
class ListQueriesResponseDeserializer + extends JsonDeserializer { + @Override + public ListQueriesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueriesResponsePb pb = mapper.readValue(p, ListQueriesResponsePb.class); + return ListQueriesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponsePb.java new file mode 100755 index 000000000..c463e4a02 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListQueriesResponsePb { + @JsonProperty("has_next_page") + private Boolean hasNextPage; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("res") + private Collection res; + + public ListQueriesResponsePb setHasNextPage(Boolean hasNextPage) { + this.hasNextPage = hasNextPage; + return this; + } + + public Boolean getHasNextPage() { + return hasNextPage; + } + + public ListQueriesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQueriesResponsePb setRes(Collection res) { + this.res = res; + return this; + } + + public Collection getRes() { + return res; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if 
(o == null || getClass() != o.getClass()) return false; + ListQueriesResponsePb that = (ListQueriesResponsePb) o; + return Objects.equals(hasNextPage, that.hasNextPage) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(res, that.res); + } + + @Override + public int hashCode() { + return Objects.hash(hasNextPage, nextPageToken, res); + } + + @Override + public String toString() { + return new ToStringer(ListQueriesResponsePb.class) + .add("hasNextPage", hasNextPage) + .add("nextPageToken", nextPageToken) + .add("res", res) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequest.java index bceedcdcc..058dac8fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequest.java @@ -3,33 +3,37 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List Queries */ @Generated +@JsonSerialize(using = ListQueryHistoryRequest.ListQueryHistoryRequestSerializer.class) +@JsonDeserialize(using = 
ListQueryHistoryRequest.ListQueryHistoryRequestDeserializer.class) public class ListQueryHistoryRequest { /** A filter to limit query history results. This field is optional. */ - @JsonIgnore - @QueryParam("filter_by") private QueryFilter filterBy; /** * Whether to include the query metrics with each query. Only use this for a small subset of * queries (max_results). Defaults to false. */ - @JsonIgnore - @QueryParam("include_metrics") private Boolean includeMetrics; /** * Limit the number of results returned in one page. Must be less than 1000 and the default is * 100. */ - @JsonIgnore - @QueryParam("max_results") private Long maxResults; /** @@ -37,8 +41,6 @@ public class ListQueryHistoryRequest { * that need to be encoded before using it in a URL. For example, the character '+' needs to be * replaced by %2B. This field is optional. */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListQueryHistoryRequest setFilterBy(QueryFilter filterBy) { @@ -102,4 +104,47 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListQueryHistoryRequestPb toPb() { + ListQueryHistoryRequestPb pb = new ListQueryHistoryRequestPb(); + pb.setFilterBy(filterBy); + pb.setIncludeMetrics(includeMetrics); + pb.setMaxResults(maxResults); + pb.setPageToken(pageToken); + + return pb; + } + + static ListQueryHistoryRequest fromPb(ListQueryHistoryRequestPb pb) { + ListQueryHistoryRequest model = new ListQueryHistoryRequest(); + model.setFilterBy(pb.getFilterBy()); + model.setIncludeMetrics(pb.getIncludeMetrics()); + model.setMaxResults(pb.getMaxResults()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListQueryHistoryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListQueryHistoryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQueryHistoryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public 
static class ListQueryHistoryRequestDeserializer + extends JsonDeserializer { + @Override + public ListQueryHistoryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueryHistoryRequestPb pb = mapper.readValue(p, ListQueryHistoryRequestPb.class); + return ListQueryHistoryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequestPb.java new file mode 100755 index 000000000..12d334fc7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryHistoryRequestPb.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Queries */ +@Generated +class ListQueryHistoryRequestPb { + @JsonIgnore + @QueryParam("filter_by") + private QueryFilter filterBy; + + @JsonIgnore + @QueryParam("include_metrics") + private Boolean includeMetrics; + + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListQueryHistoryRequestPb setFilterBy(QueryFilter filterBy) { + this.filterBy = filterBy; + return this; + } + + public QueryFilter getFilterBy() { + return filterBy; + } + + public ListQueryHistoryRequestPb setIncludeMetrics(Boolean includeMetrics) { + this.includeMetrics = includeMetrics; + return this; + } + + public Boolean getIncludeMetrics() { + return includeMetrics; + } + + public 
ListQueryHistoryRequestPb setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListQueryHistoryRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQueryHistoryRequestPb that = (ListQueryHistoryRequestPb) o; + return Objects.equals(filterBy, that.filterBy) + && Objects.equals(includeMetrics, that.includeMetrics) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filterBy, includeMetrics, maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListQueryHistoryRequestPb.class) + .add("filterBy", filterBy) + .add("includeMetrics", includeMetrics) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponse.java index af3f69497..00fc5a2a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponse.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListQueryObjectsResponse.ListQueryObjectsResponseSerializer.class) +@JsonDeserialize(using = ListQueryObjectsResponse.ListQueryObjectsResponseDeserializer.class) public class ListQueryObjectsResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("results") private Collection results; public ListQueryObjectsResponse setNextPageToken(String nextPageToken) { @@ -57,4 +66,43 @@ public String toString() { .add("results", results) .toString(); } + + ListQueryObjectsResponsePb toPb() { + ListQueryObjectsResponsePb pb = new ListQueryObjectsResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setResults(results); + + return pb; + } + + static ListQueryObjectsResponse fromPb(ListQueryObjectsResponsePb pb) { + ListQueryObjectsResponse model = new ListQueryObjectsResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListQueryObjectsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListQueryObjectsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListQueryObjectsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQueryObjectsResponseDeserializer + extends JsonDeserializer { + @Override + public ListQueryObjectsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueryObjectsResponsePb pb = mapper.readValue(p, ListQueryObjectsResponsePb.class); + return ListQueryObjectsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponsePb.java new file mode 100755 index 000000000..0f676928f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListQueryObjectsResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("results") + private Collection results; + + public ListQueryObjectsResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQueryObjectsResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQueryObjectsResponsePb that = (ListQueryObjectsResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, results); + } + + @Override + public String toString() { + return new ToStringer(ListQueryObjectsResponsePb.class) + 
.add("nextPageToken", nextPageToken) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQuery.java index 1d3b1da0d..d61d70b77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQuery.java @@ -4,78 +4,74 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListQueryObjectsResponseQuery.ListQueryObjectsResponseQuerySerializer.class) +@JsonDeserialize( + using = ListQueryObjectsResponseQuery.ListQueryObjectsResponseQueryDeserializer.class) public class ListQueryObjectsResponseQuery { /** Whether to apply a 1000 row limit to the query result. */ - @JsonProperty("apply_auto_limit") private Boolean applyAutoLimit; /** Name of the catalog where this query will be executed. */ - @JsonProperty("catalog") private String catalog; /** Timestamp when this query was created. 
*/ - @JsonProperty("create_time") private String createTime; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** * Display name of the query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying the query. */ - @JsonProperty("id") private String id; /** Username of the user who last saved changes to this query. */ - @JsonProperty("last_modifier_user_name") private String lastModifierUserName; /** Indicates whether the query is trashed. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** Username of the user that owns the query. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** List of query parameter definitions. */ - @JsonProperty("parameters") private Collection parameters; /** Text of the query to be run. */ - @JsonProperty("query_text") private String queryText; /** Sets the "Run as" role for the object. */ - @JsonProperty("run_as_mode") private RunAsMode runAsMode; /** Name of the schema where this query will be executed. */ - @JsonProperty("schema") private String schema; /** */ - @JsonProperty("tags") private Collection tags; /** Timestamp when this query was last updated. */ - @JsonProperty("update_time") private String updateTime; /** ID of the SQL warehouse attached to the query. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public ListQueryObjectsResponseQuery setApplyAutoLimit(Boolean applyAutoLimit) { @@ -287,4 +283,72 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + ListQueryObjectsResponseQueryPb toPb() { + ListQueryObjectsResponseQueryPb pb = new ListQueryObjectsResponseQueryPb(); + pb.setApplyAutoLimit(applyAutoLimit); + pb.setCatalog(catalog); + pb.setCreateTime(createTime); + pb.setDescription(description); + pb.setDisplayName(displayName); + pb.setId(id); + pb.setLastModifierUserName(lastModifierUserName); + pb.setLifecycleState(lifecycleState); + pb.setOwnerUserName(ownerUserName); + pb.setParameters(parameters); + pb.setQueryText(queryText); + pb.setRunAsMode(runAsMode); + pb.setSchema(schema); + pb.setTags(tags); + pb.setUpdateTime(updateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static ListQueryObjectsResponseQuery fromPb(ListQueryObjectsResponseQueryPb pb) { + ListQueryObjectsResponseQuery model = new ListQueryObjectsResponseQuery(); + model.setApplyAutoLimit(pb.getApplyAutoLimit()); + model.setCatalog(pb.getCatalog()); + model.setCreateTime(pb.getCreateTime()); + model.setDescription(pb.getDescription()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + model.setLastModifierUserName(pb.getLastModifierUserName()); + model.setLifecycleState(pb.getLifecycleState()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setParameters(pb.getParameters()); + model.setQueryText(pb.getQueryText()); + model.setRunAsMode(pb.getRunAsMode()); + model.setSchema(pb.getSchema()); + model.setTags(pb.getTags()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class ListQueryObjectsResponseQuerySerializer + extends JsonSerializer { + @Override + public void serialize( + ListQueryObjectsResponseQuery value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + ListQueryObjectsResponseQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListQueryObjectsResponseQueryDeserializer + extends JsonDeserializer { + @Override + public ListQueryObjectsResponseQuery deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListQueryObjectsResponseQueryPb pb = + mapper.readValue(p, ListQueryObjectsResponseQueryPb.class); + return ListQueryObjectsResponseQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQueryPb.java new file mode 100755 index 000000000..f9a73dd86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueryObjectsResponseQueryPb.java @@ -0,0 +1,270 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListQueryObjectsResponseQueryPb { + @JsonProperty("apply_auto_limit") + private Boolean applyAutoLimit; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("description") + private String description; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_modifier_user_name") + private String lastModifierUserName; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("run_as_mode") + private RunAsMode runAsMode; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public ListQueryObjectsResponseQueryPb setApplyAutoLimit(Boolean applyAutoLimit) { + this.applyAutoLimit = applyAutoLimit; + return this; + } + + public Boolean getApplyAutoLimit() { + return applyAutoLimit; + } + + public ListQueryObjectsResponseQueryPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public ListQueryObjectsResponseQueryPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public ListQueryObjectsResponseQueryPb setDescription(String description) { + this.description = 
description; + return this; + } + + public String getDescription() { + return description; + } + + public ListQueryObjectsResponseQueryPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ListQueryObjectsResponseQueryPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListQueryObjectsResponseQueryPb setLastModifierUserName(String lastModifierUserName) { + this.lastModifierUserName = lastModifierUserName; + return this; + } + + public String getLastModifierUserName() { + return lastModifierUserName; + } + + public ListQueryObjectsResponseQueryPb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public ListQueryObjectsResponseQueryPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public ListQueryObjectsResponseQueryPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public ListQueryObjectsResponseQueryPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public ListQueryObjectsResponseQueryPb setRunAsMode(RunAsMode runAsMode) { + this.runAsMode = runAsMode; + return this; + } + + public RunAsMode getRunAsMode() { + return runAsMode; + } + + public ListQueryObjectsResponseQueryPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public ListQueryObjectsResponseQueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() 
{ + return tags; + } + + public ListQueryObjectsResponseQueryPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public ListQueryObjectsResponseQueryPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQueryObjectsResponseQueryPb that = (ListQueryObjectsResponseQueryPb) o; + return Objects.equals(applyAutoLimit, that.applyAutoLimit) + && Objects.equals(catalog, that.catalog) + && Objects.equals(createTime, that.createTime) + && Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id) + && Objects.equals(lastModifierUserName, that.lastModifierUserName) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(parameters, that.parameters) + && Objects.equals(queryText, that.queryText) + && Objects.equals(runAsMode, that.runAsMode) + && Objects.equals(schema, that.schema) + && Objects.equals(tags, that.tags) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + applyAutoLimit, + catalog, + createTime, + description, + displayName, + id, + lastModifierUserName, + lifecycleState, + ownerUserName, + parameters, + queryText, + runAsMode, + schema, + tags, + updateTime, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(ListQueryObjectsResponseQueryPb.class) + .add("applyAutoLimit", applyAutoLimit) + .add("catalog", catalog) + .add("createTime", createTime) + .add("description", description) + .add("displayName", displayName) + 
.add("id", id) + .add("lastModifierUserName", lastModifierUserName) + .add("lifecycleState", lifecycleState) + .add("ownerUserName", ownerUserName) + .add("parameters", parameters) + .add("queryText", queryText) + .add("runAsMode", runAsMode) + .add("schema", schema) + .add("tags", tags) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponse.java index 6b6d6a07c..53eb13201 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponse.java @@ -4,26 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListResponse.ListResponseSerializer.class) +@JsonDeserialize(using = ListResponse.ListResponseDeserializer.class) public class ListResponse { /** The total number of dashboards. */ - @JsonProperty("count") private Long count; /** The current page being displayed. */ - @JsonProperty("page") private Long page; /** The number of dashboards per page. 
*/ - @JsonProperty("page_size") private Long pageSize; /** List of dashboards returned. */ - @JsonProperty("results") private Collection results; public ListResponse setCount(Long count) { @@ -87,4 +94,43 @@ public String toString() { .add("results", results) .toString(); } + + ListResponsePb toPb() { + ListResponsePb pb = new ListResponsePb(); + pb.setCount(count); + pb.setPage(page); + pb.setPageSize(pageSize); + pb.setResults(results); + + return pb; + } + + static ListResponse fromPb(ListResponsePb pb) { + ListResponse model = new ListResponse(); + model.setCount(pb.getCount()); + model.setPage(pb.getPage()); + model.setPageSize(pb.getPageSize()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListResponseDeserializer extends JsonDeserializer { + @Override + public ListResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListResponsePb pb = mapper.readValue(p, ListResponsePb.class); + return ListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponsePb.java new file mode 100755 index 000000000..2cf836c08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListResponsePb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListResponsePb { + @JsonProperty("count") + private Long count; + + @JsonProperty("page") + private Long page; + + @JsonProperty("page_size") + private Long pageSize; + + @JsonProperty("results") + private Collection results; + + public ListResponsePb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public ListResponsePb setPage(Long page) { + this.page = page; + return this; + } + + public Long getPage() { + return page; + } + + public ListResponsePb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListResponsePb that = (ListResponsePb) o; + return Objects.equals(count, that.count) + && Objects.equals(page, that.page) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(count, page, pageSize, results); + } + + @Override + public String toString() { + return new ToStringer(ListResponsePb.class) + .add("count", count) + .add("page", page) + .add("pageSize", pageSize) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequest.java index 
335426cf4..36e7f33aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequest.java @@ -3,25 +3,33 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List visualizations on a query */ @Generated +@JsonSerialize( + using = ListVisualizationsForQueryRequest.ListVisualizationsForQueryRequestSerializer.class) +@JsonDeserialize( + using = ListVisualizationsForQueryRequest.ListVisualizationsForQueryRequestDeserializer.class) public class ListVisualizationsForQueryRequest { /** */ - @JsonIgnore private String id; + private String id; /** */ - @JsonIgnore - @QueryParam("page_size") private Long pageSize; /** */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListVisualizationsForQueryRequest setId(String id) { @@ -74,4 +82,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListVisualizationsForQueryRequestPb toPb() { + ListVisualizationsForQueryRequestPb pb = new ListVisualizationsForQueryRequestPb(); + pb.setId(id); + pb.setPageSize(pageSize); + pb.setPageToken(pageToken); + + return pb; + } + + static 
ListVisualizationsForQueryRequest fromPb(ListVisualizationsForQueryRequestPb pb) { + ListVisualizationsForQueryRequest model = new ListVisualizationsForQueryRequest(); + model.setId(pb.getId()); + model.setPageSize(pb.getPageSize()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListVisualizationsForQueryRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListVisualizationsForQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListVisualizationsForQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListVisualizationsForQueryRequestDeserializer + extends JsonDeserializer { + @Override + public ListVisualizationsForQueryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListVisualizationsForQueryRequestPb pb = + mapper.readValue(p, ListVisualizationsForQueryRequestPb.class); + return ListVisualizationsForQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequestPb.java new file mode 100755 index 000000000..f29a859c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryRequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List visualizations on a query */ +@Generated +class ListVisualizationsForQueryRequestPb { + @JsonIgnore private String id; + + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListVisualizationsForQueryRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListVisualizationsForQueryRequestPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListVisualizationsForQueryRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListVisualizationsForQueryRequestPb that = (ListVisualizationsForQueryRequestPb) o; + return Objects.equals(id, that.id) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(id, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListVisualizationsForQueryRequestPb.class) + .add("id", id) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponse.java index 6154a95b2..ab987ed4c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = ListVisualizationsForQueryResponse.ListVisualizationsForQueryResponseSerializer.class) +@JsonDeserialize( + using = ListVisualizationsForQueryResponse.ListVisualizationsForQueryResponseDeserializer.class) public class ListVisualizationsForQueryResponse { /** */ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("results") private Collection results; public ListVisualizationsForQueryResponse setNextPageToken(String nextPageToken) { @@ -57,4 +68,44 @@ public String toString() { .add("results", results) .toString(); } + + ListVisualizationsForQueryResponsePb toPb() { + ListVisualizationsForQueryResponsePb pb = new ListVisualizationsForQueryResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setResults(results); + + return pb; + } + + static ListVisualizationsForQueryResponse fromPb(ListVisualizationsForQueryResponsePb pb) { + ListVisualizationsForQueryResponse model = new ListVisualizationsForQueryResponse(); + 
model.setNextPageToken(pb.getNextPageToken()); + model.setResults(pb.getResults()); + + return model; + } + + public static class ListVisualizationsForQueryResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListVisualizationsForQueryResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListVisualizationsForQueryResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListVisualizationsForQueryResponseDeserializer + extends JsonDeserializer { + @Override + public ListVisualizationsForQueryResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListVisualizationsForQueryResponsePb pb = + mapper.readValue(p, ListVisualizationsForQueryResponsePb.class); + return ListVisualizationsForQueryResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponsePb.java new file mode 100755 index 000000000..c434b7d13 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListVisualizationsForQueryResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListVisualizationsForQueryResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("results") + private Collection results; + + public ListVisualizationsForQueryResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListVisualizationsForQueryResponsePb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListVisualizationsForQueryResponsePb that = (ListVisualizationsForQueryResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, results); + } + + @Override + public String toString() { + return new ToStringer(ListVisualizationsForQueryResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java index ae65d07db..f0412f1ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List warehouses */ @Generated +@JsonSerialize(using = ListWarehousesRequest.ListWarehousesRequestSerializer.class) +@JsonDeserialize(using = ListWarehousesRequest.ListWarehousesRequestDeserializer.class) public class ListWarehousesRequest { /** * Service Principal which will be used to fetch the list of warehouses. If not specified, the * user from the session header is used. 
*/ - @JsonIgnore - @QueryParam("run_as_user_id") private Long runAsUserId; public ListWarehousesRequest setRunAsUserId(Long runAsUserId) { @@ -45,4 +53,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListWarehousesRequest.class).add("runAsUserId", runAsUserId).toString(); } + + ListWarehousesRequestPb toPb() { + ListWarehousesRequestPb pb = new ListWarehousesRequestPb(); + pb.setRunAsUserId(runAsUserId); + + return pb; + } + + static ListWarehousesRequest fromPb(ListWarehousesRequestPb pb) { + ListWarehousesRequest model = new ListWarehousesRequest(); + model.setRunAsUserId(pb.getRunAsUserId()); + + return model; + } + + public static class ListWarehousesRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ListWarehousesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListWarehousesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListWarehousesRequestDeserializer + extends JsonDeserializer { + @Override + public ListWarehousesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListWarehousesRequestPb pb = mapper.readValue(p, ListWarehousesRequestPb.class); + return ListWarehousesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequestPb.java new file mode 100755 index 000000000..f34d0108e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List warehouses */ +@Generated +class ListWarehousesRequestPb { + @JsonIgnore + @QueryParam("run_as_user_id") + private Long runAsUserId; + + public ListWarehousesRequestPb setRunAsUserId(Long runAsUserId) { + this.runAsUserId = runAsUserId; + return this; + } + + public Long getRunAsUserId() { + return runAsUserId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWarehousesRequestPb that = (ListWarehousesRequestPb) o; + return Objects.equals(runAsUserId, that.runAsUserId); + } + + @Override + public int hashCode() { + return Objects.hash(runAsUserId); + } + + @Override + public String toString() { + return new ToStringer(ListWarehousesRequestPb.class).add("runAsUserId", runAsUserId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java index 53eb8c282..1c6f2c942 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListWarehousesResponse.ListWarehousesResponseSerializer.class) +@JsonDeserialize(using = ListWarehousesResponse.ListWarehousesResponseDeserializer.class) public class ListWarehousesResponse { /** A list of warehouses and their configurations. */ - @JsonProperty("warehouses") private Collection warehouses; public ListWarehousesResponse setWarehouses(Collection warehouses) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListWarehousesResponse.class).add("warehouses", warehouses).toString(); } + + ListWarehousesResponsePb toPb() { + ListWarehousesResponsePb pb = new ListWarehousesResponsePb(); + pb.setWarehouses(warehouses); + + return pb; + } + + static ListWarehousesResponse fromPb(ListWarehousesResponsePb pb) { + ListWarehousesResponse model = new ListWarehousesResponse(); + model.setWarehouses(pb.getWarehouses()); + + return model; + } + + public static class ListWarehousesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListWarehousesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListWarehousesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListWarehousesResponseDeserializer + extends JsonDeserializer { + @Override + public ListWarehousesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListWarehousesResponsePb pb = mapper.readValue(p, ListWarehousesResponsePb.class); + return ListWarehousesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponsePb.java new file mode 100755 index 000000000..797ac717a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListWarehousesResponsePb { + @JsonProperty("warehouses") + private Collection warehouses; + + public ListWarehousesResponsePb setWarehouses(Collection warehouses) { + this.warehouses = warehouses; + return this; + } + + public Collection getWarehouses() { + return warehouses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWarehousesResponsePb that = (ListWarehousesResponsePb) o; + return Objects.equals(warehouses, that.warehouses); + } + + @Override + public int hashCode() { + return Objects.hash(warehouses); + } + + @Override + public String toString() { + return new ToStringer(ListWarehousesResponsePb.class).add("warehouses", warehouses).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptions.java index f01ee3228..35729cbb3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptions.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MultiValuesOptions.MultiValuesOptionsSerializer.class) +@JsonDeserialize(using = MultiValuesOptions.MultiValuesOptionsDeserializer.class) public class MultiValuesOptions { /** Character that prefixes each selected parameter value. */ - @JsonProperty("prefix") private String prefix; /** Character that separates each selected parameter value. Defaults to a comma. */ - @JsonProperty("separator") private String separator; /** Character that suffixes each selected parameter value. 
*/ - @JsonProperty("suffix") private String suffix; public MultiValuesOptions setPrefix(String prefix) { @@ -71,4 +79,42 @@ public String toString() { .add("suffix", suffix) .toString(); } + + MultiValuesOptionsPb toPb() { + MultiValuesOptionsPb pb = new MultiValuesOptionsPb(); + pb.setPrefix(prefix); + pb.setSeparator(separator); + pb.setSuffix(suffix); + + return pb; + } + + static MultiValuesOptions fromPb(MultiValuesOptionsPb pb) { + MultiValuesOptions model = new MultiValuesOptions(); + model.setPrefix(pb.getPrefix()); + model.setSeparator(pb.getSeparator()); + model.setSuffix(pb.getSuffix()); + + return model; + } + + public static class MultiValuesOptionsSerializer extends JsonSerializer { + @Override + public void serialize(MultiValuesOptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MultiValuesOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MultiValuesOptionsDeserializer extends JsonDeserializer { + @Override + public MultiValuesOptions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MultiValuesOptionsPb pb = mapper.readValue(p, MultiValuesOptionsPb.class); + return MultiValuesOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptionsPb.java new file mode 100755 index 000000000..3d7e89188 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/MultiValuesOptionsPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MultiValuesOptionsPb { + @JsonProperty("prefix") + private String prefix; + + @JsonProperty("separator") + private String separator; + + @JsonProperty("suffix") + private String suffix; + + public MultiValuesOptionsPb setPrefix(String prefix) { + this.prefix = prefix; + return this; + } + + public String getPrefix() { + return prefix; + } + + public MultiValuesOptionsPb setSeparator(String separator) { + this.separator = separator; + return this; + } + + public String getSeparator() { + return separator; + } + + public MultiValuesOptionsPb setSuffix(String suffix) { + this.suffix = suffix; + return this; + } + + public String getSuffix() { + return suffix; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MultiValuesOptionsPb that = (MultiValuesOptionsPb) o; + return Objects.equals(prefix, that.prefix) + && Objects.equals(separator, that.separator) + && Objects.equals(suffix, that.suffix); + } + + @Override + public int hashCode() { + return Objects.hash(prefix, separator, suffix); + } + + @Override + public String toString() { + return new ToStringer(MultiValuesOptionsPb.class) + .add("prefix", prefix) + .add("separator", separator) + .add("suffix", suffix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValue.java index cf9dde0b1..3166428d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValue.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = NumericValue.NumericValueSerializer.class) +@JsonDeserialize(using = NumericValue.NumericValueDeserializer.class) public class NumericValue { /** */ - @JsonProperty("value") private Double value; public NumericValue setValue(Double value) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(NumericValue.class).add("value", value).toString(); } + + NumericValuePb toPb() { + NumericValuePb pb = new NumericValuePb(); + pb.setValue(value); + + return pb; + } + + static NumericValue fromPb(NumericValuePb pb) { + NumericValue model = new NumericValue(); + model.setValue(pb.getValue()); + + return model; + } + + public static class NumericValueSerializer extends JsonSerializer { + @Override + public void serialize(NumericValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + NumericValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class NumericValueDeserializer extends JsonDeserializer { + @Override + public NumericValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + NumericValuePb pb = mapper.readValue(p, NumericValuePb.class); + return NumericValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValuePb.java new file mode 100755 index 000000000..1227350c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/NumericValuePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class NumericValuePb { + @JsonProperty("value") + private Double value; + + public NumericValuePb setValue(Double value) { + this.value = value; + return this; + } + + public Double getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NumericValuePb that = (NumericValuePb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(NumericValuePb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParams.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParams.java index 46aa1fa53..72a128b3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParams.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParams.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = OdbcParams.OdbcParamsSerializer.class) +@JsonDeserialize(using = OdbcParams.OdbcParamsDeserializer.class) public class OdbcParams { /** */ - @JsonProperty("hostname") private String hostname; /** */ - @JsonProperty("path") private String path; /** */ - @JsonProperty("port") private Long port; /** */ - @JsonProperty("protocol") private String protocol; public OdbcParams setHostname(String hostname) { @@ -86,4 +93,43 @@ public String toString() { .add("protocol", protocol) .toString(); } + + OdbcParamsPb toPb() { + OdbcParamsPb pb = new OdbcParamsPb(); + pb.setHostname(hostname); + pb.setPath(path); + pb.setPort(port); + pb.setProtocol(protocol); + + return pb; + } + + static OdbcParams fromPb(OdbcParamsPb pb) { + OdbcParams model = new OdbcParams(); + model.setHostname(pb.getHostname()); + model.setPath(pb.getPath()); + model.setPort(pb.getPort()); + model.setProtocol(pb.getProtocol()); + + return model; + } + + public static class OdbcParamsSerializer extends JsonSerializer { + @Override + public void serialize(OdbcParams value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + OdbcParamsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class OdbcParamsDeserializer extends JsonDeserializer { + @Override + public OdbcParams deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + OdbcParamsPb pb = mapper.readValue(p, OdbcParamsPb.class); + return OdbcParams.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParamsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParamsPb.java new file mode 100755 index 000000000..d9cdc4bef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/OdbcParamsPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class OdbcParamsPb { + @JsonProperty("hostname") + private String hostname; + + @JsonProperty("path") + private String path; + + @JsonProperty("port") + private Long port; + + @JsonProperty("protocol") + private String protocol; + + public OdbcParamsPb setHostname(String hostname) { + this.hostname = hostname; + return this; + } + + public String getHostname() { + return hostname; + } + + public OdbcParamsPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public OdbcParamsPb setPort(Long port) { + this.port = port; + return this; + } + + public Long getPort() { + return port; + } + + public OdbcParamsPb setProtocol(String protocol) { + this.protocol = protocol; + return this; + } + + public String getProtocol() { + return protocol; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OdbcParamsPb that = (OdbcParamsPb) o; + return Objects.equals(hostname, that.hostname) + && 
Objects.equals(path, that.path) + && Objects.equals(port, that.port) + && Objects.equals(protocol, that.protocol); + } + + @Override + public int hashCode() { + return Objects.hash(hostname, path, port, protocol); + } + + @Override + public String toString() { + return new ToStringer(OdbcParamsPb.class) + .add("hostname", hostname) + .add("path", path) + .add("port", port) + .add("protocol", protocol) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Parameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Parameter.java index 199428c98..b275f2079 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Parameter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Parameter.java @@ -4,45 +4,49 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Parameter.ParameterSerializer.class) +@JsonDeserialize(using = Parameter.ParameterDeserializer.class) public class Parameter { /** * List of valid parameter values, newline delimited. Only applies for dropdown list parameters. */ - @JsonProperty("enumOptions") private String enumOptions; /** * If specified, allows multiple values to be selected for this parameter. 
Only applies to * dropdown list and query-based dropdown list parameters. */ - @JsonProperty("multiValuesOptions") private MultiValuesOptions multiValuesOptions; /** The literal parameter marker that appears between double curly braces in the query text. */ - @JsonProperty("name") private String name; /** * The UUID of the query that provides the parameter values. Only applies for query-based dropdown * list parameters. */ - @JsonProperty("queryId") private String queryId; /** The text displayed in a parameter picking widget. */ - @JsonProperty("title") private String title; /** Parameters can have several different types. */ - @JsonProperty("type") private ParameterType typeValue; /** The default value for this parameter. */ - @JsonProperty("value") private Object value; public Parameter setEnumOptions(String enumOptions) { @@ -139,4 +143,49 @@ public String toString() { .add("value", value) .toString(); } + + ParameterPb toPb() { + ParameterPb pb = new ParameterPb(); + pb.setEnumOptions(enumOptions); + pb.setMultiValuesOptions(multiValuesOptions); + pb.setName(name); + pb.setQueryId(queryId); + pb.setTitle(title); + pb.setType(typeValue); + pb.setValue(value); + + return pb; + } + + static Parameter fromPb(ParameterPb pb) { + Parameter model = new Parameter(); + model.setEnumOptions(pb.getEnumOptions()); + model.setMultiValuesOptions(pb.getMultiValuesOptions()); + model.setName(pb.getName()); + model.setQueryId(pb.getQueryId()); + model.setTitle(pb.getTitle()); + model.setType(pb.getType()); + model.setValue(pb.getValue()); + + return model; + } + + public static class ParameterSerializer extends JsonSerializer { + @Override + public void serialize(Parameter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ParameterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ParameterDeserializer extends JsonDeserializer { + @Override + public Parameter deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ParameterPb pb = mapper.readValue(p, ParameterPb.class); + return Parameter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ParameterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ParameterPb.java new file mode 100755 index 000000000..bf03b3c67 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ParameterPb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ParameterPb { + @JsonProperty("enumOptions") + private String enumOptions; + + @JsonProperty("multiValuesOptions") + private MultiValuesOptions multiValuesOptions; + + @JsonProperty("name") + private String name; + + @JsonProperty("queryId") + private String queryId; + + @JsonProperty("title") + private String title; + + @JsonProperty("type") + private ParameterType typeValue; + + @JsonProperty("value") + private Object value; + + public ParameterPb setEnumOptions(String enumOptions) { + this.enumOptions = enumOptions; + return this; + } + + public String getEnumOptions() { + return enumOptions; + } + + public ParameterPb setMultiValuesOptions(MultiValuesOptions multiValuesOptions) { + this.multiValuesOptions = multiValuesOptions; + return this; + } + + public MultiValuesOptions getMultiValuesOptions() { + return multiValuesOptions; + } + + public ParameterPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ParameterPb setQueryId(String queryId) { + this.queryId 
= queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public ParameterPb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + public ParameterPb setType(ParameterType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ParameterType getType() { + return typeValue; + } + + public ParameterPb setValue(Object value) { + this.value = value; + return this; + } + + public Object getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ParameterPb that = (ParameterPb) o; + return Objects.equals(enumOptions, that.enumOptions) + && Objects.equals(multiValuesOptions, that.multiValuesOptions) + && Objects.equals(name, that.name) + && Objects.equals(queryId, that.queryId) + && Objects.equals(title, that.title) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(enumOptions, multiValuesOptions, name, queryId, title, typeValue, value); + } + + @Override + public String toString() { + return new ToStringer(ParameterPb.class) + .add("enumOptions", enumOptions) + .add("multiValuesOptions", multiValuesOptions) + .add("name", name) + .add("queryId", queryId) + .add("title", title) + .add("typeValue", typeValue) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java index 27f88fa53..1935e7f69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java @@ -21,7 +21,7 @@ public Query create(CreateQueryRequest request) { String path = "/api/2.0/sql/queries"; try { 
Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Query.class); @@ -35,7 +35,7 @@ public void delete(TrashQueryRequest request) { String path = String.format("/api/2.0/sql/queries/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, Empty.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public Query get(GetQueryRequest request) { String path = String.format("/api/2.0/sql/queries/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, Query.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListQueryObjectsResponse list(ListQueriesRequest request) { String path = "/api/2.0/sql/queries"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListQueryObjectsResponse.class); } catch (IOException e) { @@ -75,7 +75,7 @@ public ListVisualizationsForQueryResponse listVisualizations( String path = String.format("/api/2.0/sql/queries/%s/visualizations", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListVisualizationsForQueryResponse.class); } catch (IOException e) { @@ -88,7 +88,7 @@ public Query update(UpdateQueryRequest request) { String path = String.format("/api/2.0/sql/queries/%s", 
request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Query.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java index cce4fb4eb..fef2833ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java @@ -21,7 +21,7 @@ public LegacyQuery create(QueryPostContent request) { String path = "/api/2.0/preview/sql/queries"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LegacyQuery.class); @@ -35,7 +35,7 @@ public void delete(DeleteQueriesLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public LegacyQuery get(GetQueriesLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, LegacyQuery.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public 
QueryList list(ListQueriesLegacyRequest request) { String path = "/api/2.0/preview/sql/queries"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, QueryList.class); } catch (IOException e) { @@ -74,7 +74,7 @@ public void restore(RestoreQueriesLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/queries/trash/%s", request.getQueryId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, RestoreResponse.class); } catch (IOException e) { @@ -87,7 +87,7 @@ public LegacyQuery update(QueryEditContent request) { String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LegacyQuery.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java index 0b60ec358..f23321d2b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java @@ -4,82 +4,76 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Query.QuerySerializer.class) +@JsonDeserialize(using = Query.QueryDeserializer.class) public class Query { /** Whether to apply a 1000 row limit to the query result. */ - @JsonProperty("apply_auto_limit") private Boolean applyAutoLimit; /** Name of the catalog where this query will be executed. */ - @JsonProperty("catalog") private String catalog; /** Timestamp when this query was created. */ - @JsonProperty("create_time") private String createTime; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** * Display name of the query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying the query. */ - @JsonProperty("id") private String id; /** Username of the user who last saved changes to this query. */ - @JsonProperty("last_modifier_user_name") private String lastModifierUserName; /** Indicates whether the query is trashed. */ - @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; /** Username of the user that owns the query. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** List of query parameter definitions. */ - @JsonProperty("parameters") private Collection parameters; /** Workspace path of the workspace folder containing the object. */ - @JsonProperty("parent_path") private String parentPath; /** Text of the query to be run. 
*/ - @JsonProperty("query_text") private String queryText; /** Sets the "Run as" role for the object. */ - @JsonProperty("run_as_mode") private RunAsMode runAsMode; /** Name of the schema where this query will be executed. */ - @JsonProperty("schema") private String schema; /** */ - @JsonProperty("tags") private Collection tags; /** Timestamp when this query was last updated. */ - @JsonProperty("update_time") private String updateTime; /** ID of the SQL warehouse attached to the query. */ - @JsonProperty("warehouse_id") private String warehouseId; public Query setApplyAutoLimit(Boolean applyAutoLimit) { @@ -303,4 +297,69 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + QueryPb toPb() { + QueryPb pb = new QueryPb(); + pb.setApplyAutoLimit(applyAutoLimit); + pb.setCatalog(catalog); + pb.setCreateTime(createTime); + pb.setDescription(description); + pb.setDisplayName(displayName); + pb.setId(id); + pb.setLastModifierUserName(lastModifierUserName); + pb.setLifecycleState(lifecycleState); + pb.setOwnerUserName(ownerUserName); + pb.setParameters(parameters); + pb.setParentPath(parentPath); + pb.setQueryText(queryText); + pb.setRunAsMode(runAsMode); + pb.setSchema(schema); + pb.setTags(tags); + pb.setUpdateTime(updateTime); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static Query fromPb(QueryPb pb) { + Query model = new Query(); + model.setApplyAutoLimit(pb.getApplyAutoLimit()); + model.setCatalog(pb.getCatalog()); + model.setCreateTime(pb.getCreateTime()); + model.setDescription(pb.getDescription()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + model.setLastModifierUserName(pb.getLastModifierUserName()); + model.setLifecycleState(pb.getLifecycleState()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setParameters(pb.getParameters()); + model.setParentPath(pb.getParentPath()); + model.setQueryText(pb.getQueryText()); + model.setRunAsMode(pb.getRunAsMode()); + 
model.setSchema(pb.getSchema()); + model.setTags(pb.getTags()); + model.setUpdateTime(pb.getUpdateTime()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class QuerySerializer extends JsonSerializer { + @Override + public void serialize(Query value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryDeserializer extends JsonDeserializer { + @Override + public Query deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryPb pb = mapper.readValue(p, QueryPb.class); + return Query.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValue.java index 5624f9f64..177efd776 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValue.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize(using = QueryBackedValue.QueryBackedValueSerializer.class) +@JsonDeserialize(using = QueryBackedValue.QueryBackedValueDeserializer.class) public class QueryBackedValue { /** If specified, allows multiple values to be selected for this parameter. */ - @JsonProperty("multi_values_options") private MultiValuesOptions multiValuesOptions; /** UUID of the query that provides the parameter values. */ - @JsonProperty("query_id") private String queryId; /** List of selected query parameter values. */ - @JsonProperty("values") private Collection values; public QueryBackedValue setMultiValuesOptions(MultiValuesOptions multiValuesOptions) { @@ -72,4 +80,42 @@ public String toString() { .add("values", values) .toString(); } + + QueryBackedValuePb toPb() { + QueryBackedValuePb pb = new QueryBackedValuePb(); + pb.setMultiValuesOptions(multiValuesOptions); + pb.setQueryId(queryId); + pb.setValues(values); + + return pb; + } + + static QueryBackedValue fromPb(QueryBackedValuePb pb) { + QueryBackedValue model = new QueryBackedValue(); + model.setMultiValuesOptions(pb.getMultiValuesOptions()); + model.setQueryId(pb.getQueryId()); + model.setValues(pb.getValues()); + + return model; + } + + public static class QueryBackedValueSerializer extends JsonSerializer { + @Override + public void serialize(QueryBackedValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryBackedValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryBackedValueDeserializer extends JsonDeserializer { + @Override + public QueryBackedValue deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryBackedValuePb pb = mapper.readValue(p, QueryBackedValuePb.class); + return QueryBackedValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValuePb.java new file mode 100755 index 000000000..c24e13161 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryBackedValuePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryBackedValuePb { + @JsonProperty("multi_values_options") + private MultiValuesOptions multiValuesOptions; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("values") + private Collection values; + + public QueryBackedValuePb setMultiValuesOptions(MultiValuesOptions multiValuesOptions) { + this.multiValuesOptions = multiValuesOptions; + return this; + } + + public MultiValuesOptions getMultiValuesOptions() { + return multiValuesOptions; + } + + public QueryBackedValuePb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public QueryBackedValuePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryBackedValuePb that = (QueryBackedValuePb) o; + return Objects.equals(multiValuesOptions, that.multiValuesOptions) + && Objects.equals(queryId, that.queryId) + && 
Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(multiValuesOptions, queryId, values); + } + + @Override + public String toString() { + return new ToStringer(QueryBackedValuePb.class) + .add("multiValuesOptions", multiValuesOptions) + .add("queryId", queryId) + .add("values", values) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContent.java index 254467ce5..f69ebfccf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContent.java @@ -4,12 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryEditContent.QueryEditContentSerializer.class) +@JsonDeserialize(using = QueryEditContent.QueryEditContentDeserializer.class) public class QueryEditContent { /** * Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -17,17 +27,14 @@ public class QueryEditContent { * *

[Learn more]: https://docs.databricks.com/api/workspace/datasources/list */ - @JsonProperty("data_source_id") private String dataSourceId; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** The title of this query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("name") private String name; /** @@ -35,25 +42,21 @@ public class QueryEditContent { * `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. * It can be overridden at runtime. */ - @JsonProperty("options") private Object options; /** The text of the query to be run. */ - @JsonProperty("query") private String query; /** */ - @JsonIgnore private String queryId; + private String queryId; /** * Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as * viewer" behavior) or `"owner"` (signifying "run as owner" behavior) */ - @JsonProperty("run_as_role") private RunAsRole runAsRole; /** */ - @JsonProperty("tags") private Collection tags; public QueryEditContent setDataSourceId(String dataSourceId) { @@ -161,4 +164,52 @@ public String toString() { .add("tags", tags) .toString(); } + + QueryEditContentPb toPb() { + QueryEditContentPb pb = new QueryEditContentPb(); + pb.setDataSourceId(dataSourceId); + pb.setDescription(description); + pb.setName(name); + pb.setOptions(options); + pb.setQuery(query); + pb.setQueryId(queryId); + pb.setRunAsRole(runAsRole); + pb.setTags(tags); + + return pb; + } + + static QueryEditContent fromPb(QueryEditContentPb pb) { + QueryEditContent model = new QueryEditContent(); + model.setDataSourceId(pb.getDataSourceId()); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setQuery(pb.getQuery()); + model.setQueryId(pb.getQueryId()); + model.setRunAsRole(pb.getRunAsRole()); + 
model.setTags(pb.getTags()); + + return model; + } + + public static class QueryEditContentSerializer extends JsonSerializer { + @Override + public void serialize(QueryEditContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryEditContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryEditContentDeserializer extends JsonDeserializer { + @Override + public QueryEditContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryEditContentPb pb = mapper.readValue(p, QueryEditContentPb.class); + return QueryEditContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContentPb.java new file mode 100755 index 000000000..28c3297d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryEditContentPb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryEditContentPb { + @JsonProperty("data_source_id") + private String dataSourceId; + + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Object options; + + @JsonProperty("query") + private String query; + + @JsonIgnore private String queryId; + + @JsonProperty("run_as_role") + private RunAsRole runAsRole; + + @JsonProperty("tags") + private Collection tags; + + public QueryEditContentPb setDataSourceId(String dataSourceId) { + this.dataSourceId = dataSourceId; + return this; + } + + public String getDataSourceId() { + return dataSourceId; + } + + public QueryEditContentPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public QueryEditContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public QueryEditContentPb setOptions(Object options) { + this.options = options; + return this; + } + + public Object getOptions() { + return options; + } + + public QueryEditContentPb setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public QueryEditContentPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public QueryEditContentPb setRunAsRole(RunAsRole runAsRole) { + this.runAsRole = runAsRole; + return this; + } + + public RunAsRole getRunAsRole() { + return runAsRole; + } + + public QueryEditContentPb setTags(Collection tags) { + 
this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryEditContentPb that = (QueryEditContentPb) o; + return Objects.equals(dataSourceId, that.dataSourceId) + && Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(query, that.query) + && Objects.equals(queryId, that.queryId) + && Objects.equals(runAsRole, that.runAsRole) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(dataSourceId, description, name, options, query, queryId, runAsRole, tags); + } + + @Override + public String toString() { + return new ToStringer(QueryEditContentPb.class) + .add("dataSourceId", dataSourceId) + .add("description", description) + .add("name", name) + .add("options", options) + .add("query", query) + .add("queryId", queryId) + .add("runAsRole", runAsRole) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilter.java index 6299765ff..05001f2b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilter.java @@ -3,37 +3,37 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryFilter.QueryFilterSerializer.class) +@JsonDeserialize(using = QueryFilter.QueryFilterDeserializer.class) public class QueryFilter { /** A range filter for query submitted time. The time range must be <= 30 days. */ - @JsonProperty("query_start_time_range") - @QueryParam("query_start_time_range") private TimeRange queryStartTimeRange; /** A list of statement IDs. */ - @JsonProperty("statement_ids") - @QueryParam("statement_ids") private Collection statementIds; /** */ - @JsonProperty("statuses") - @QueryParam("statuses") private Collection statuses; /** A list of user IDs who ran the queries. */ - @JsonProperty("user_ids") - @QueryParam("user_ids") private Collection userIds; /** A list of warehouse IDs. 
*/ - @JsonProperty("warehouse_ids") - @QueryParam("warehouse_ids") private Collection warehouseIds; public QueryFilter setQueryStartTimeRange(TimeRange queryStartTimeRange) { @@ -108,4 +108,45 @@ public String toString() { .add("warehouseIds", warehouseIds) .toString(); } + + QueryFilterPb toPb() { + QueryFilterPb pb = new QueryFilterPb(); + pb.setQueryStartTimeRange(queryStartTimeRange); + pb.setStatementIds(statementIds); + pb.setStatuses(statuses); + pb.setUserIds(userIds); + pb.setWarehouseIds(warehouseIds); + + return pb; + } + + static QueryFilter fromPb(QueryFilterPb pb) { + QueryFilter model = new QueryFilter(); + model.setQueryStartTimeRange(pb.getQueryStartTimeRange()); + model.setStatementIds(pb.getStatementIds()); + model.setStatuses(pb.getStatuses()); + model.setUserIds(pb.getUserIds()); + model.setWarehouseIds(pb.getWarehouseIds()); + + return model; + } + + public static class QueryFilterSerializer extends JsonSerializer { + @Override + public void serialize(QueryFilter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryFilterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryFilterDeserializer extends JsonDeserializer { + @Override + public QueryFilter deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryFilterPb pb = mapper.readValue(p, QueryFilterPb.class); + return QueryFilter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilterPb.java new file mode 100755 index 000000000..5c9efe73d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryFilterPb.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryFilterPb { + @JsonProperty("query_start_time_range") + @QueryParam("query_start_time_range") + private TimeRange queryStartTimeRange; + + @JsonProperty("statement_ids") + @QueryParam("statement_ids") + private Collection statementIds; + + @JsonProperty("statuses") + @QueryParam("statuses") + private Collection statuses; + + @JsonProperty("user_ids") + @QueryParam("user_ids") + private Collection userIds; + + @JsonProperty("warehouse_ids") + @QueryParam("warehouse_ids") + private Collection warehouseIds; + + public QueryFilterPb setQueryStartTimeRange(TimeRange queryStartTimeRange) { + this.queryStartTimeRange = queryStartTimeRange; + return this; + } + + public TimeRange getQueryStartTimeRange() { + return queryStartTimeRange; + } + + public QueryFilterPb setStatementIds(Collection statementIds) { + this.statementIds = statementIds; + return this; + } + + public Collection getStatementIds() { + return statementIds; + } + + public QueryFilterPb setStatuses(Collection statuses) { + this.statuses = statuses; + return this; + } + + public Collection getStatuses() { + 
return statuses; + } + + public QueryFilterPb setUserIds(Collection userIds) { + this.userIds = userIds; + return this; + } + + public Collection getUserIds() { + return userIds; + } + + public QueryFilterPb setWarehouseIds(Collection warehouseIds) { + this.warehouseIds = warehouseIds; + return this; + } + + public Collection getWarehouseIds() { + return warehouseIds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryFilterPb that = (QueryFilterPb) o; + return Objects.equals(queryStartTimeRange, that.queryStartTimeRange) + && Objects.equals(statementIds, that.statementIds) + && Objects.equals(statuses, that.statuses) + && Objects.equals(userIds, that.userIds) + && Objects.equals(warehouseIds, that.warehouseIds); + } + + @Override + public int hashCode() { + return Objects.hash(queryStartTimeRange, statementIds, statuses, userIds, warehouseIds); + } + + @Override + public String toString() { + return new ToStringer(QueryFilterPb.class) + .add("queryStartTimeRange", queryStartTimeRange) + .add("statementIds", statementIds) + .add("statuses", statuses) + .add("userIds", userIds) + .add("warehouseIds", warehouseIds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java index 52f2deb6a..f05a879cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java @@ -21,7 +21,7 @@ public ListQueriesResponse list(ListQueryHistoryRequest request) { String path = "/api/2.0/sql/history/queries"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, 
ListQueriesResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java index da10c9e75..3ce9fcbf4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = QueryInfo.QueryInfoSerializer.class) +@JsonDeserialize(using = QueryInfo.QueryInfoDeserializer.class) public class QueryInfo { /** SQL Warehouse channel information at the time of query execution */ - @JsonProperty("channel_used") private ChannelInfo channelUsed; /** @@ -18,55 +28,42 @@ public class QueryInfo { * Power BI. This field is derived from information provided by client applications. While values * are expected to remain static over time, this cannot be guaranteed. */ - @JsonProperty("client_application") private String clientApplication; /** Total execution time of the statement ( excluding result fetch time ). */ - @JsonProperty("duration") private Long duration; /** Alias for `warehouse_id`. 
*/ - @JsonProperty("endpoint_id") private String endpointId; /** Message describing why the query could not complete. */ - @JsonProperty("error_message") private String errorMessage; /** The ID of the user whose credentials were used to run the query. */ - @JsonProperty("executed_as_user_id") private Long executedAsUserId; /** The email address or username of the user whose credentials were used to run the query. */ - @JsonProperty("executed_as_user_name") private String executedAsUserName; /** The time execution of the query ended. */ - @JsonProperty("execution_end_time_ms") private Long executionEndTimeMs; /** Whether more updates for the query are expected. */ - @JsonProperty("is_final") private Boolean isFinal; /** A key that can be used to look up query details. */ - @JsonProperty("lookup_key") private String lookupKey; /** Metrics about query execution. */ - @JsonProperty("metrics") private QueryMetrics metrics; /** Whether plans exist for the execution, or the reason why they are missing */ - @JsonProperty("plans_state") private PlansState plansState; /** The time the query ended. */ - @JsonProperty("query_end_time_ms") private Long queryEndTimeMs; /** The query ID. */ - @JsonProperty("query_id") private String queryId; /** @@ -74,27 +71,21 @@ public class QueryInfo { * the execution of this statement, such as jobs, notebooks, or dashboards. This field only * records Databricks entities. */ - @JsonProperty("query_source") private ExternalQuerySource querySource; /** The time the query started. */ - @JsonProperty("query_start_time_ms") private Long queryStartTimeMs; /** The text of the query. */ - @JsonProperty("query_text") private String queryText; /** The number of results returned by the query. */ - @JsonProperty("rows_produced") private Long rowsProduced; /** URL to the Spark UI query plan. 
*/ - @JsonProperty("spark_ui_url") private String sparkUiUrl; /** Type of statement for this query */ - @JsonProperty("statement_type") private QueryStatementType statementType; /** @@ -104,19 +95,15 @@ public class QueryInfo { * `CANCELED`: Query has been cancelled by the user. - `FAILED`: Query has failed. - `FINISHED`: * Query has completed. */ - @JsonProperty("status") private QueryStatus status; /** The ID of the user who ran the query. */ - @JsonProperty("user_id") private Long userId; /** The email address or username of the user who ran the query. */ - @JsonProperty("user_name") private String userName; /** Warehouse ID. */ - @JsonProperty("warehouse_id") private String warehouseId; public QueryInfo setChannelUsed(ChannelInfo channelUsed) { @@ -424,4 +411,83 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + QueryInfoPb toPb() { + QueryInfoPb pb = new QueryInfoPb(); + pb.setChannelUsed(channelUsed); + pb.setClientApplication(clientApplication); + pb.setDuration(duration); + pb.setEndpointId(endpointId); + pb.setErrorMessage(errorMessage); + pb.setExecutedAsUserId(executedAsUserId); + pb.setExecutedAsUserName(executedAsUserName); + pb.setExecutionEndTimeMs(executionEndTimeMs); + pb.setIsFinal(isFinal); + pb.setLookupKey(lookupKey); + pb.setMetrics(metrics); + pb.setPlansState(plansState); + pb.setQueryEndTimeMs(queryEndTimeMs); + pb.setQueryId(queryId); + pb.setQuerySource(querySource); + pb.setQueryStartTimeMs(queryStartTimeMs); + pb.setQueryText(queryText); + pb.setRowsProduced(rowsProduced); + pb.setSparkUiUrl(sparkUiUrl); + pb.setStatementType(statementType); + pb.setStatus(status); + pb.setUserId(userId); + pb.setUserName(userName); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static QueryInfo fromPb(QueryInfoPb pb) { + QueryInfo model = new QueryInfo(); + model.setChannelUsed(pb.getChannelUsed()); + model.setClientApplication(pb.getClientApplication()); + model.setDuration(pb.getDuration()); + 
model.setEndpointId(pb.getEndpointId()); + model.setErrorMessage(pb.getErrorMessage()); + model.setExecutedAsUserId(pb.getExecutedAsUserId()); + model.setExecutedAsUserName(pb.getExecutedAsUserName()); + model.setExecutionEndTimeMs(pb.getExecutionEndTimeMs()); + model.setIsFinal(pb.getIsFinal()); + model.setLookupKey(pb.getLookupKey()); + model.setMetrics(pb.getMetrics()); + model.setPlansState(pb.getPlansState()); + model.setQueryEndTimeMs(pb.getQueryEndTimeMs()); + model.setQueryId(pb.getQueryId()); + model.setQuerySource(pb.getQuerySource()); + model.setQueryStartTimeMs(pb.getQueryStartTimeMs()); + model.setQueryText(pb.getQueryText()); + model.setRowsProduced(pb.getRowsProduced()); + model.setSparkUiUrl(pb.getSparkUiUrl()); + model.setStatementType(pb.getStatementType()); + model.setStatus(pb.getStatus()); + model.setUserId(pb.getUserId()); + model.setUserName(pb.getUserName()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class QueryInfoSerializer extends JsonSerializer { + @Override + public void serialize(QueryInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryInfoDeserializer extends JsonDeserializer { + @Override + public QueryInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryInfoPb pb = mapper.readValue(p, QueryInfoPb.class); + return QueryInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfoPb.java new file mode 100755 index 000000000..2ef4df338 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfoPb.java @@ -0,0 +1,389 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QueryInfoPb { + @JsonProperty("channel_used") + private ChannelInfo channelUsed; + + @JsonProperty("client_application") + private String clientApplication; + + @JsonProperty("duration") + private Long duration; + + @JsonProperty("endpoint_id") + private String endpointId; + + @JsonProperty("error_message") + private String errorMessage; + + @JsonProperty("executed_as_user_id") + private Long executedAsUserId; + + @JsonProperty("executed_as_user_name") + private String executedAsUserName; + + @JsonProperty("execution_end_time_ms") + private Long executionEndTimeMs; + + @JsonProperty("is_final") + private Boolean isFinal; + + @JsonProperty("lookup_key") + private String lookupKey; + + @JsonProperty("metrics") + private QueryMetrics metrics; + + @JsonProperty("plans_state") + private PlansState plansState; + + @JsonProperty("query_end_time_ms") + private Long queryEndTimeMs; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("query_source") + private ExternalQuerySource querySource; + + @JsonProperty("query_start_time_ms") + private Long queryStartTimeMs; + + @JsonProperty("query_text") + private String queryText; + + 
@JsonProperty("rows_produced") + private Long rowsProduced; + + @JsonProperty("spark_ui_url") + private String sparkUiUrl; + + @JsonProperty("statement_type") + private QueryStatementType statementType; + + @JsonProperty("status") + private QueryStatus status; + + @JsonProperty("user_id") + private Long userId; + + @JsonProperty("user_name") + private String userName; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public QueryInfoPb setChannelUsed(ChannelInfo channelUsed) { + this.channelUsed = channelUsed; + return this; + } + + public ChannelInfo getChannelUsed() { + return channelUsed; + } + + public QueryInfoPb setClientApplication(String clientApplication) { + this.clientApplication = clientApplication; + return this; + } + + public String getClientApplication() { + return clientApplication; + } + + public QueryInfoPb setDuration(Long duration) { + this.duration = duration; + return this; + } + + public Long getDuration() { + return duration; + } + + public QueryInfoPb setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public QueryInfoPb setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public QueryInfoPb setExecutedAsUserId(Long executedAsUserId) { + this.executedAsUserId = executedAsUserId; + return this; + } + + public Long getExecutedAsUserId() { + return executedAsUserId; + } + + public QueryInfoPb setExecutedAsUserName(String executedAsUserName) { + this.executedAsUserName = executedAsUserName; + return this; + } + + public String getExecutedAsUserName() { + return executedAsUserName; + } + + public QueryInfoPb setExecutionEndTimeMs(Long executionEndTimeMs) { + this.executionEndTimeMs = executionEndTimeMs; + return this; + } + + public Long getExecutionEndTimeMs() { + return executionEndTimeMs; + } + + public QueryInfoPb 
setIsFinal(Boolean isFinal) { + this.isFinal = isFinal; + return this; + } + + public Boolean getIsFinal() { + return isFinal; + } + + public QueryInfoPb setLookupKey(String lookupKey) { + this.lookupKey = lookupKey; + return this; + } + + public String getLookupKey() { + return lookupKey; + } + + public QueryInfoPb setMetrics(QueryMetrics metrics) { + this.metrics = metrics; + return this; + } + + public QueryMetrics getMetrics() { + return metrics; + } + + public QueryInfoPb setPlansState(PlansState plansState) { + this.plansState = plansState; + return this; + } + + public PlansState getPlansState() { + return plansState; + } + + public QueryInfoPb setQueryEndTimeMs(Long queryEndTimeMs) { + this.queryEndTimeMs = queryEndTimeMs; + return this; + } + + public Long getQueryEndTimeMs() { + return queryEndTimeMs; + } + + public QueryInfoPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public QueryInfoPb setQuerySource(ExternalQuerySource querySource) { + this.querySource = querySource; + return this; + } + + public ExternalQuerySource getQuerySource() { + return querySource; + } + + public QueryInfoPb setQueryStartTimeMs(Long queryStartTimeMs) { + this.queryStartTimeMs = queryStartTimeMs; + return this; + } + + public Long getQueryStartTimeMs() { + return queryStartTimeMs; + } + + public QueryInfoPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public QueryInfoPb setRowsProduced(Long rowsProduced) { + this.rowsProduced = rowsProduced; + return this; + } + + public Long getRowsProduced() { + return rowsProduced; + } + + public QueryInfoPb setSparkUiUrl(String sparkUiUrl) { + this.sparkUiUrl = sparkUiUrl; + return this; + } + + public String getSparkUiUrl() { + return sparkUiUrl; + } + + public QueryInfoPb setStatementType(QueryStatementType statementType) { + this.statementType = 
statementType; + return this; + } + + public QueryStatementType getStatementType() { + return statementType; + } + + public QueryInfoPb setStatus(QueryStatus status) { + this.status = status; + return this; + } + + public QueryStatus getStatus() { + return status; + } + + public QueryInfoPb setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + public QueryInfoPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + public QueryInfoPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryInfoPb that = (QueryInfoPb) o; + return Objects.equals(channelUsed, that.channelUsed) + && Objects.equals(clientApplication, that.clientApplication) + && Objects.equals(duration, that.duration) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(executedAsUserId, that.executedAsUserId) + && Objects.equals(executedAsUserName, that.executedAsUserName) + && Objects.equals(executionEndTimeMs, that.executionEndTimeMs) + && Objects.equals(isFinal, that.isFinal) + && Objects.equals(lookupKey, that.lookupKey) + && Objects.equals(metrics, that.metrics) + && Objects.equals(plansState, that.plansState) + && Objects.equals(queryEndTimeMs, that.queryEndTimeMs) + && Objects.equals(queryId, that.queryId) + && Objects.equals(querySource, that.querySource) + && Objects.equals(queryStartTimeMs, that.queryStartTimeMs) + && Objects.equals(queryText, that.queryText) + && Objects.equals(rowsProduced, that.rowsProduced) + && Objects.equals(sparkUiUrl, that.sparkUiUrl) + && Objects.equals(statementType, that.statementType) + && 
Objects.equals(status, that.status) + && Objects.equals(userId, that.userId) + && Objects.equals(userName, that.userName) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + channelUsed, + clientApplication, + duration, + endpointId, + errorMessage, + executedAsUserId, + executedAsUserName, + executionEndTimeMs, + isFinal, + lookupKey, + metrics, + plansState, + queryEndTimeMs, + queryId, + querySource, + queryStartTimeMs, + queryText, + rowsProduced, + sparkUiUrl, + statementType, + status, + userId, + userName, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(QueryInfoPb.class) + .add("channelUsed", channelUsed) + .add("clientApplication", clientApplication) + .add("duration", duration) + .add("endpointId", endpointId) + .add("errorMessage", errorMessage) + .add("executedAsUserId", executedAsUserId) + .add("executedAsUserName", executedAsUserName) + .add("executionEndTimeMs", executionEndTimeMs) + .add("isFinal", isFinal) + .add("lookupKey", lookupKey) + .add("metrics", metrics) + .add("plansState", plansState) + .add("queryEndTimeMs", queryEndTimeMs) + .add("queryId", queryId) + .add("querySource", querySource) + .add("queryStartTimeMs", queryStartTimeMs) + .add("queryText", queryText) + .add("rowsProduced", rowsProduced) + .add("sparkUiUrl", sparkUiUrl) + .add("statementType", statementType) + .add("status", status) + .add("userId", userId) + .add("userName", userName) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryList.java index 5c4149540..f8985ddce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryList.java @@ -4,26 +4,33 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryList.QueryListSerializer.class) +@JsonDeserialize(using = QueryList.QueryListDeserializer.class) public class QueryList { /** The total number of queries. */ - @JsonProperty("count") private Long count; /** The page number that is currently displayed. */ - @JsonProperty("page") private Long page; /** The number of queries per page. */ - @JsonProperty("page_size") private Long pageSize; /** List of queries returned. 
*/ - @JsonProperty("results") private Collection results; public QueryList setCount(Long count) { @@ -87,4 +94,43 @@ public String toString() { .add("results", results) .toString(); } + + QueryListPb toPb() { + QueryListPb pb = new QueryListPb(); + pb.setCount(count); + pb.setPage(page); + pb.setPageSize(pageSize); + pb.setResults(results); + + return pb; + } + + static QueryList fromPb(QueryListPb pb) { + QueryList model = new QueryList(); + model.setCount(pb.getCount()); + model.setPage(pb.getPage()); + model.setPageSize(pb.getPageSize()); + model.setResults(pb.getResults()); + + return model; + } + + public static class QueryListSerializer extends JsonSerializer { + @Override + public void serialize(QueryList value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryListPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryListDeserializer extends JsonDeserializer { + @Override + public QueryList deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryListPb pb = mapper.readValue(p, QueryListPb.class); + return QueryList.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryListPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryListPb.java new file mode 100755 index 000000000..072869009 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryListPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryListPb { + @JsonProperty("count") + private Long count; + + @JsonProperty("page") + private Long page; + + @JsonProperty("page_size") + private Long pageSize; + + @JsonProperty("results") + private Collection results; + + public QueryListPb setCount(Long count) { + this.count = count; + return this; + } + + public Long getCount() { + return count; + } + + public QueryListPb setPage(Long page) { + this.page = page; + return this; + } + + public Long getPage() { + return page; + } + + public QueryListPb setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public QueryListPb setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryListPb that = (QueryListPb) o; + return Objects.equals(count, that.count) + && Objects.equals(page, that.page) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(count, page, pageSize, results); + } + + @Override + public String toString() { + return new ToStringer(QueryListPb.class) + .add("count", count) + .add("page", page) + .add("pageSize", pageSize) + .add("results", results) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java index 208b22c9f..ccb390811 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,31 +21,28 @@ * driver and are stored in the history service database. */ @Generated +@JsonSerialize(using = QueryMetrics.QueryMetricsSerializer.class) +@JsonDeserialize(using = QueryMetrics.QueryMetricsDeserializer.class) public class QueryMetrics { /** Time spent loading metadata and optimizing the query, in milliseconds. */ - @JsonProperty("compilation_time_ms") private Long compilationTimeMs; /** Time spent executing the query, in milliseconds. */ - @JsonProperty("execution_time_ms") private Long executionTimeMs; /** Total amount of data sent over the network between executor nodes during shuffle, in bytes. */ - @JsonProperty("network_sent_bytes") private Long networkSentBytes; /** * Timestamp of when the query was enqueued waiting while the warehouse was at max load. This * field is optional and will not appear if the query skipped the overloading queue. */ - @JsonProperty("overloading_queue_start_timestamp") private Long overloadingQueueStartTimestamp; /** * Total execution time for all individual Photon query engine tasks in the query, in * milliseconds. 
*/ - @JsonProperty("photon_total_time_ms") private Long photonTotalTimeMs; /** @@ -44,71 +50,60 @@ public class QueryMetrics { * warehouse. This field is optional and will not appear if the query skipped the provisioning * queue. */ - @JsonProperty("provisioning_queue_start_timestamp") private Long provisioningQueueStartTimestamp; /** Total number of bytes in all tables not read due to pruning */ - @JsonProperty("pruned_bytes") private Long prunedBytes; /** Total number of files from all tables not read due to pruning */ - @JsonProperty("pruned_files_count") private Long prunedFilesCount; /** Timestamp of when the underlying compute started compilation of the query. */ - @JsonProperty("query_compilation_start_timestamp") private Long queryCompilationStartTimestamp; /** Total size of data read by the query, in bytes. */ - @JsonProperty("read_bytes") private Long readBytes; /** Size of persistent data read from the cache, in bytes. */ - @JsonProperty("read_cache_bytes") private Long readCacheBytes; /** Number of files read after pruning */ - @JsonProperty("read_files_count") private Long readFilesCount; /** Number of partitions read after pruning. */ - @JsonProperty("read_partitions_count") private Long readPartitionsCount; /** Size of persistent data read from cloud object storage on your cloud tenant, in bytes. */ - @JsonProperty("read_remote_bytes") private Long readRemoteBytes; /** Time spent fetching the query results after the execution finished, in milliseconds. */ - @JsonProperty("result_fetch_time_ms") private Long resultFetchTimeMs; /** `true` if the query result was fetched from cache, `false` otherwise. */ - @JsonProperty("result_from_cache") private Boolean resultFromCache; /** Total number of rows returned by the query. */ - @JsonProperty("rows_produced_count") private Long rowsProducedCount; /** Total number of rows read by the query. 
*/ - @JsonProperty("rows_read_count") private Long rowsReadCount; /** Size of data temporarily written to disk while executing the query, in bytes. */ - @JsonProperty("spill_to_disk_bytes") private Long spillToDiskBytes; + /** + * sum of task times completed in a range of wall clock time, approximated to a configurable + * number of points aggregated over all stages and jobs in the query (based on task_total_time_ms) + */ + private TaskTimeOverRange taskTimeOverTimeRange; + /** Sum of execution time for all of the query’s tasks, in milliseconds. */ - @JsonProperty("task_total_time_ms") private Long taskTotalTimeMs; /** Total execution time of the query from the client’s point of view, in milliseconds. */ - @JsonProperty("total_time_ms") private Long totalTimeMs; /** Size pf persistent data written to cloud object storage in your cloud tenant, in bytes. */ - @JsonProperty("write_remote_bytes") private Long writeRemoteBytes; public QueryMetrics setCompilationTimeMs(Long compilationTimeMs) { @@ -282,6 +277,15 @@ public Long getSpillToDiskBytes() { return spillToDiskBytes; } + public QueryMetrics setTaskTimeOverTimeRange(TaskTimeOverRange taskTimeOverTimeRange) { + this.taskTimeOverTimeRange = taskTimeOverTimeRange; + return this; + } + + public TaskTimeOverRange getTaskTimeOverTimeRange() { + return taskTimeOverTimeRange; + } + public QueryMetrics setTaskTotalTimeMs(Long taskTotalTimeMs) { this.taskTotalTimeMs = taskTotalTimeMs; return this; @@ -333,6 +337,7 @@ public boolean equals(Object o) { && Objects.equals(rowsProducedCount, that.rowsProducedCount) && Objects.equals(rowsReadCount, that.rowsReadCount) && Objects.equals(spillToDiskBytes, that.spillToDiskBytes) + && Objects.equals(taskTimeOverTimeRange, that.taskTimeOverTimeRange) && Objects.equals(taskTotalTimeMs, that.taskTotalTimeMs) && Objects.equals(totalTimeMs, that.totalTimeMs) && Objects.equals(writeRemoteBytes, that.writeRemoteBytes); @@ -360,6 +365,7 @@ public int hashCode() { rowsProducedCount, 
rowsReadCount, spillToDiskBytes, + taskTimeOverTimeRange, taskTotalTimeMs, totalTimeMs, writeRemoteBytes); @@ -387,9 +393,87 @@ public String toString() { .add("rowsProducedCount", rowsProducedCount) .add("rowsReadCount", rowsReadCount) .add("spillToDiskBytes", spillToDiskBytes) + .add("taskTimeOverTimeRange", taskTimeOverTimeRange) .add("taskTotalTimeMs", taskTotalTimeMs) .add("totalTimeMs", totalTimeMs) .add("writeRemoteBytes", writeRemoteBytes) .toString(); } + + QueryMetricsPb toPb() { + QueryMetricsPb pb = new QueryMetricsPb(); + pb.setCompilationTimeMs(compilationTimeMs); + pb.setExecutionTimeMs(executionTimeMs); + pb.setNetworkSentBytes(networkSentBytes); + pb.setOverloadingQueueStartTimestamp(overloadingQueueStartTimestamp); + pb.setPhotonTotalTimeMs(photonTotalTimeMs); + pb.setProvisioningQueueStartTimestamp(provisioningQueueStartTimestamp); + pb.setPrunedBytes(prunedBytes); + pb.setPrunedFilesCount(prunedFilesCount); + pb.setQueryCompilationStartTimestamp(queryCompilationStartTimestamp); + pb.setReadBytes(readBytes); + pb.setReadCacheBytes(readCacheBytes); + pb.setReadFilesCount(readFilesCount); + pb.setReadPartitionsCount(readPartitionsCount); + pb.setReadRemoteBytes(readRemoteBytes); + pb.setResultFetchTimeMs(resultFetchTimeMs); + pb.setResultFromCache(resultFromCache); + pb.setRowsProducedCount(rowsProducedCount); + pb.setRowsReadCount(rowsReadCount); + pb.setSpillToDiskBytes(spillToDiskBytes); + pb.setTaskTimeOverTimeRange(taskTimeOverTimeRange); + pb.setTaskTotalTimeMs(taskTotalTimeMs); + pb.setTotalTimeMs(totalTimeMs); + pb.setWriteRemoteBytes(writeRemoteBytes); + + return pb; + } + + static QueryMetrics fromPb(QueryMetricsPb pb) { + QueryMetrics model = new QueryMetrics(); + model.setCompilationTimeMs(pb.getCompilationTimeMs()); + model.setExecutionTimeMs(pb.getExecutionTimeMs()); + model.setNetworkSentBytes(pb.getNetworkSentBytes()); + model.setOverloadingQueueStartTimestamp(pb.getOverloadingQueueStartTimestamp()); + 
model.setPhotonTotalTimeMs(pb.getPhotonTotalTimeMs()); + model.setProvisioningQueueStartTimestamp(pb.getProvisioningQueueStartTimestamp()); + model.setPrunedBytes(pb.getPrunedBytes()); + model.setPrunedFilesCount(pb.getPrunedFilesCount()); + model.setQueryCompilationStartTimestamp(pb.getQueryCompilationStartTimestamp()); + model.setReadBytes(pb.getReadBytes()); + model.setReadCacheBytes(pb.getReadCacheBytes()); + model.setReadFilesCount(pb.getReadFilesCount()); + model.setReadPartitionsCount(pb.getReadPartitionsCount()); + model.setReadRemoteBytes(pb.getReadRemoteBytes()); + model.setResultFetchTimeMs(pb.getResultFetchTimeMs()); + model.setResultFromCache(pb.getResultFromCache()); + model.setRowsProducedCount(pb.getRowsProducedCount()); + model.setRowsReadCount(pb.getRowsReadCount()); + model.setSpillToDiskBytes(pb.getSpillToDiskBytes()); + model.setTaskTimeOverTimeRange(pb.getTaskTimeOverTimeRange()); + model.setTaskTotalTimeMs(pb.getTaskTotalTimeMs()); + model.setTotalTimeMs(pb.getTotalTimeMs()); + model.setWriteRemoteBytes(pb.getWriteRemoteBytes()); + + return model; + } + + public static class QueryMetricsSerializer extends JsonSerializer { + @Override + public void serialize(QueryMetrics value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryMetricsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryMetricsDeserializer extends JsonDeserializer { + @Override + public QueryMetrics deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryMetricsPb pb = mapper.readValue(p, QueryMetricsPb.class); + return QueryMetrics.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetricsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetricsPb.java new file mode 100755 index 000000000..d2a193e24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetricsPb.java @@ -0,0 +1,378 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A query metric that encapsulates a set of measurements for a single query. Metrics come from the + * driver and are stored in the history service database. + */ +@Generated +class QueryMetricsPb { + @JsonProperty("compilation_time_ms") + private Long compilationTimeMs; + + @JsonProperty("execution_time_ms") + private Long executionTimeMs; + + @JsonProperty("network_sent_bytes") + private Long networkSentBytes; + + @JsonProperty("overloading_queue_start_timestamp") + private Long overloadingQueueStartTimestamp; + + @JsonProperty("photon_total_time_ms") + private Long photonTotalTimeMs; + + @JsonProperty("provisioning_queue_start_timestamp") + private Long provisioningQueueStartTimestamp; + + @JsonProperty("pruned_bytes") + private Long prunedBytes; + + @JsonProperty("pruned_files_count") + private Long prunedFilesCount; + + @JsonProperty("query_compilation_start_timestamp") + private Long queryCompilationStartTimestamp; + + @JsonProperty("read_bytes") + private Long readBytes; + + @JsonProperty("read_cache_bytes") + private Long readCacheBytes; + + @JsonProperty("read_files_count") + private Long readFilesCount; + + 
@JsonProperty("read_partitions_count") + private Long readPartitionsCount; + + @JsonProperty("read_remote_bytes") + private Long readRemoteBytes; + + @JsonProperty("result_fetch_time_ms") + private Long resultFetchTimeMs; + + @JsonProperty("result_from_cache") + private Boolean resultFromCache; + + @JsonProperty("rows_produced_count") + private Long rowsProducedCount; + + @JsonProperty("rows_read_count") + private Long rowsReadCount; + + @JsonProperty("spill_to_disk_bytes") + private Long spillToDiskBytes; + + @JsonProperty("task_time_over_time_range") + private TaskTimeOverRange taskTimeOverTimeRange; + + @JsonProperty("task_total_time_ms") + private Long taskTotalTimeMs; + + @JsonProperty("total_time_ms") + private Long totalTimeMs; + + @JsonProperty("write_remote_bytes") + private Long writeRemoteBytes; + + public QueryMetricsPb setCompilationTimeMs(Long compilationTimeMs) { + this.compilationTimeMs = compilationTimeMs; + return this; + } + + public Long getCompilationTimeMs() { + return compilationTimeMs; + } + + public QueryMetricsPb setExecutionTimeMs(Long executionTimeMs) { + this.executionTimeMs = executionTimeMs; + return this; + } + + public Long getExecutionTimeMs() { + return executionTimeMs; + } + + public QueryMetricsPb setNetworkSentBytes(Long networkSentBytes) { + this.networkSentBytes = networkSentBytes; + return this; + } + + public Long getNetworkSentBytes() { + return networkSentBytes; + } + + public QueryMetricsPb setOverloadingQueueStartTimestamp(Long overloadingQueueStartTimestamp) { + this.overloadingQueueStartTimestamp = overloadingQueueStartTimestamp; + return this; + } + + public Long getOverloadingQueueStartTimestamp() { + return overloadingQueueStartTimestamp; + } + + public QueryMetricsPb setPhotonTotalTimeMs(Long photonTotalTimeMs) { + this.photonTotalTimeMs = photonTotalTimeMs; + return this; + } + + public Long getPhotonTotalTimeMs() { + return photonTotalTimeMs; + } + + public QueryMetricsPb setProvisioningQueueStartTimestamp(Long 
provisioningQueueStartTimestamp) { + this.provisioningQueueStartTimestamp = provisioningQueueStartTimestamp; + return this; + } + + public Long getProvisioningQueueStartTimestamp() { + return provisioningQueueStartTimestamp; + } + + public QueryMetricsPb setPrunedBytes(Long prunedBytes) { + this.prunedBytes = prunedBytes; + return this; + } + + public Long getPrunedBytes() { + return prunedBytes; + } + + public QueryMetricsPb setPrunedFilesCount(Long prunedFilesCount) { + this.prunedFilesCount = prunedFilesCount; + return this; + } + + public Long getPrunedFilesCount() { + return prunedFilesCount; + } + + public QueryMetricsPb setQueryCompilationStartTimestamp(Long queryCompilationStartTimestamp) { + this.queryCompilationStartTimestamp = queryCompilationStartTimestamp; + return this; + } + + public Long getQueryCompilationStartTimestamp() { + return queryCompilationStartTimestamp; + } + + public QueryMetricsPb setReadBytes(Long readBytes) { + this.readBytes = readBytes; + return this; + } + + public Long getReadBytes() { + return readBytes; + } + + public QueryMetricsPb setReadCacheBytes(Long readCacheBytes) { + this.readCacheBytes = readCacheBytes; + return this; + } + + public Long getReadCacheBytes() { + return readCacheBytes; + } + + public QueryMetricsPb setReadFilesCount(Long readFilesCount) { + this.readFilesCount = readFilesCount; + return this; + } + + public Long getReadFilesCount() { + return readFilesCount; + } + + public QueryMetricsPb setReadPartitionsCount(Long readPartitionsCount) { + this.readPartitionsCount = readPartitionsCount; + return this; + } + + public Long getReadPartitionsCount() { + return readPartitionsCount; + } + + public QueryMetricsPb setReadRemoteBytes(Long readRemoteBytes) { + this.readRemoteBytes = readRemoteBytes; + return this; + } + + public Long getReadRemoteBytes() { + return readRemoteBytes; + } + + public QueryMetricsPb setResultFetchTimeMs(Long resultFetchTimeMs) { + this.resultFetchTimeMs = resultFetchTimeMs; + return 
this; + } + + public Long getResultFetchTimeMs() { + return resultFetchTimeMs; + } + + public QueryMetricsPb setResultFromCache(Boolean resultFromCache) { + this.resultFromCache = resultFromCache; + return this; + } + + public Boolean getResultFromCache() { + return resultFromCache; + } + + public QueryMetricsPb setRowsProducedCount(Long rowsProducedCount) { + this.rowsProducedCount = rowsProducedCount; + return this; + } + + public Long getRowsProducedCount() { + return rowsProducedCount; + } + + public QueryMetricsPb setRowsReadCount(Long rowsReadCount) { + this.rowsReadCount = rowsReadCount; + return this; + } + + public Long getRowsReadCount() { + return rowsReadCount; + } + + public QueryMetricsPb setSpillToDiskBytes(Long spillToDiskBytes) { + this.spillToDiskBytes = spillToDiskBytes; + return this; + } + + public Long getSpillToDiskBytes() { + return spillToDiskBytes; + } + + public QueryMetricsPb setTaskTimeOverTimeRange(TaskTimeOverRange taskTimeOverTimeRange) { + this.taskTimeOverTimeRange = taskTimeOverTimeRange; + return this; + } + + public TaskTimeOverRange getTaskTimeOverTimeRange() { + return taskTimeOverTimeRange; + } + + public QueryMetricsPb setTaskTotalTimeMs(Long taskTotalTimeMs) { + this.taskTotalTimeMs = taskTotalTimeMs; + return this; + } + + public Long getTaskTotalTimeMs() { + return taskTotalTimeMs; + } + + public QueryMetricsPb setTotalTimeMs(Long totalTimeMs) { + this.totalTimeMs = totalTimeMs; + return this; + } + + public Long getTotalTimeMs() { + return totalTimeMs; + } + + public QueryMetricsPb setWriteRemoteBytes(Long writeRemoteBytes) { + this.writeRemoteBytes = writeRemoteBytes; + return this; + } + + public Long getWriteRemoteBytes() { + return writeRemoteBytes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryMetricsPb that = (QueryMetricsPb) o; + return Objects.equals(compilationTimeMs, that.compilationTimeMs) + && 
Objects.equals(executionTimeMs, that.executionTimeMs) + && Objects.equals(networkSentBytes, that.networkSentBytes) + && Objects.equals(overloadingQueueStartTimestamp, that.overloadingQueueStartTimestamp) + && Objects.equals(photonTotalTimeMs, that.photonTotalTimeMs) + && Objects.equals(provisioningQueueStartTimestamp, that.provisioningQueueStartTimestamp) + && Objects.equals(prunedBytes, that.prunedBytes) + && Objects.equals(prunedFilesCount, that.prunedFilesCount) + && Objects.equals(queryCompilationStartTimestamp, that.queryCompilationStartTimestamp) + && Objects.equals(readBytes, that.readBytes) + && Objects.equals(readCacheBytes, that.readCacheBytes) + && Objects.equals(readFilesCount, that.readFilesCount) + && Objects.equals(readPartitionsCount, that.readPartitionsCount) + && Objects.equals(readRemoteBytes, that.readRemoteBytes) + && Objects.equals(resultFetchTimeMs, that.resultFetchTimeMs) + && Objects.equals(resultFromCache, that.resultFromCache) + && Objects.equals(rowsProducedCount, that.rowsProducedCount) + && Objects.equals(rowsReadCount, that.rowsReadCount) + && Objects.equals(spillToDiskBytes, that.spillToDiskBytes) + && Objects.equals(taskTimeOverTimeRange, that.taskTimeOverTimeRange) + && Objects.equals(taskTotalTimeMs, that.taskTotalTimeMs) + && Objects.equals(totalTimeMs, that.totalTimeMs) + && Objects.equals(writeRemoteBytes, that.writeRemoteBytes); + } + + @Override + public int hashCode() { + return Objects.hash( + compilationTimeMs, + executionTimeMs, + networkSentBytes, + overloadingQueueStartTimestamp, + photonTotalTimeMs, + provisioningQueueStartTimestamp, + prunedBytes, + prunedFilesCount, + queryCompilationStartTimestamp, + readBytes, + readCacheBytes, + readFilesCount, + readPartitionsCount, + readRemoteBytes, + resultFetchTimeMs, + resultFromCache, + rowsProducedCount, + rowsReadCount, + spillToDiskBytes, + taskTimeOverTimeRange, + taskTotalTimeMs, + totalTimeMs, + writeRemoteBytes); + } + + @Override + public String toString() { + 
return new ToStringer(QueryMetricsPb.class) + .add("compilationTimeMs", compilationTimeMs) + .add("executionTimeMs", executionTimeMs) + .add("networkSentBytes", networkSentBytes) + .add("overloadingQueueStartTimestamp", overloadingQueueStartTimestamp) + .add("photonTotalTimeMs", photonTotalTimeMs) + .add("provisioningQueueStartTimestamp", provisioningQueueStartTimestamp) + .add("prunedBytes", prunedBytes) + .add("prunedFilesCount", prunedFilesCount) + .add("queryCompilationStartTimestamp", queryCompilationStartTimestamp) + .add("readBytes", readBytes) + .add("readCacheBytes", readCacheBytes) + .add("readFilesCount", readFilesCount) + .add("readPartitionsCount", readPartitionsCount) + .add("readRemoteBytes", readRemoteBytes) + .add("resultFetchTimeMs", resultFetchTimeMs) + .add("resultFromCache", resultFromCache) + .add("rowsProducedCount", rowsProducedCount) + .add("rowsReadCount", rowsReadCount) + .add("spillToDiskBytes", spillToDiskBytes) + .add("taskTimeOverTimeRange", taskTimeOverTimeRange) + .add("taskTotalTimeMs", taskTotalTimeMs) + .add("totalTimeMs", totalTimeMs) + .add("writeRemoteBytes", writeRemoteBytes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptions.java index 92553c026..333865245 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptions.java @@ -4,29 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryOptions.QueryOptionsSerializer.class) +@JsonDeserialize(using = QueryOptions.QueryOptionsDeserializer.class) public class QueryOptions { /** The name of the catalog to execute this query in. */ - @JsonProperty("catalog") private String catalog; /** * The timestamp when this query was moved to trash. Only present when the `is_archived` property * is `true`. Trashed items are deleted after thirty days. */ - @JsonProperty("moved_to_trash_at") private String movedToTrashAt; /** */ - @JsonProperty("parameters") private Collection parameters; /** The name of the schema to execute this query in. */ - @JsonProperty("schema") private String schema; public QueryOptions setCatalog(String catalog) { @@ -90,4 +97,43 @@ public String toString() { .add("schema", schema) .toString(); } + + QueryOptionsPb toPb() { + QueryOptionsPb pb = new QueryOptionsPb(); + pb.setCatalog(catalog); + pb.setMovedToTrashAt(movedToTrashAt); + pb.setParameters(parameters); + pb.setSchema(schema); + + return pb; + } + + static QueryOptions fromPb(QueryOptionsPb pb) { + QueryOptions model = new QueryOptions(); + model.setCatalog(pb.getCatalog()); + model.setMovedToTrashAt(pb.getMovedToTrashAt()); + model.setParameters(pb.getParameters()); + model.setSchema(pb.getSchema()); + + return model; + } + + public static class QueryOptionsSerializer extends JsonSerializer { + @Override + public void serialize(QueryOptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryOptionsDeserializer extends JsonDeserializer { 
+ @Override + public QueryOptions deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryOptionsPb pb = mapper.readValue(p, QueryOptionsPb.class); + return QueryOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptionsPb.java new file mode 100755 index 000000000..f97c37a1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptionsPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryOptionsPb { + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("moved_to_trash_at") + private String movedToTrashAt; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("schema") + private String schema; + + public QueryOptionsPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public QueryOptionsPb setMovedToTrashAt(String movedToTrashAt) { + this.movedToTrashAt = movedToTrashAt; + return this; + } + + public String getMovedToTrashAt() { + return movedToTrashAt; + } + + public QueryOptionsPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public QueryOptionsPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } 
+ + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryOptionsPb that = (QueryOptionsPb) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(movedToTrashAt, that.movedToTrashAt) + && Objects.equals(parameters, that.parameters) + && Objects.equals(schema, that.schema); + } + + @Override + public int hashCode() { + return Objects.hash(catalog, movedToTrashAt, parameters, schema); + } + + @Override + public String toString() { + return new ToStringer(QueryOptionsPb.class) + .add("catalog", catalog) + .add("movedToTrashAt", movedToTrashAt) + .add("parameters", parameters) + .add("schema", schema) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameter.java index d6ea41be6..bba1dc13e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameter.java @@ -4,44 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = QueryParameter.QueryParameterSerializer.class) +@JsonDeserialize(using = 
QueryParameter.QueryParameterDeserializer.class) public class QueryParameter { /** * Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or * `date_range_value`. */ - @JsonProperty("date_range_value") private DateRangeValue dateRangeValue; /** Date query parameter value. Can only specify one of `dynamic_date_value` or `date_value`. */ - @JsonProperty("date_value") private DateValue dateValue; /** Dropdown query parameter value. */ - @JsonProperty("enum_value") private EnumValue enumValue; /** Literal parameter marker that appears between double curly braces in the query text. */ - @JsonProperty("name") private String name; /** Numeric query parameter value. */ - @JsonProperty("numeric_value") private NumericValue numericValue; /** Query-based dropdown query parameter value. */ - @JsonProperty("query_backed_value") private QueryBackedValue queryBackedValue; /** Text query parameter value. */ - @JsonProperty("text_value") private TextValue textValue; /** Text displayed in the user-facing parameter widget in the UI. 
*/ - @JsonProperty("title") private String title; public QueryParameter setDateRangeValue(DateRangeValue dateRangeValue) { @@ -157,4 +160,52 @@ public String toString() { .add("title", title) .toString(); } + + QueryParameterPb toPb() { + QueryParameterPb pb = new QueryParameterPb(); + pb.setDateRangeValue(dateRangeValue); + pb.setDateValue(dateValue); + pb.setEnumValue(enumValue); + pb.setName(name); + pb.setNumericValue(numericValue); + pb.setQueryBackedValue(queryBackedValue); + pb.setTextValue(textValue); + pb.setTitle(title); + + return pb; + } + + static QueryParameter fromPb(QueryParameterPb pb) { + QueryParameter model = new QueryParameter(); + model.setDateRangeValue(pb.getDateRangeValue()); + model.setDateValue(pb.getDateValue()); + model.setEnumValue(pb.getEnumValue()); + model.setName(pb.getName()); + model.setNumericValue(pb.getNumericValue()); + model.setQueryBackedValue(pb.getQueryBackedValue()); + model.setTextValue(pb.getTextValue()); + model.setTitle(pb.getTitle()); + + return model; + } + + public static class QueryParameterSerializer extends JsonSerializer { + @Override + public void serialize(QueryParameter value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryParameterPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryParameterDeserializer extends JsonDeserializer { + @Override + public QueryParameter deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryParameterPb pb = mapper.readValue(p, QueryParameterPb.class); + return QueryParameter.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameterPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameterPb.java new file mode 100755 index 000000000..e90b94632 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameterPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QueryParameterPb { + @JsonProperty("date_range_value") + private DateRangeValue dateRangeValue; + + @JsonProperty("date_value") + private DateValue dateValue; + + @JsonProperty("enum_value") + private EnumValue enumValue; + + @JsonProperty("name") + private String name; + + @JsonProperty("numeric_value") + private NumericValue numericValue; + + @JsonProperty("query_backed_value") + private QueryBackedValue queryBackedValue; + + @JsonProperty("text_value") + private TextValue textValue; + + @JsonProperty("title") + private String title; + + public QueryParameterPb setDateRangeValue(DateRangeValue dateRangeValue) { + this.dateRangeValue = dateRangeValue; + return this; + } + + public DateRangeValue getDateRangeValue() { + return dateRangeValue; + } + + public QueryParameterPb setDateValue(DateValue dateValue) { + this.dateValue = dateValue; + return this; + } + + public DateValue getDateValue() { + return dateValue; + } + + public QueryParameterPb setEnumValue(EnumValue enumValue) { + this.enumValue = enumValue; + return this; + } + + public EnumValue getEnumValue() { + return enumValue; + } + + public QueryParameterPb 
setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public QueryParameterPb setNumericValue(NumericValue numericValue) { + this.numericValue = numericValue; + return this; + } + + public NumericValue getNumericValue() { + return numericValue; + } + + public QueryParameterPb setQueryBackedValue(QueryBackedValue queryBackedValue) { + this.queryBackedValue = queryBackedValue; + return this; + } + + public QueryBackedValue getQueryBackedValue() { + return queryBackedValue; + } + + public QueryParameterPb setTextValue(TextValue textValue) { + this.textValue = textValue; + return this; + } + + public TextValue getTextValue() { + return textValue; + } + + public QueryParameterPb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryParameterPb that = (QueryParameterPb) o; + return Objects.equals(dateRangeValue, that.dateRangeValue) + && Objects.equals(dateValue, that.dateValue) + && Objects.equals(enumValue, that.enumValue) + && Objects.equals(name, that.name) + && Objects.equals(numericValue, that.numericValue) + && Objects.equals(queryBackedValue, that.queryBackedValue) + && Objects.equals(textValue, that.textValue) + && Objects.equals(title, that.title); + } + + @Override + public int hashCode() { + return Objects.hash( + dateRangeValue, + dateValue, + enumValue, + name, + numericValue, + queryBackedValue, + textValue, + title); + } + + @Override + public String toString() { + return new ToStringer(QueryParameterPb.class) + .add("dateRangeValue", dateRangeValue) + .add("dateValue", dateValue) + .add("enumValue", enumValue) + .add("name", name) + .add("numericValue", numericValue) + .add("queryBackedValue", queryBackedValue) + .add("textValue", textValue) + .add("title", title) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPb.java new file mode 100755 index 000000000..e267b5f6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPb.java @@ -0,0 +1,285 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryPb { + @JsonProperty("apply_auto_limit") + private Boolean applyAutoLimit; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("description") + private String description; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_modifier_user_name") + private String lastModifierUserName; + + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("parent_path") + private String parentPath; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("run_as_mode") + private RunAsMode runAsMode; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("update_time") + private String updateTime; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public QueryPb setApplyAutoLimit(Boolean applyAutoLimit) { + this.applyAutoLimit = applyAutoLimit; + return this; + } + + public Boolean getApplyAutoLimit() { + return applyAutoLimit; + } + + public QueryPb setCatalog(String 
catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public QueryPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public QueryPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public QueryPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public QueryPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public QueryPb setLastModifierUserName(String lastModifierUserName) { + this.lastModifierUserName = lastModifierUserName; + return this; + } + + public String getLastModifierUserName() { + return lastModifierUserName; + } + + public QueryPb setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public QueryPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public QueryPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public QueryPb setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public QueryPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public QueryPb setRunAsMode(RunAsMode runAsMode) { + this.runAsMode = runAsMode; + return this; + } + + public RunAsMode getRunAsMode() { + 
return runAsMode; + } + + public QueryPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public QueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public QueryPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public QueryPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryPb that = (QueryPb) o; + return Objects.equals(applyAutoLimit, that.applyAutoLimit) + && Objects.equals(catalog, that.catalog) + && Objects.equals(createTime, that.createTime) + && Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id) + && Objects.equals(lastModifierUserName, that.lastModifierUserName) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(parameters, that.parameters) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(queryText, that.queryText) + && Objects.equals(runAsMode, that.runAsMode) + && Objects.equals(schema, that.schema) + && Objects.equals(tags, that.tags) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + applyAutoLimit, + catalog, + createTime, + description, + displayName, + id, + lastModifierUserName, + lifecycleState, + ownerUserName, + parameters, + parentPath, + queryText, + runAsMode, + schema, + tags, + updateTime, + warehouseId); + } + + @Override + public String 
toString() { + return new ToStringer(QueryPb.class) + .add("applyAutoLimit", applyAutoLimit) + .add("catalog", catalog) + .add("createTime", createTime) + .add("description", description) + .add("displayName", displayName) + .add("id", id) + .add("lastModifierUserName", lastModifierUserName) + .add("lifecycleState", lifecycleState) + .add("ownerUserName", ownerUserName) + .add("parameters", parameters) + .add("parentPath", parentPath) + .add("queryText", queryText) + .add("runAsMode", runAsMode) + .add("schema", schema) + .add("tags", tags) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContent.java index fbc1aedcb..74314931b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContent.java @@ -4,11 +4,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryPostContent.QueryPostContentSerializer.class) +@JsonDeserialize(using = QueryPostContent.QueryPostContentDeserializer.class) public class 
QueryPostContent { /** * Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -16,17 +27,14 @@ public class QueryPostContent { * *

[Learn more]: https://docs.databricks.com/api/workspace/datasources/list */ - @JsonProperty("data_source_id") private String dataSourceId; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** The title of this query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("name") private String name; /** @@ -34,26 +42,21 @@ public class QueryPostContent { * `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. * It can be overridden at runtime. */ - @JsonProperty("options") private Object options; /** The identifier of the workspace folder containing the object. */ - @JsonProperty("parent") private String parent; /** The text of the query to be run. */ - @JsonProperty("query") private String query; /** * Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as * viewer" behavior) or `"owner"` (signifying "run as owner" behavior) */ - @JsonProperty("run_as_role") private RunAsRole runAsRole; /** */ - @JsonProperty("tags") private Collection tags; public QueryPostContent setDataSourceId(String dataSourceId) { @@ -161,4 +164,52 @@ public String toString() { .add("tags", tags) .toString(); } + + QueryPostContentPb toPb() { + QueryPostContentPb pb = new QueryPostContentPb(); + pb.setDataSourceId(dataSourceId); + pb.setDescription(description); + pb.setName(name); + pb.setOptions(options); + pb.setParent(parent); + pb.setQuery(query); + pb.setRunAsRole(runAsRole); + pb.setTags(tags); + + return pb; + } + + static QueryPostContent fromPb(QueryPostContentPb pb) { + QueryPostContent model = new QueryPostContent(); + model.setDataSourceId(pb.getDataSourceId()); + model.setDescription(pb.getDescription()); + model.setName(pb.getName()); + model.setOptions(pb.getOptions()); + model.setParent(pb.getParent()); + model.setQuery(pb.getQuery()); + 
model.setRunAsRole(pb.getRunAsRole()); + model.setTags(pb.getTags()); + + return model; + } + + public static class QueryPostContentSerializer extends JsonSerializer { + @Override + public void serialize(QueryPostContent value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryPostContentPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryPostContentDeserializer extends JsonDeserializer { + @Override + public QueryPostContent deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryPostContentPb pb = mapper.readValue(p, QueryPostContentPb.class); + return QueryPostContent.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContentPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContentPb.java new file mode 100755 index 000000000..9f7021537 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContentPb.java @@ -0,0 +1,142 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryPostContentPb { + @JsonProperty("data_source_id") + private String dataSourceId; + + @JsonProperty("description") + private String description; + + @JsonProperty("name") + private String name; + + @JsonProperty("options") + private Object options; + + @JsonProperty("parent") + private String parent; + + @JsonProperty("query") + private String query; + + @JsonProperty("run_as_role") + private RunAsRole runAsRole; + + @JsonProperty("tags") + private Collection tags; + + public QueryPostContentPb setDataSourceId(String dataSourceId) { + this.dataSourceId = dataSourceId; + return this; + } + + public String getDataSourceId() { + return dataSourceId; + } + + public QueryPostContentPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public QueryPostContentPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public QueryPostContentPb setOptions(Object options) { + this.options = options; + return this; + } + + public Object getOptions() { + return options; + } + + public QueryPostContentPb setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public QueryPostContentPb setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public QueryPostContentPb setRunAsRole(RunAsRole runAsRole) { + this.runAsRole = runAsRole; + return this; + } + + public RunAsRole getRunAsRole() { + return runAsRole; + } + + public QueryPostContentPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + 
public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryPostContentPb that = (QueryPostContentPb) o; + return Objects.equals(dataSourceId, that.dataSourceId) + && Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(options, that.options) + && Objects.equals(parent, that.parent) + && Objects.equals(query, that.query) + && Objects.equals(runAsRole, that.runAsRole) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(dataSourceId, description, name, options, parent, query, runAsRole, tags); + } + + @Override + public String toString() { + return new ToStringer(QueryPostContentPb.class) + .add("dataSourceId", dataSourceId) + .add("description", description) + .add("name", name) + .add("options", options) + .add("parent", parent) + .add("query", query) + .add("runAsRole", runAsRole) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java index 0dacc2b8d..112d84222 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java @@ -21,7 +21,7 @@ public Visualization create(CreateVisualizationRequest request) { String path = "/api/2.0/sql/visualizations"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Visualization.class); @@ -35,7 +35,7 @@ public void 
delete(DeleteVisualizationRequest request) { String path = String.format("/api/2.0/sql/visualizations/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, Empty.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public Visualization update(UpdateVisualizationRequest request) { String path = String.format("/api/2.0/sql/visualizations/%s", request.getId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, Visualization.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java index e62786295..f583b5e25 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java @@ -21,7 +21,7 @@ public LegacyVisualization create(CreateQueryVisualizationsLegacyRequest request String path = "/api/2.0/preview/sql/visualizations"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LegacyVisualization.class); @@ -35,7 +35,7 @@ public void delete(DeleteQueryVisualizationsLegacyRequest request) { String path = String.format("/api/2.0/preview/sql/visualizations/%s", request.getId()); try { Request req = new Request("DELETE", path); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public LegacyVisualization update(LegacyVisualization request) { String path = String.format("/api/2.0/preview/sql/visualizations/%s", request.getId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, LegacyVisualization.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java index 34188d381..5c9c8a430 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepeatedEndpointConfPairs.RepeatedEndpointConfPairsSerializer.class) 
+@JsonDeserialize(using = RepeatedEndpointConfPairs.RepeatedEndpointConfPairsDeserializer.class) public class RepeatedEndpointConfPairs { /** Deprecated: Use configuration_pairs */ - @JsonProperty("config_pair") private Collection configPair; /** */ - @JsonProperty("configuration_pairs") private Collection configurationPairs; public RepeatedEndpointConfPairs setConfigPair(Collection configPair) { @@ -58,4 +67,43 @@ public String toString() { .add("configurationPairs", configurationPairs) .toString(); } + + RepeatedEndpointConfPairsPb toPb() { + RepeatedEndpointConfPairsPb pb = new RepeatedEndpointConfPairsPb(); + pb.setConfigPair(configPair); + pb.setConfigurationPairs(configurationPairs); + + return pb; + } + + static RepeatedEndpointConfPairs fromPb(RepeatedEndpointConfPairsPb pb) { + RepeatedEndpointConfPairs model = new RepeatedEndpointConfPairs(); + model.setConfigPair(pb.getConfigPair()); + model.setConfigurationPairs(pb.getConfigurationPairs()); + + return model; + } + + public static class RepeatedEndpointConfPairsSerializer + extends JsonSerializer { + @Override + public void serialize( + RepeatedEndpointConfPairs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepeatedEndpointConfPairsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepeatedEndpointConfPairsDeserializer + extends JsonDeserializer { + @Override + public RepeatedEndpointConfPairs deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepeatedEndpointConfPairsPb pb = mapper.readValue(p, RepeatedEndpointConfPairsPb.class); + return RepeatedEndpointConfPairs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairsPb.java new file mode 100755 index 000000000..3e891970d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairsPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepeatedEndpointConfPairsPb { + @JsonProperty("config_pair") + private Collection configPair; + + @JsonProperty("configuration_pairs") + private Collection configurationPairs; + + public RepeatedEndpointConfPairsPb setConfigPair(Collection configPair) { + this.configPair = configPair; + return this; + } + + public Collection getConfigPair() { + return configPair; + } + + public RepeatedEndpointConfPairsPb setConfigurationPairs( + Collection configurationPairs) { + this.configurationPairs = configurationPairs; + return this; + } + + public Collection getConfigurationPairs() { + return configurationPairs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepeatedEndpointConfPairsPb that = (RepeatedEndpointConfPairsPb) o; + return Objects.equals(configPair, that.configPair) + && Objects.equals(configurationPairs, that.configurationPairs); + } + + @Override + public int hashCode() { + return Objects.hash(configPair, configurationPairs); + } + + 
@Override + public String toString() { + return new ToStringer(RepeatedEndpointConfPairsPb.class) + .add("configPair", configPair) + .add("configurationPairs", configurationPairs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java index 0110209a3..fec79b87f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Restore a dashboard */ @Generated +@JsonSerialize(using = RestoreDashboardRequest.RestoreDashboardRequestSerializer.class) +@JsonDeserialize(using = RestoreDashboardRequest.RestoreDashboardRequestDeserializer.class) public class RestoreDashboardRequest { /** */ - @JsonIgnore private String dashboardId; + private String dashboardId; public RestoreDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreDashboardRequest.class).add("dashboardId", dashboardId).toString(); } + + 
RestoreDashboardRequestPb toPb() { + RestoreDashboardRequestPb pb = new RestoreDashboardRequestPb(); + pb.setDashboardId(dashboardId); + + return pb; + } + + static RestoreDashboardRequest fromPb(RestoreDashboardRequestPb pb) { + RestoreDashboardRequest model = new RestoreDashboardRequest(); + model.setDashboardId(pb.getDashboardId()); + + return model; + } + + public static class RestoreDashboardRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RestoreDashboardRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreDashboardRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreDashboardRequestDeserializer + extends JsonDeserializer { + @Override + public RestoreDashboardRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreDashboardRequestPb pb = mapper.readValue(p, RestoreDashboardRequestPb.class); + return RestoreDashboardRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequestPb.java new file mode 100755 index 000000000..8e8ad843b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Restore a dashboard */ +@Generated +class RestoreDashboardRequestPb { + @JsonIgnore private String dashboardId; + + public RestoreDashboardRequestPb setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreDashboardRequestPb that = (RestoreDashboardRequestPb) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(RestoreDashboardRequestPb.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java index 85d6623ea..9790bd68a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Restore a query */ @Generated +@JsonSerialize(using = RestoreQueriesLegacyRequest.RestoreQueriesLegacyRequestSerializer.class) +@JsonDeserialize(using = RestoreQueriesLegacyRequest.RestoreQueriesLegacyRequestDeserializer.class) public class RestoreQueriesLegacyRequest { /** */ - @JsonIgnore private String queryId; + private String queryId; public RestoreQueriesLegacyRequest setQueryId(String queryId) { this.queryId = queryId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreQueriesLegacyRequest.class).add("queryId", queryId).toString(); } + + RestoreQueriesLegacyRequestPb toPb() { + RestoreQueriesLegacyRequestPb pb = new RestoreQueriesLegacyRequestPb(); + pb.setQueryId(queryId); + + return pb; + } + + static RestoreQueriesLegacyRequest fromPb(RestoreQueriesLegacyRequestPb pb) { + RestoreQueriesLegacyRequest model = new RestoreQueriesLegacyRequest(); + model.setQueryId(pb.getQueryId()); + + return model; + } + + public static class RestoreQueriesLegacyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RestoreQueriesLegacyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreQueriesLegacyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreQueriesLegacyRequestDeserializer + extends JsonDeserializer { + @Override + public RestoreQueriesLegacyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreQueriesLegacyRequestPb pb = mapper.readValue(p, RestoreQueriesLegacyRequestPb.class); + return RestoreQueriesLegacyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequestPb.java new file mode 100755 index 000000000..15177a450 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Restore a query */ +@Generated +class RestoreQueriesLegacyRequestPb { + @JsonIgnore private String queryId; + + public RestoreQueriesLegacyRequestPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreQueriesLegacyRequestPb that = (RestoreQueriesLegacyRequestPb) o; + return Objects.equals(queryId, that.queryId); + } + + @Override + public int hashCode() { + return Objects.hash(queryId); + } + + @Override + public String toString() { + return new ToStringer(RestoreQueriesLegacyRequestPb.class).add("queryId", queryId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java index 09f49de67..dbcee9730 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RestoreResponse.RestoreResponseSerializer.class) +@JsonDeserialize(using = RestoreResponse.RestoreResponseDeserializer.class) public class RestoreResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(RestoreResponse.class).toString(); } + + RestoreResponsePb toPb() { + RestoreResponsePb pb = new RestoreResponsePb(); + + return pb; + } + + static RestoreResponse fromPb(RestoreResponsePb pb) { + RestoreResponse model = new RestoreResponse(); + + return model; + } + + public static class RestoreResponseSerializer extends JsonSerializer { + @Override + public void serialize(RestoreResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RestoreResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RestoreResponseDeserializer extends JsonDeserializer { + @Override + public RestoreResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RestoreResponsePb pb = mapper.readValue(p, RestoreResponsePb.class); + return RestoreResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponsePb.java new file mode 100755 index 000000000..9046f49df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class RestoreResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RestoreResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java index be696df15..6675d54f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java @@ -4,32 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ResultData.ResultDataSerializer.class) +@JsonDeserialize(using = ResultData.ResultDataDeserializer.class) public class ResultData { /** * The number of bytes in the result chunk. This field is not available when using `INLINE` * disposition. */ - @JsonProperty("byte_count") private Long byteCount; /** The position within the sequence of result set chunks. */ - @JsonProperty("chunk_index") private Long chunkIndex; /** * The `JSON_ARRAY` format is an array of arrays of values, where each non-null value is formatted * as a string. Null values are encoded as JSON `null`. */ - @JsonProperty("data_array") private Collection> dataArray; /** */ - @JsonProperty("external_links") private Collection externalLinks; /** @@ -37,7 +44,6 @@ public class ResultData { * no more chunks. The next chunk can be fetched with a * :method:statementexecution/getStatementResultChunkN request. */ - @JsonProperty("next_chunk_index") private Long nextChunkIndex; /** @@ -45,15 +51,12 @@ public class ResultData { * more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and * should be treated as an opaque link. This is an alternative to using `next_chunk_index`. */ - @JsonProperty("next_chunk_internal_link") private String nextChunkInternalLink; /** The number of rows within the result chunk. */ - @JsonProperty("row_count") private Long rowCount; /** The starting row offset within the result set. 
*/ - @JsonProperty("row_offset") private Long rowOffset; public ResultData setByteCount(Long byteCount) { @@ -169,4 +172,51 @@ public String toString() { .add("rowOffset", rowOffset) .toString(); } + + ResultDataPb toPb() { + ResultDataPb pb = new ResultDataPb(); + pb.setByteCount(byteCount); + pb.setChunkIndex(chunkIndex); + pb.setDataArray(dataArray); + pb.setExternalLinks(externalLinks); + pb.setNextChunkIndex(nextChunkIndex); + pb.setNextChunkInternalLink(nextChunkInternalLink); + pb.setRowCount(rowCount); + pb.setRowOffset(rowOffset); + + return pb; + } + + static ResultData fromPb(ResultDataPb pb) { + ResultData model = new ResultData(); + model.setByteCount(pb.getByteCount()); + model.setChunkIndex(pb.getChunkIndex()); + model.setDataArray(pb.getDataArray()); + model.setExternalLinks(pb.getExternalLinks()); + model.setNextChunkIndex(pb.getNextChunkIndex()); + model.setNextChunkInternalLink(pb.getNextChunkInternalLink()); + model.setRowCount(pb.getRowCount()); + model.setRowOffset(pb.getRowOffset()); + + return model; + } + + public static class ResultDataSerializer extends JsonSerializer { + @Override + public void serialize(ResultData value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultDataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultDataDeserializer extends JsonDeserializer { + @Override + public ResultData deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultDataPb pb = mapper.readValue(p, ResultDataPb.class); + return ResultData.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultDataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultDataPb.java new file mode 100755 index 000000000..a08375223 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultDataPb.java @@ -0,0 +1,150 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ResultDataPb { + @JsonProperty("byte_count") + private Long byteCount; + + @JsonProperty("chunk_index") + private Long chunkIndex; + + @JsonProperty("data_array") + private Collection> dataArray; + + @JsonProperty("external_links") + private Collection externalLinks; + + @JsonProperty("next_chunk_index") + private Long nextChunkIndex; + + @JsonProperty("next_chunk_internal_link") + private String nextChunkInternalLink; + + @JsonProperty("row_count") + private Long rowCount; + + @JsonProperty("row_offset") + private Long rowOffset; + + public ResultDataPb setByteCount(Long byteCount) { + this.byteCount = byteCount; + return this; + } + + public Long getByteCount() { + return byteCount; + } + + public ResultDataPb setChunkIndex(Long chunkIndex) { + this.chunkIndex = chunkIndex; + return this; + } + + public Long getChunkIndex() { + return chunkIndex; + } + + public ResultDataPb setDataArray(Collection> dataArray) { + this.dataArray = dataArray; + return this; + } + + public Collection> getDataArray() { + return dataArray; + } + + public ResultDataPb setExternalLinks(Collection externalLinks) { + this.externalLinks = 
externalLinks; + return this; + } + + public Collection getExternalLinks() { + return externalLinks; + } + + public ResultDataPb setNextChunkIndex(Long nextChunkIndex) { + this.nextChunkIndex = nextChunkIndex; + return this; + } + + public Long getNextChunkIndex() { + return nextChunkIndex; + } + + public ResultDataPb setNextChunkInternalLink(String nextChunkInternalLink) { + this.nextChunkInternalLink = nextChunkInternalLink; + return this; + } + + public String getNextChunkInternalLink() { + return nextChunkInternalLink; + } + + public ResultDataPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + public ResultDataPb setRowOffset(Long rowOffset) { + this.rowOffset = rowOffset; + return this; + } + + public Long getRowOffset() { + return rowOffset; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultDataPb that = (ResultDataPb) o; + return Objects.equals(byteCount, that.byteCount) + && Objects.equals(chunkIndex, that.chunkIndex) + && Objects.equals(dataArray, that.dataArray) + && Objects.equals(externalLinks, that.externalLinks) + && Objects.equals(nextChunkIndex, that.nextChunkIndex) + && Objects.equals(nextChunkInternalLink, that.nextChunkInternalLink) + && Objects.equals(rowCount, that.rowCount) + && Objects.equals(rowOffset, that.rowOffset); + } + + @Override + public int hashCode() { + return Objects.hash( + byteCount, + chunkIndex, + dataArray, + externalLinks, + nextChunkIndex, + nextChunkInternalLink, + rowCount, + rowOffset); + } + + @Override + public String toString() { + return new ToStringer(ResultDataPb.class) + .add("byteCount", byteCount) + .add("chunkIndex", chunkIndex) + .add("dataArray", dataArray) + .add("externalLinks", externalLinks) + .add("nextChunkIndex", nextChunkIndex) + .add("nextChunkInternalLink", nextChunkInternalLink) + .add("rowCount", rowCount) + 
.add("rowOffset", rowOffset) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java index d4189efa2..24886b439 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java @@ -4,42 +4,46 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The result manifest provides schema and metadata for the result set. */ @Generated +@JsonSerialize(using = ResultManifest.ResultManifestSerializer.class) +@JsonDeserialize(using = ResultManifest.ResultManifestDeserializer.class) public class ResultManifest { /** Array of result set chunk metadata. */ - @JsonProperty("chunks") private Collection chunks; /** */ - @JsonProperty("format") private Format format; /** The schema is an ordered list of column descriptions. */ - @JsonProperty("schema") private ResultSchema schema; /** * The total number of bytes in the result set. This field is not available when using `INLINE` * disposition. 
*/ - @JsonProperty("total_byte_count") private Long totalByteCount; /** The total number of chunks that the result set has been divided into. */ - @JsonProperty("total_chunk_count") private Long totalChunkCount; /** The total number of rows in the result set. */ - @JsonProperty("total_row_count") private Long totalRowCount; /** Indicates whether the result is truncated due to `row_limit` or `byte_limit`. */ - @JsonProperty("truncated") private Boolean truncated; public ResultManifest setChunks(Collection chunks) { @@ -137,4 +141,50 @@ public String toString() { .add("truncated", truncated) .toString(); } + + ResultManifestPb toPb() { + ResultManifestPb pb = new ResultManifestPb(); + pb.setChunks(chunks); + pb.setFormat(format); + pb.setSchema(schema); + pb.setTotalByteCount(totalByteCount); + pb.setTotalChunkCount(totalChunkCount); + pb.setTotalRowCount(totalRowCount); + pb.setTruncated(truncated); + + return pb; + } + + static ResultManifest fromPb(ResultManifestPb pb) { + ResultManifest model = new ResultManifest(); + model.setChunks(pb.getChunks()); + model.setFormat(pb.getFormat()); + model.setSchema(pb.getSchema()); + model.setTotalByteCount(pb.getTotalByteCount()); + model.setTotalChunkCount(pb.getTotalChunkCount()); + model.setTotalRowCount(pb.getTotalRowCount()); + model.setTruncated(pb.getTruncated()); + + return model; + } + + public static class ResultManifestSerializer extends JsonSerializer { + @Override + public void serialize(ResultManifest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultManifestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultManifestDeserializer extends JsonDeserializer { + @Override + public ResultManifest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultManifestPb pb = mapper.readValue(p, ResultManifestPb.class); + return ResultManifest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifestPb.java new file mode 100755 index 000000000..e0353014d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifestPb.java @@ -0,0 +1,130 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The result manifest provides schema and metadata for the result set. */ +@Generated +class ResultManifestPb { + @JsonProperty("chunks") + private Collection chunks; + + @JsonProperty("format") + private Format format; + + @JsonProperty("schema") + private ResultSchema schema; + + @JsonProperty("total_byte_count") + private Long totalByteCount; + + @JsonProperty("total_chunk_count") + private Long totalChunkCount; + + @JsonProperty("total_row_count") + private Long totalRowCount; + + @JsonProperty("truncated") + private Boolean truncated; + + public ResultManifestPb setChunks(Collection chunks) { + this.chunks = chunks; + return this; + } + + public Collection getChunks() { + return chunks; + } + + public ResultManifestPb setFormat(Format format) { + this.format = format; + return this; + } + + public Format getFormat() { + return format; + } + + public ResultManifestPb setSchema(ResultSchema schema) { + this.schema = schema; + return this; + } + + public ResultSchema getSchema() { + return schema; + } + + public ResultManifestPb setTotalByteCount(Long totalByteCount) { + this.totalByteCount = totalByteCount; + 
return this; + } + + public Long getTotalByteCount() { + return totalByteCount; + } + + public ResultManifestPb setTotalChunkCount(Long totalChunkCount) { + this.totalChunkCount = totalChunkCount; + return this; + } + + public Long getTotalChunkCount() { + return totalChunkCount; + } + + public ResultManifestPb setTotalRowCount(Long totalRowCount) { + this.totalRowCount = totalRowCount; + return this; + } + + public Long getTotalRowCount() { + return totalRowCount; + } + + public ResultManifestPb setTruncated(Boolean truncated) { + this.truncated = truncated; + return this; + } + + public Boolean getTruncated() { + return truncated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultManifestPb that = (ResultManifestPb) o; + return Objects.equals(chunks, that.chunks) + && Objects.equals(format, that.format) + && Objects.equals(schema, that.schema) + && Objects.equals(totalByteCount, that.totalByteCount) + && Objects.equals(totalChunkCount, that.totalChunkCount) + && Objects.equals(totalRowCount, that.totalRowCount) + && Objects.equals(truncated, that.truncated); + } + + @Override + public int hashCode() { + return Objects.hash( + chunks, format, schema, totalByteCount, totalChunkCount, totalRowCount, truncated); + } + + @Override + public String toString() { + return new ToStringer(ResultManifestPb.class) + .add("chunks", chunks) + .add("format", format) + .add("schema", schema) + .add("totalByteCount", totalByteCount) + .add("totalChunkCount", totalChunkCount) + .add("totalRowCount", totalRowCount) + .add("truncated", truncated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java index 034793281..7929fa1b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** The schema is an ordered list of column descriptions. */ @Generated +@JsonSerialize(using = ResultSchema.ResultSchemaSerializer.class) +@JsonDeserialize(using = ResultSchema.ResultSchemaDeserializer.class) public class ResultSchema { /** */ - @JsonProperty("column_count") private Long columnCount; /** */ - @JsonProperty("columns") private Collection columns; public ResultSchema setColumnCount(Long columnCount) { @@ -57,4 +66,39 @@ public String toString() { .add("columns", columns) .toString(); } + + ResultSchemaPb toPb() { + ResultSchemaPb pb = new ResultSchemaPb(); + pb.setColumnCount(columnCount); + pb.setColumns(columns); + + return pb; + } + + static ResultSchema fromPb(ResultSchemaPb pb) { + ResultSchema model = new ResultSchema(); + model.setColumnCount(pb.getColumnCount()); + model.setColumns(pb.getColumns()); + + return model; + } + + public static class ResultSchemaSerializer extends JsonSerializer { + @Override + public void serialize(ResultSchema value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultSchemaPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + 
public static class ResultSchemaDeserializer extends JsonDeserializer { + @Override + public ResultSchema deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultSchemaPb pb = mapper.readValue(p, ResultSchemaPb.class); + return ResultSchema.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchemaPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchemaPb.java new file mode 100755 index 000000000..12747e375 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchemaPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** The schema is an ordered list of column descriptions. 
*/ +@Generated +class ResultSchemaPb { + @JsonProperty("column_count") + private Long columnCount; + + @JsonProperty("columns") + private Collection columns; + + public ResultSchemaPb setColumnCount(Long columnCount) { + this.columnCount = columnCount; + return this; + } + + public Long getColumnCount() { + return columnCount; + } + + public ResultSchemaPb setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultSchemaPb that = (ResultSchemaPb) o; + return Objects.equals(columnCount, that.columnCount) && Objects.equals(columns, that.columns); + } + + @Override + public int hashCode() { + return Objects.hash(columnCount, columns); + } + + @Override + public String toString() { + return new ToStringer(ResultSchemaPb.class) + .add("columnCount", columnCount) + .add("columns", columns) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java index 5a22d70ea..58e79e70f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ServiceError.ServiceErrorSerializer.class) +@JsonDeserialize(using = ServiceError.ServiceErrorDeserializer.class) public class ServiceError { /** */ - @JsonProperty("error_code") private ServiceErrorCode errorCode; /** A brief summary of the error condition. */ - @JsonProperty("message") private String message; public ServiceError setErrorCode(ServiceErrorCode errorCode) { @@ -55,4 +64,39 @@ public String toString() { .add("message", message) .toString(); } + + ServiceErrorPb toPb() { + ServiceErrorPb pb = new ServiceErrorPb(); + pb.setErrorCode(errorCode); + pb.setMessage(message); + + return pb; + } + + static ServiceError fromPb(ServiceErrorPb pb) { + ServiceError model = new ServiceError(); + model.setErrorCode(pb.getErrorCode()); + model.setMessage(pb.getMessage()); + + return model; + } + + public static class ServiceErrorSerializer extends JsonSerializer { + @Override + public void serialize(ServiceError value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ServiceErrorPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ServiceErrorDeserializer extends JsonDeserializer { + @Override + public ServiceError deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ServiceErrorPb pb = mapper.readValue(p, ServiceErrorPb.class); + return ServiceError.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceErrorPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceErrorPb.java new file mode 100755 index 000000000..065ca1047 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceErrorPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ServiceErrorPb { + @JsonProperty("error_code") + private ServiceErrorCode errorCode; + + @JsonProperty("message") + private String message; + + public ServiceErrorPb setErrorCode(ServiceErrorCode errorCode) { + this.errorCode = errorCode; + return this; + } + + public ServiceErrorCode getErrorCode() { + return errorCode; + } + + public ServiceErrorPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ServiceErrorPb that = (ServiceErrorPb) o; + return Objects.equals(errorCode, that.errorCode) && Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(errorCode, message); + } + + @Override + public String toString() { + return new ToStringer(ServiceErrorPb.class) + .add("errorCode", errorCode) + .add("message", message) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java index b6b8de1b3..5f583076e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java @@ -4,25 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Set object ACL */ @Generated +@JsonSerialize(using = SetRequest.SetRequestSerializer.class) +@JsonDeserialize(using = SetRequest.SetRequestDeserializer.class) public class SetRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** * Object ID. The ACL for the object with this UUID is overwritten by this request's POST content. */ - @JsonIgnore private String objectId; + private String objectId; /** The type of object permission to set. 
*/ - @JsonIgnore private ObjectTypePlural objectType; + private ObjectTypePlural objectType; public SetRequest setAccessControlList(Collection accessControlList) { this.accessControlList = accessControlList; @@ -74,4 +83,41 @@ public String toString() { .add("objectType", objectType) .toString(); } + + SetRequestPb toPb() { + SetRequestPb pb = new SetRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static SetRequest fromPb(SetRequestPb pb) { + SetRequest model = new SetRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class SetRequestSerializer extends JsonSerializer { + @Override + public void serialize(SetRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetRequestDeserializer extends JsonDeserializer { + @Override + public SetRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetRequestPb pb = mapper.readValue(p, SetRequestPb.class); + return SetRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequestPb.java new file mode 100755 index 000000000..b830d6e72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Set object ACL */ +@Generated +class SetRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String objectId; + + @JsonIgnore private ObjectTypePlural objectType; + + public SetRequestPb setAccessControlList(Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public SetRequestPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public SetRequestPb setObjectType(ObjectTypePlural objectType) { + this.objectType = objectType; + return this; + } + + public ObjectTypePlural getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetRequestPb that = (SetRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(SetRequestPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java index 180d925da..97af0ed48 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = SetResponse.SetResponseSerializer.class) +@JsonDeserialize(using = SetResponse.SetResponseDeserializer.class) public class SetResponse { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** An object's type and UUID, separated by a forward slash (/) character. */ - @JsonProperty("object_id") private String objectId; /** A singular noun object type. 
*/ - @JsonProperty("object_type") private ObjectType objectType; public SetResponse setAccessControlList(Collection accessControlList) { @@ -72,4 +80,41 @@ public String toString() { .add("objectType", objectType) .toString(); } + + SetResponsePb toPb() { + SetResponsePb pb = new SetResponsePb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static SetResponse fromPb(SetResponsePb pb) { + SetResponse model = new SetResponse(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class SetResponseSerializer extends JsonSerializer { + @Override + public void serialize(SetResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetResponseDeserializer extends JsonDeserializer { + @Override + public SetResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetResponsePb pb = mapper.readValue(p, SetResponsePb.class); + return SetResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponsePb.java new file mode 100755 index 000000000..336da3c8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponsePb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SetResponsePb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private ObjectType objectType; + + public SetResponsePb setAccessControlList(Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public SetResponsePb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public SetResponsePb setObjectType(ObjectType objectType) { + this.objectType = objectType; + return this; + } + + public ObjectType getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetResponsePb that = (SetResponsePb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(SetResponsePb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java index fed2b2a4e..b18c82579 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java @@ -4,25 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = SetWorkspaceWarehouseConfigRequest.SetWorkspaceWarehouseConfigRequestSerializer.class) +@JsonDeserialize( + using = SetWorkspaceWarehouseConfigRequest.SetWorkspaceWarehouseConfigRequestDeserializer.class) public class SetWorkspaceWarehouseConfigRequest { /** Optional: Channel selection details */ - @JsonProperty("channel") private Channel channel; /** Deprecated: Use sql_configuration_parameters */ - @JsonProperty("config_param") private RepeatedEndpointConfPairs configParam; /** * Spark confs for external hive metastore configuration JSON serialized size must be less than <= * 512K */ - @JsonProperty("data_access_config") private Collection dataAccessConfig; /** @@ -32,27 +42,21 @@ public class SetWorkspaceWarehouseConfigRequest { * to be converted to another type. Used by frontend to save specific type availability in the * warehouse create and edit form UI. 
*/ - @JsonProperty("enabled_warehouse_types") private Collection enabledWarehouseTypes; /** Deprecated: Use sql_configuration_parameters */ - @JsonProperty("global_param") private RepeatedEndpointConfPairs globalParam; /** GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage */ - @JsonProperty("google_service_account") private String googleServiceAccount; /** AWS Only: Instance profile used to pass IAM role to the cluster */ - @JsonProperty("instance_profile_arn") private String instanceProfileArn; /** Security policy for warehouses */ - @JsonProperty("security_policy") private SetWorkspaceWarehouseConfigRequestSecurityPolicy securityPolicy; /** SQL configuration parameters */ - @JsonProperty("sql_configuration_parameters") private RepeatedEndpointConfPairs sqlConfigurationParameters; public SetWorkspaceWarehouseConfigRequest setChannel(Channel channel) { @@ -184,4 +188,58 @@ public String toString() { .add("sqlConfigurationParameters", sqlConfigurationParameters) .toString(); } + + SetWorkspaceWarehouseConfigRequestPb toPb() { + SetWorkspaceWarehouseConfigRequestPb pb = new SetWorkspaceWarehouseConfigRequestPb(); + pb.setChannel(channel); + pb.setConfigParam(configParam); + pb.setDataAccessConfig(dataAccessConfig); + pb.setEnabledWarehouseTypes(enabledWarehouseTypes); + pb.setGlobalParam(globalParam); + pb.setGoogleServiceAccount(googleServiceAccount); + pb.setInstanceProfileArn(instanceProfileArn); + pb.setSecurityPolicy(securityPolicy); + pb.setSqlConfigurationParameters(sqlConfigurationParameters); + + return pb; + } + + static SetWorkspaceWarehouseConfigRequest fromPb(SetWorkspaceWarehouseConfigRequestPb pb) { + SetWorkspaceWarehouseConfigRequest model = new SetWorkspaceWarehouseConfigRequest(); + model.setChannel(pb.getChannel()); + model.setConfigParam(pb.getConfigParam()); + model.setDataAccessConfig(pb.getDataAccessConfig()); + model.setEnabledWarehouseTypes(pb.getEnabledWarehouseTypes()); + 
model.setGlobalParam(pb.getGlobalParam()); + model.setGoogleServiceAccount(pb.getGoogleServiceAccount()); + model.setInstanceProfileArn(pb.getInstanceProfileArn()); + model.setSecurityPolicy(pb.getSecurityPolicy()); + model.setSqlConfigurationParameters(pb.getSqlConfigurationParameters()); + + return model; + } + + public static class SetWorkspaceWarehouseConfigRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + SetWorkspaceWarehouseConfigRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetWorkspaceWarehouseConfigRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetWorkspaceWarehouseConfigRequestDeserializer + extends JsonDeserializer { + @Override + public SetWorkspaceWarehouseConfigRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetWorkspaceWarehouseConfigRequestPb pb = + mapper.readValue(p, SetWorkspaceWarehouseConfigRequestPb.class); + return SetWorkspaceWarehouseConfigRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestPb.java new file mode 100755 index 000000000..09e481b68 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestPb.java @@ -0,0 +1,171 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class SetWorkspaceWarehouseConfigRequestPb { + @JsonProperty("channel") + private Channel channel; + + @JsonProperty("config_param") + private RepeatedEndpointConfPairs configParam; + + @JsonProperty("data_access_config") + private Collection dataAccessConfig; + + @JsonProperty("enabled_warehouse_types") + private Collection enabledWarehouseTypes; + + @JsonProperty("global_param") + private RepeatedEndpointConfPairs globalParam; + + @JsonProperty("google_service_account") + private String googleServiceAccount; + + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + + @JsonProperty("security_policy") + private SetWorkspaceWarehouseConfigRequestSecurityPolicy securityPolicy; + + @JsonProperty("sql_configuration_parameters") + private RepeatedEndpointConfPairs sqlConfigurationParameters; + + public SetWorkspaceWarehouseConfigRequestPb setChannel(Channel channel) { + this.channel = channel; + return this; + } + + public Channel getChannel() { + return channel; + } + + public SetWorkspaceWarehouseConfigRequestPb setConfigParam( + RepeatedEndpointConfPairs configParam) { + this.configParam = configParam; + return this; + } + + public RepeatedEndpointConfPairs getConfigParam() { + return configParam; + } + + public SetWorkspaceWarehouseConfigRequestPb setDataAccessConfig( + Collection dataAccessConfig) { + this.dataAccessConfig = dataAccessConfig; + return this; + } + + public Collection getDataAccessConfig() { + return dataAccessConfig; + } + + public SetWorkspaceWarehouseConfigRequestPb setEnabledWarehouseTypes( + Collection enabledWarehouseTypes) { + this.enabledWarehouseTypes = enabledWarehouseTypes; + return this; + } + + public Collection getEnabledWarehouseTypes() { + return 
enabledWarehouseTypes; + } + + public SetWorkspaceWarehouseConfigRequestPb setGlobalParam( + RepeatedEndpointConfPairs globalParam) { + this.globalParam = globalParam; + return this; + } + + public RepeatedEndpointConfPairs getGlobalParam() { + return globalParam; + } + + public SetWorkspaceWarehouseConfigRequestPb setGoogleServiceAccount(String googleServiceAccount) { + this.googleServiceAccount = googleServiceAccount; + return this; + } + + public String getGoogleServiceAccount() { + return googleServiceAccount; + } + + public SetWorkspaceWarehouseConfigRequestPb setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + + public SetWorkspaceWarehouseConfigRequestPb setSecurityPolicy( + SetWorkspaceWarehouseConfigRequestSecurityPolicy securityPolicy) { + this.securityPolicy = securityPolicy; + return this; + } + + public SetWorkspaceWarehouseConfigRequestSecurityPolicy getSecurityPolicy() { + return securityPolicy; + } + + public SetWorkspaceWarehouseConfigRequestPb setSqlConfigurationParameters( + RepeatedEndpointConfPairs sqlConfigurationParameters) { + this.sqlConfigurationParameters = sqlConfigurationParameters; + return this; + } + + public RepeatedEndpointConfPairs getSqlConfigurationParameters() { + return sqlConfigurationParameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetWorkspaceWarehouseConfigRequestPb that = (SetWorkspaceWarehouseConfigRequestPb) o; + return Objects.equals(channel, that.channel) + && Objects.equals(configParam, that.configParam) + && Objects.equals(dataAccessConfig, that.dataAccessConfig) + && Objects.equals(enabledWarehouseTypes, that.enabledWarehouseTypes) + && Objects.equals(globalParam, that.globalParam) + && Objects.equals(googleServiceAccount, that.googleServiceAccount) + && 
Objects.equals(instanceProfileArn, that.instanceProfileArn) + && Objects.equals(securityPolicy, that.securityPolicy) + && Objects.equals(sqlConfigurationParameters, that.sqlConfigurationParameters); + } + + @Override + public int hashCode() { + return Objects.hash( + channel, + configParam, + dataAccessConfig, + enabledWarehouseTypes, + globalParam, + googleServiceAccount, + instanceProfileArn, + securityPolicy, + sqlConfigurationParameters); + } + + @Override + public String toString() { + return new ToStringer(SetWorkspaceWarehouseConfigRequestPb.class) + .add("channel", channel) + .add("configParam", configParam) + .add("dataAccessConfig", dataAccessConfig) + .add("enabledWarehouseTypes", enabledWarehouseTypes) + .add("globalParam", globalParam) + .add("googleServiceAccount", googleServiceAccount) + .add("instanceProfileArn", instanceProfileArn) + .add("securityPolicy", securityPolicy) + .add("sqlConfigurationParameters", sqlConfigurationParameters) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java index 82b47fdba..3c2cdbe26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java @@ -4,9 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = SetWorkspaceWarehouseConfigResponse.SetWorkspaceWarehouseConfigResponseSerializer.class) +@JsonDeserialize( + using = + SetWorkspaceWarehouseConfigResponse.SetWorkspaceWarehouseConfigResponseDeserializer.class) public class SetWorkspaceWarehouseConfigResponse { @Override @@ -25,4 +40,40 @@ public int hashCode() { public String toString() { return new ToStringer(SetWorkspaceWarehouseConfigResponse.class).toString(); } + + SetWorkspaceWarehouseConfigResponsePb toPb() { + SetWorkspaceWarehouseConfigResponsePb pb = new SetWorkspaceWarehouseConfigResponsePb(); + + return pb; + } + + static SetWorkspaceWarehouseConfigResponse fromPb(SetWorkspaceWarehouseConfigResponsePb pb) { + SetWorkspaceWarehouseConfigResponse model = new SetWorkspaceWarehouseConfigResponse(); + + return model; + } + + public static class SetWorkspaceWarehouseConfigResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + SetWorkspaceWarehouseConfigResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SetWorkspaceWarehouseConfigResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SetWorkspaceWarehouseConfigResponseDeserializer + extends JsonDeserializer { + @Override + public SetWorkspaceWarehouseConfigResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SetWorkspaceWarehouseConfigResponsePb pb = + mapper.readValue(p, SetWorkspaceWarehouseConfigResponsePb.class); + return SetWorkspaceWarehouseConfigResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponsePb.java new file mode 100755 index 000000000..6817d3755 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SetWorkspaceWarehouseConfigResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetWorkspaceWarehouseConfigResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java index 15ea041b0..8e4e98941 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Start a warehouse */ @Generated +@JsonSerialize(using = StartRequest.StartRequestSerializer.class) +@JsonDeserialize(using = StartRequest.StartRequestDeserializer.class) public class StartRequest { /** Required. Id of the SQL warehouse. */ - @JsonIgnore private String id; + private String id; public StartRequest setId(String id) { this.id = id; @@ -39,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(StartRequest.class).add("id", id).toString(); } + + StartRequestPb toPb() { + StartRequestPb pb = new StartRequestPb(); + pb.setId(id); + + return pb; + } + + static StartRequest fromPb(StartRequestPb pb) { + StartRequest model = new StartRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class StartRequestSerializer extends JsonSerializer { + @Override + public void serialize(StartRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartRequestDeserializer extends JsonDeserializer { + @Override + public StartRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartRequestPb pb = mapper.readValue(p, StartRequestPb.class); + return StartRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequestPb.java new file mode 100755 index 000000000..a49a23eef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Start a warehouse */ +@Generated +class StartRequestPb { + @JsonIgnore private String id; + + public StartRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StartRequestPb that = (StartRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(StartRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java index 32fc7af3c..3793fa484 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StartWarehouseResponse.StartWarehouseResponseSerializer.class) +@JsonDeserialize(using = StartWarehouseResponse.StartWarehouseResponseDeserializer.class) public class StartWarehouseResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(StartWarehouseResponse.class).toString(); } + + StartWarehouseResponsePb toPb() { + StartWarehouseResponsePb pb = new StartWarehouseResponsePb(); + + return pb; + } + + static StartWarehouseResponse fromPb(StartWarehouseResponsePb pb) { + StartWarehouseResponse model = new StartWarehouseResponse(); + + return model; + } + + public static class StartWarehouseResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + StartWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StartWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StartWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public StartWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StartWarehouseResponsePb pb = mapper.readValue(p, StartWarehouseResponsePb.class); + return StartWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponsePb.java new file mode 100755 index 000000000..009006ac3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class StartWarehouseResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(StartWarehouseResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java index e6b49df84..c6cbef486 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java @@ -21,7 +21,7 @@ public void cancelExecution(CancelExecutionRequest request) { String path = String.format("/api/2.0/sql/statements/%s/cancel", request.getStatementId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); apiClient.execute(req, 
CancelExecutionResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -33,7 +33,7 @@ public StatementResponse executeStatement(ExecuteStatementRequest request) { String path = "/api/2.0/sql/statements/"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, StatementResponse.class); @@ -47,7 +47,7 @@ public StatementResponse getStatement(GetStatementRequest request) { String path = String.format("/api/2.0/sql/statements/%s", request.getStatementId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, StatementResponse.class); } catch (IOException e) { @@ -63,7 +63,7 @@ public ResultData getStatementResultChunkN(GetStatementResultChunkNRequest reque request.getStatementId(), request.getChunkIndex()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ResultData.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItem.java index 29dcc8b9d..ea1b36348 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItem.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StatementParameterListItem.StatementParameterListItemSerializer.class) +@JsonDeserialize(using = StatementParameterListItem.StatementParameterListItemDeserializer.class) public class StatementParameterListItem { /** The name of a parameter marker to be substituted in the statement. */ - @JsonProperty("name") private String name; /** @@ -21,13 +31,11 @@ public class StatementParameterListItem { * *

[Data types]: https://docs.databricks.com/sql/language-manual/functions/cast.html */ - @JsonProperty("type") private String typeValue; /** * The value to substitute, represented as a string. If omitted, the value is interpreted as NULL. */ - @JsonProperty("value") private String value; public StatementParameterListItem setName(String name) { @@ -80,4 +88,45 @@ public String toString() { .add("value", value) .toString(); } + + StatementParameterListItemPb toPb() { + StatementParameterListItemPb pb = new StatementParameterListItemPb(); + pb.setName(name); + pb.setType(typeValue); + pb.setValue(value); + + return pb; + } + + static StatementParameterListItem fromPb(StatementParameterListItemPb pb) { + StatementParameterListItem model = new StatementParameterListItem(); + model.setName(pb.getName()); + model.setType(pb.getType()); + model.setValue(pb.getValue()); + + return model; + } + + public static class StatementParameterListItemSerializer + extends JsonSerializer { + @Override + public void serialize( + StatementParameterListItem value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StatementParameterListItemPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StatementParameterListItemDeserializer + extends JsonDeserializer { + @Override + public StatementParameterListItem deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StatementParameterListItemPb pb = mapper.readValue(p, StatementParameterListItemPb.class); + return StatementParameterListItem.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItemPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItemPb.java new file mode 100755 index 000000000..4afec1871 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementParameterListItemPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StatementParameterListItemPb { + @JsonProperty("name") + private String name; + + @JsonProperty("type") + private String typeValue; + + @JsonProperty("value") + private String value; + + public StatementParameterListItemPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public StatementParameterListItemPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + public StatementParameterListItemPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatementParameterListItemPb that = (StatementParameterListItemPb) o; + return Objects.equals(name, that.name) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(name, 
typeValue, value); + } + + @Override + public String toString() { + return new ToStringer(StatementParameterListItemPb.class) + .add("name", name) + .add("typeValue", typeValue) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java index d9fb5469e..69c222666 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = StatementResponse.StatementResponseSerializer.class) +@JsonDeserialize(using = StatementResponse.StatementResponseDeserializer.class) public class StatementResponse { /** The result manifest provides schema and metadata for the result set. */ - @JsonProperty("manifest") private ResultManifest manifest; /** */ - @JsonProperty("result") private ResultData result; /** * The statement ID is returned upon successfully submitting a SQL statement, and is a required * reference for all subsequent calls. 
*/ - @JsonProperty("statement_id") private String statementId; /** The status response includes execution state and if relevant, error information. */ - @JsonProperty("status") private StatementStatus status; public StatementResponse setManifest(ResultManifest manifest) { @@ -89,4 +96,44 @@ public String toString() { .add("status", status) .toString(); } + + StatementResponsePb toPb() { + StatementResponsePb pb = new StatementResponsePb(); + pb.setManifest(manifest); + pb.setResult(result); + pb.setStatementId(statementId); + pb.setStatus(status); + + return pb; + } + + static StatementResponse fromPb(StatementResponsePb pb) { + StatementResponse model = new StatementResponse(); + model.setManifest(pb.getManifest()); + model.setResult(pb.getResult()); + model.setStatementId(pb.getStatementId()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class StatementResponseSerializer extends JsonSerializer { + @Override + public void serialize(StatementResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StatementResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StatementResponseDeserializer extends JsonDeserializer { + @Override + public StatementResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StatementResponsePb pb = mapper.readValue(p, StatementResponsePb.class); + return StatementResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponsePb.java new file mode 100755 index 000000000..3c822b536 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponsePb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class StatementResponsePb { + @JsonProperty("manifest") + private ResultManifest manifest; + + @JsonProperty("result") + private ResultData result; + + @JsonProperty("statement_id") + private String statementId; + + @JsonProperty("status") + private StatementStatus status; + + public StatementResponsePb setManifest(ResultManifest manifest) { + this.manifest = manifest; + return this; + } + + public ResultManifest getManifest() { + return manifest; + } + + public StatementResponsePb setResult(ResultData result) { + this.result = result; + return this; + } + + public ResultData getResult() { + return result; + } + + public StatementResponsePb setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + public StatementResponsePb setStatus(StatementStatus status) { + this.status = status; + return this; + } + + public StatementStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatementResponsePb 
that = (StatementResponsePb) o; + return Objects.equals(manifest, that.manifest) + && Objects.equals(result, that.result) + && Objects.equals(statementId, that.statementId) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(manifest, result, statementId, status); + } + + @Override + public String toString() { + return new ToStringer(StatementResponsePb.class) + .add("manifest", manifest) + .add("result", result) + .add("statementId", statementId) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java index e67cb876d..be0b7ee0f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** The status response includes execution state and if relevant, error information. 
*/ @Generated +@JsonSerialize(using = StatementStatus.StatementStatusSerializer.class) +@JsonDeserialize(using = StatementStatus.StatementStatusDeserializer.class) public class StatementStatus { /** */ - @JsonProperty("error") private ServiceError error; /** @@ -21,7 +31,6 @@ public class StatementStatus { * can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: * execution successful, and statement closed; result no longer available for fetch */ - @JsonProperty("state") private StatementState state; public StatementStatus setError(ServiceError error) { @@ -59,4 +68,40 @@ public int hashCode() { public String toString() { return new ToStringer(StatementStatus.class).add("error", error).add("state", state).toString(); } + + StatementStatusPb toPb() { + StatementStatusPb pb = new StatementStatusPb(); + pb.setError(error); + pb.setState(state); + + return pb; + } + + static StatementStatus fromPb(StatementStatusPb pb) { + StatementStatus model = new StatementStatus(); + model.setError(pb.getError()); + model.setState(pb.getState()); + + return model; + } + + public static class StatementStatusSerializer extends JsonSerializer { + @Override + public void serialize(StatementStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StatementStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StatementStatusDeserializer extends JsonDeserializer { + @Override + public StatementStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StatementStatusPb pb = mapper.readValue(p, StatementStatusPb.class); + return StatementStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatusPb.java new file mode 100755 index 000000000..c4d0cdd18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatusPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The status response includes execution state and if relevant, error information. */ +@Generated +class StatementStatusPb { + @JsonProperty("error") + private ServiceError error; + + @JsonProperty("state") + private StatementState state; + + public StatementStatusPb setError(ServiceError error) { + this.error = error; + return this; + } + + public ServiceError getError() { + return error; + } + + public StatementStatusPb setState(StatementState state) { + this.state = state; + return this; + } + + public StatementState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatementStatusPb that = (StatementStatusPb) o; + return Objects.equals(error, that.error) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(error, state); + } + + @Override + public String toString() { + return new ToStringer(StatementStatusPb.class) + .add("error", error) + .add("state", state) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java index 01d8b2402..2d89c5ee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Stop a warehouse */ @Generated +@JsonSerialize(using = StopRequest.StopRequestSerializer.class) +@JsonDeserialize(using = StopRequest.StopRequestDeserializer.class) public class StopRequest { /** Required. Id of the SQL warehouse. 
*/ - @JsonIgnore private String id; + private String id; public StopRequest setId(String id) { this.id = id; @@ -39,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(StopRequest.class).add("id", id).toString(); } + + StopRequestPb toPb() { + StopRequestPb pb = new StopRequestPb(); + pb.setId(id); + + return pb; + } + + static StopRequest fromPb(StopRequestPb pb) { + StopRequest model = new StopRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class StopRequestSerializer extends JsonSerializer { + @Override + public void serialize(StopRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StopRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StopRequestDeserializer extends JsonDeserializer { + @Override + public StopRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StopRequestPb pb = mapper.readValue(p, StopRequestPb.class); + return StopRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequestPb.java new file mode 100755 index 000000000..1856b5ac8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Stop a warehouse */ +@Generated +class StopRequestPb { + @JsonIgnore private String id; + + public StopRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StopRequestPb that = (StopRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(StopRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java index e98e257e7..fe190b97e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
StopWarehouseResponse.StopWarehouseResponseSerializer.class) +@JsonDeserialize(using = StopWarehouseResponse.StopWarehouseResponseDeserializer.class) public class StopWarehouseResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(StopWarehouseResponse.class).toString(); } + + StopWarehouseResponsePb toPb() { + StopWarehouseResponsePb pb = new StopWarehouseResponsePb(); + + return pb; + } + + static StopWarehouseResponse fromPb(StopWarehouseResponsePb pb) { + StopWarehouseResponse model = new StopWarehouseResponse(); + + return model; + } + + public static class StopWarehouseResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + StopWarehouseResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StopWarehouseResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StopWarehouseResponseDeserializer + extends JsonDeserializer { + @Override + public StopWarehouseResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StopWarehouseResponsePb pb = mapper.readValue(p, StopWarehouseResponsePb.class); + return StopWarehouseResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponsePb.java new file mode 100755 index 000000000..b47f6bb1a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class StopWarehouseResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(StopWarehouseResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java index f59f71386..dccbb15ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Success.SuccessSerializer.class) +@JsonDeserialize(using = Success.SuccessDeserializer.class) public class Success { /** */ - @JsonProperty("message") private SuccessMessage message; public Success setMessage(SuccessMessage message) { @@ -39,4 +49,37 @@ public int hashCode() { public String 
toString() { return new ToStringer(Success.class).add("message", message).toString(); } + + SuccessPb toPb() { + SuccessPb pb = new SuccessPb(); + pb.setMessage(message); + + return pb; + } + + static Success fromPb(SuccessPb pb) { + Success model = new Success(); + model.setMessage(pb.getMessage()); + + return model; + } + + public static class SuccessSerializer extends JsonSerializer { + @Override + public void serialize(Success value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SuccessPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SuccessDeserializer extends JsonDeserializer { + @Override + public Success deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SuccessPb pb = mapper.readValue(p, SuccessPb.class); + return Success.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessPb.java new file mode 100755 index 000000000..753260fe2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SuccessPb { + @JsonProperty("message") + private SuccessMessage message; + + public SuccessPb setMessage(SuccessMessage message) { + this.message = message; + return this; + } + + public SuccessMessage getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SuccessPb that = (SuccessPb) o; + return Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(message); + } + + @Override + public String toString() { + return new ToStringer(SuccessPb.class).add("message", message).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java new file mode 100755 index 000000000..ff46d3b74 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Objects; + +@Generated +@JsonSerialize(using = TaskTimeOverRange.TaskTimeOverRangeSerializer.class) +@JsonDeserialize(using = TaskTimeOverRange.TaskTimeOverRangeDeserializer.class) +public class TaskTimeOverRange { + /** */ + private Collection entries; + + /** + * interval length for all entries (difference in start time and end time of an entry range) the + * same for all entries start time of first interval is query_start_time_ms + */ + private Long interval; + + public TaskTimeOverRange setEntries(Collection entries) { + this.entries = entries; + return this; + } + + public Collection getEntries() { + return entries; + } + + public TaskTimeOverRange setInterval(Long interval) { + this.interval = interval; + return this; + } + + public Long getInterval() { + return interval; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRange that = (TaskTimeOverRange) o; + return Objects.equals(entries, that.entries) && Objects.equals(interval, that.interval); + } + + @Override + public int hashCode() { + return Objects.hash(entries, interval); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRange.class) + 
.add("entries", entries) + .add("interval", interval) + .toString(); + } + + TaskTimeOverRangePb toPb() { + TaskTimeOverRangePb pb = new TaskTimeOverRangePb(); + pb.setEntries(entries); + pb.setInterval(interval); + + return pb; + } + + static TaskTimeOverRange fromPb(TaskTimeOverRangePb pb) { + TaskTimeOverRange model = new TaskTimeOverRange(); + model.setEntries(pb.getEntries()); + model.setInterval(pb.getInterval()); + + return model; + } + + public static class TaskTimeOverRangeSerializer extends JsonSerializer { + @Override + public void serialize(TaskTimeOverRange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskTimeOverRangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskTimeOverRangeDeserializer extends JsonDeserializer { + @Override + public TaskTimeOverRange deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskTimeOverRangePb pb = mapper.readValue(p, TaskTimeOverRangePb.class); + return TaskTimeOverRange.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java new file mode 100755 index 000000000..cbb5d2ea0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java @@ -0,0 +1,93 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Objects; + +@Generated +@JsonSerialize(using = TaskTimeOverRangeEntry.TaskTimeOverRangeEntrySerializer.class) +@JsonDeserialize(using = TaskTimeOverRangeEntry.TaskTimeOverRangeEntryDeserializer.class) +public class TaskTimeOverRangeEntry { + /** + * total task completion time in this time range, aggregated over all stages and jobs in the query + */ + private Long taskCompletedTimeMs; + + public TaskTimeOverRangeEntry setTaskCompletedTimeMs(Long taskCompletedTimeMs) { + this.taskCompletedTimeMs = taskCompletedTimeMs; + return this; + } + + public Long getTaskCompletedTimeMs() { + return taskCompletedTimeMs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRangeEntry that = (TaskTimeOverRangeEntry) o; + return Objects.equals(taskCompletedTimeMs, that.taskCompletedTimeMs); + } + + @Override + public int hashCode() { + return Objects.hash(taskCompletedTimeMs); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRangeEntry.class) + .add("taskCompletedTimeMs", taskCompletedTimeMs) + .toString(); + } + + TaskTimeOverRangeEntryPb toPb() { + TaskTimeOverRangeEntryPb pb = new TaskTimeOverRangeEntryPb(); + 
pb.setTaskCompletedTimeMs(taskCompletedTimeMs); + + return pb; + } + + static TaskTimeOverRangeEntry fromPb(TaskTimeOverRangeEntryPb pb) { + TaskTimeOverRangeEntry model = new TaskTimeOverRangeEntry(); + model.setTaskCompletedTimeMs(pb.getTaskCompletedTimeMs()); + + return model; + } + + public static class TaskTimeOverRangeEntrySerializer + extends JsonSerializer { + @Override + public void serialize( + TaskTimeOverRangeEntry value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TaskTimeOverRangeEntryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TaskTimeOverRangeEntryDeserializer + extends JsonDeserializer { + @Override + public TaskTimeOverRangeEntry deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TaskTimeOverRangeEntryPb pb = mapper.readValue(p, TaskTimeOverRangeEntryPb.class); + return TaskTimeOverRangeEntry.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntryPb.java new file mode 100755 index 000000000..71d80e889 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntryPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TaskTimeOverRangeEntryPb { + @JsonProperty("task_completed_time_ms") + private Long taskCompletedTimeMs; + + public TaskTimeOverRangeEntryPb setTaskCompletedTimeMs(Long taskCompletedTimeMs) { + this.taskCompletedTimeMs = taskCompletedTimeMs; + return this; + } + + public Long getTaskCompletedTimeMs() { + return taskCompletedTimeMs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRangeEntryPb that = (TaskTimeOverRangeEntryPb) o; + return Objects.equals(taskCompletedTimeMs, that.taskCompletedTimeMs); + } + + @Override + public int hashCode() { + return Objects.hash(taskCompletedTimeMs); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRangeEntryPb.class) + .add("taskCompletedTimeMs", taskCompletedTimeMs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangePb.java new file mode 100755 index 000000000..8ded9dc2a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class TaskTimeOverRangePb { + @JsonProperty("entries") + private Collection entries; + + @JsonProperty("interval") + private Long interval; + + public TaskTimeOverRangePb setEntries(Collection entries) { + this.entries = entries; + return this; + } + + public Collection getEntries() { + return entries; + } + + public TaskTimeOverRangePb setInterval(Long interval) { + this.interval = interval; + return this; + } + + public Long getInterval() { + return interval; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRangePb that = (TaskTimeOverRangePb) o; + return Objects.equals(entries, that.entries) && Objects.equals(interval, that.interval); + } + + @Override + public int hashCode() { + return Objects.hash(entries, interval); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRangePb.class) + .add("entries", entries) + .add("interval", interval) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java index 2992d0e22..defdbe642 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Map; import java.util.Objects; @Generated +@JsonSerialize(using = TerminationReason.TerminationReasonSerializer.class) +@JsonDeserialize(using = TerminationReason.TerminationReasonDeserializer.class) public class TerminationReason { /** status code indicating why the cluster was terminated */ - @JsonProperty("code") private TerminationReasonCode code; /** list of parameters that provide additional information about why the cluster was terminated */ - @JsonProperty("parameters") private Map parameters; /** type of the termination */ - @JsonProperty("type") private TerminationReasonType typeValue; public TerminationReason setCode(TerminationReasonCode code) { @@ -72,4 +80,42 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + TerminationReasonPb toPb() { + TerminationReasonPb pb = new TerminationReasonPb(); + pb.setCode(code); + pb.setParameters(parameters); + pb.setType(typeValue); + + return pb; + } + + static TerminationReason fromPb(TerminationReasonPb pb) { + TerminationReason model = new TerminationReason(); + model.setCode(pb.getCode()); + model.setParameters(pb.getParameters()); + model.setType(pb.getType()); + + return model; + } + + public static class TerminationReasonSerializer extends JsonSerializer { + @Override + public void serialize(TerminationReason value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TerminationReasonPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TerminationReasonDeserializer 
extends JsonDeserializer { + @Override + public TerminationReason deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TerminationReasonPb pb = mapper.readValue(p, TerminationReasonPb.class); + return TerminationReason.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonPb.java new file mode 100755 index 000000000..01c264a08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@Generated +class TerminationReasonPb { + @JsonProperty("code") + private TerminationReasonCode code; + + @JsonProperty("parameters") + private Map parameters; + + @JsonProperty("type") + private TerminationReasonType typeValue; + + public TerminationReasonPb setCode(TerminationReasonCode code) { + this.code = code; + return this; + } + + public TerminationReasonCode getCode() { + return code; + } + + public TerminationReasonPb setParameters(Map parameters) { + this.parameters = parameters; + return this; + } + + public Map getParameters() { + return parameters; + } + + public TerminationReasonPb setType(TerminationReasonType typeValue) { + this.typeValue = typeValue; + return this; + } + + public TerminationReasonType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + TerminationReasonPb that = (TerminationReasonPb) o; + return Objects.equals(code, that.code) + && Objects.equals(parameters, that.parameters) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(code, parameters, typeValue); + } + + @Override + public String toString() { + return new ToStringer(TerminationReasonPb.class) + .add("code", code) + .add("parameters", parameters) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValue.java index d57997e4e..6f4d402cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValue.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TextValue.TextValueSerializer.class) +@JsonDeserialize(using = TextValue.TextValueDeserializer.class) public class TextValue { /** */ - @JsonProperty("value") private String value; public TextValue setValue(String value) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(TextValue.class).add("value", 
value).toString(); } + + TextValuePb toPb() { + TextValuePb pb = new TextValuePb(); + pb.setValue(value); + + return pb; + } + + static TextValue fromPb(TextValuePb pb) { + TextValue model = new TextValue(); + model.setValue(pb.getValue()); + + return model; + } + + public static class TextValueSerializer extends JsonSerializer { + @Override + public void serialize(TextValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TextValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TextValueDeserializer extends JsonDeserializer { + @Override + public TextValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TextValuePb pb = mapper.readValue(p, TextValuePb.class); + return TextValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValuePb.java new file mode 100755 index 000000000..2951da060 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TextValuePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TextValuePb { + @JsonProperty("value") + private String value; + + public TextValuePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TextValuePb that = (TextValuePb) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + + @Override + public String toString() { + return new ToStringer(TextValuePb.class).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRange.java index 93cdd858c..a9bea1bc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRange.java @@ -3,21 +3,27 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TimeRange.TimeRangeSerializer.class) +@JsonDeserialize(using = TimeRange.TimeRangeDeserializer.class) public class TimeRange { /** The end time in milliseconds. */ - @JsonProperty("end_time_ms") - @QueryParam("end_time_ms") private Long endTimeMs; /** The start time in milliseconds. */ - @JsonProperty("start_time_ms") - @QueryParam("start_time_ms") private Long startTimeMs; public TimeRange setEndTimeMs(Long endTimeMs) { @@ -59,4 +65,39 @@ public String toString() { .add("startTimeMs", startTimeMs) .toString(); } + + TimeRangePb toPb() { + TimeRangePb pb = new TimeRangePb(); + pb.setEndTimeMs(endTimeMs); + pb.setStartTimeMs(startTimeMs); + + return pb; + } + + static TimeRange fromPb(TimeRangePb pb) { + TimeRange model = new TimeRange(); + model.setEndTimeMs(pb.getEndTimeMs()); + model.setStartTimeMs(pb.getStartTimeMs()); + + return model; + } + + public static class TimeRangeSerializer extends JsonSerializer { + @Override + public void serialize(TimeRange value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TimeRangePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TimeRangeDeserializer extends JsonDeserializer { + @Override + public TimeRange deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TimeRangePb pb = mapper.readValue(p, TimeRangePb.class); + return TimeRange.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRangePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRangePb.java new file mode 100755 index 000000000..eef365806 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRangePb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TimeRangePb { + @JsonProperty("end_time_ms") + @QueryParam("end_time_ms") + private Long endTimeMs; + + @JsonProperty("start_time_ms") + @QueryParam("start_time_ms") + private Long startTimeMs; + + public TimeRangePb setEndTimeMs(Long endTimeMs) { + this.endTimeMs = endTimeMs; + return this; + } + + public Long getEndTimeMs() { + return endTimeMs; + } + + public TimeRangePb setStartTimeMs(Long startTimeMs) { + this.startTimeMs = startTimeMs; + return this; + } + + public Long getStartTimeMs() { + return startTimeMs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TimeRangePb that = (TimeRangePb) o; + return Objects.equals(endTimeMs, that.endTimeMs) + && Objects.equals(startTimeMs, that.startTimeMs); + } + + @Override + public int hashCode() { + return Objects.hash(endTimeMs, startTimeMs); + } + + @Override + public String toString() { + return new ToStringer(TimeRangePb.class) + .add("endTimeMs", endTimeMs) + .add("startTimeMs", startTimeMs) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectId.java index 84f8542a8..087368ce1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectId.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectId.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = TransferOwnershipObjectId.TransferOwnershipObjectIdSerializer.class) +@JsonDeserialize(using = TransferOwnershipObjectId.TransferOwnershipObjectIdDeserializer.class) public class TransferOwnershipObjectId { /** Email address for the new owner, who must exist in the workspace. 
*/ - @JsonProperty("new_owner") private String newOwner; public TransferOwnershipObjectId setNewOwner(String newOwner) { @@ -39,4 +49,41 @@ public int hashCode() { public String toString() { return new ToStringer(TransferOwnershipObjectId.class).add("newOwner", newOwner).toString(); } + + TransferOwnershipObjectIdPb toPb() { + TransferOwnershipObjectIdPb pb = new TransferOwnershipObjectIdPb(); + pb.setNewOwner(newOwner); + + return pb; + } + + static TransferOwnershipObjectId fromPb(TransferOwnershipObjectIdPb pb) { + TransferOwnershipObjectId model = new TransferOwnershipObjectId(); + model.setNewOwner(pb.getNewOwner()); + + return model; + } + + public static class TransferOwnershipObjectIdSerializer + extends JsonSerializer { + @Override + public void serialize( + TransferOwnershipObjectId value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TransferOwnershipObjectIdPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TransferOwnershipObjectIdDeserializer + extends JsonDeserializer { + @Override + public TransferOwnershipObjectId deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TransferOwnershipObjectIdPb pb = mapper.readValue(p, TransferOwnershipObjectIdPb.class); + return TransferOwnershipObjectId.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectIdPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectIdPb.java new file mode 100755 index 000000000..a56cee9b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectIdPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class TransferOwnershipObjectIdPb { + @JsonProperty("new_owner") + private String newOwner; + + public TransferOwnershipObjectIdPb setNewOwner(String newOwner) { + this.newOwner = newOwner; + return this; + } + + public String getNewOwner() { + return newOwner; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransferOwnershipObjectIdPb that = (TransferOwnershipObjectIdPb) o; + return Objects.equals(newOwner, that.newOwner); + } + + @Override + public int hashCode() { + return Objects.hash(newOwner); + } + + @Override + public String toString() { + return new ToStringer(TransferOwnershipObjectIdPb.class).add("newOwner", newOwner).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequest.java index bd770b6bc..198af27f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequest.java @@ -4,22 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Transfer object ownership */ @Generated +@JsonSerialize(using = TransferOwnershipRequest.TransferOwnershipRequestSerializer.class) +@JsonDeserialize(using = TransferOwnershipRequest.TransferOwnershipRequestDeserializer.class) public class TransferOwnershipRequest { /** Email address for the new owner, who must exist in the workspace. */ - @JsonProperty("new_owner") private String newOwner; /** The ID of the object on which to change ownership. */ - @JsonIgnore private TransferOwnershipObjectId objectId; + private TransferOwnershipObjectId objectId; /** The type of object on which to change ownership. */ - @JsonIgnore private OwnableObjectType objectType; + private OwnableObjectType objectType; public TransferOwnershipRequest setNewOwner(String newOwner) { this.newOwner = newOwner; @@ -71,4 +80,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + TransferOwnershipRequestPb toPb() { + TransferOwnershipRequestPb pb = new TransferOwnershipRequestPb(); + pb.setNewOwner(newOwner); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static TransferOwnershipRequest fromPb(TransferOwnershipRequestPb pb) { + TransferOwnershipRequest model = new TransferOwnershipRequest(); + model.setNewOwner(pb.getNewOwner()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class TransferOwnershipRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + TransferOwnershipRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TransferOwnershipRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
TransferOwnershipRequestDeserializer + extends JsonDeserializer { + @Override + public TransferOwnershipRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TransferOwnershipRequestPb pb = mapper.readValue(p, TransferOwnershipRequestPb.class); + return TransferOwnershipRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequestPb.java new file mode 100755 index 000000000..ac3ae8967 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Transfer object ownership */ +@Generated +class TransferOwnershipRequestPb { + @JsonProperty("new_owner") + private String newOwner; + + @JsonIgnore private TransferOwnershipObjectId objectId; + + @JsonIgnore private OwnableObjectType objectType; + + public TransferOwnershipRequestPb setNewOwner(String newOwner) { + this.newOwner = newOwner; + return this; + } + + public String getNewOwner() { + return newOwner; + } + + public TransferOwnershipRequestPb setObjectId(TransferOwnershipObjectId objectId) { + this.objectId = objectId; + return this; + } + + public TransferOwnershipObjectId getObjectId() { + return objectId; + } + + public TransferOwnershipRequestPb setObjectType(OwnableObjectType objectType) { + this.objectType = objectType; + return this; + } + + public 
OwnableObjectType getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TransferOwnershipRequestPb that = (TransferOwnershipRequestPb) o; + return Objects.equals(newOwner, that.newOwner) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(newOwner, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(TransferOwnershipRequestPb.class) + .add("newOwner", newOwner) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequest.java index 55b889bad..892edb648 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an alert */ @Generated +@JsonSerialize(using = TrashAlertRequest.TrashAlertRequestSerializer.class) 
+@JsonDeserialize(using = TrashAlertRequest.TrashAlertRequestDeserializer.class) public class TrashAlertRequest { /** */ - @JsonIgnore private String id; + private String id; public TrashAlertRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(TrashAlertRequest.class).add("id", id).toString(); } + + TrashAlertRequestPb toPb() { + TrashAlertRequestPb pb = new TrashAlertRequestPb(); + pb.setId(id); + + return pb; + } + + static TrashAlertRequest fromPb(TrashAlertRequestPb pb) { + TrashAlertRequest model = new TrashAlertRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class TrashAlertRequestSerializer extends JsonSerializer { + @Override + public void serialize(TrashAlertRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrashAlertRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrashAlertRequestDeserializer extends JsonDeserializer { + @Override + public TrashAlertRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrashAlertRequestPb pb = mapper.readValue(p, TrashAlertRequestPb.class); + return TrashAlertRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequestPb.java new file mode 100755 index 000000000..6f1dcaf3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an alert */ +@Generated +class TrashAlertRequestPb { + @JsonIgnore private String id; + + public TrashAlertRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrashAlertRequestPb that = (TrashAlertRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(TrashAlertRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java index 819a17a3f..f8788f792 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
java.io.IOException; import java.util.Objects; /** Delete an alert */ @Generated +@JsonSerialize(using = TrashAlertV2Request.TrashAlertV2RequestSerializer.class) +@JsonDeserialize(using = TrashAlertV2Request.TrashAlertV2RequestDeserializer.class) public class TrashAlertV2Request { /** */ - @JsonIgnore private String id; + private String id; public TrashAlertV2Request setId(String id) { this.id = id; @@ -39,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(TrashAlertV2Request.class).add("id", id).toString(); } + + TrashAlertV2RequestPb toPb() { + TrashAlertV2RequestPb pb = new TrashAlertV2RequestPb(); + pb.setId(id); + + return pb; + } + + static TrashAlertV2Request fromPb(TrashAlertV2RequestPb pb) { + TrashAlertV2Request model = new TrashAlertV2Request(); + model.setId(pb.getId()); + + return model; + } + + public static class TrashAlertV2RequestSerializer extends JsonSerializer { + @Override + public void serialize(TrashAlertV2Request value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrashAlertV2RequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrashAlertV2RequestDeserializer + extends JsonDeserializer { + @Override + public TrashAlertV2Request deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrashAlertV2RequestPb pb = mapper.readValue(p, TrashAlertV2RequestPb.class); + return TrashAlertV2Request.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2RequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2RequestPb.java new file mode 100755 index 000000000..4504f5f27 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2RequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an alert */ +@Generated +class TrashAlertV2RequestPb { + @JsonIgnore private String id; + + public TrashAlertV2RequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrashAlertV2RequestPb that = (TrashAlertV2RequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(TrashAlertV2RequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequest.java index dbfefcd9b..c75f6916c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequest.java @@ -4,14 +4,25 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a query */ @Generated +@JsonSerialize(using = TrashQueryRequest.TrashQueryRequestSerializer.class) +@JsonDeserialize(using = TrashQueryRequest.TrashQueryRequestDeserializer.class) public class TrashQueryRequest { /** */ - @JsonIgnore private String id; + private String id; public TrashQueryRequest setId(String id) { this.id = id; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(TrashQueryRequest.class).add("id", id).toString(); } + + TrashQueryRequestPb toPb() { + TrashQueryRequestPb pb = new TrashQueryRequestPb(); + pb.setId(id); + + return pb; + } + + static TrashQueryRequest fromPb(TrashQueryRequestPb pb) { + TrashQueryRequest model = new TrashQueryRequest(); + model.setId(pb.getId()); + + return model; + } + + public static class TrashQueryRequestSerializer extends JsonSerializer { + @Override + public void serialize(TrashQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TrashQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TrashQueryRequestDeserializer extends JsonDeserializer { + @Override + public TrashQueryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is 
set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TrashQueryRequestPb pb = mapper.readValue(p, TrashQueryRequestPb.class); + return TrashQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequestPb.java new file mode 100755 index 000000000..8cdf394c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a query */ +@Generated +class TrashQueryRequestPb { + @JsonIgnore private String id; + + public TrashQueryRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrashQueryRequestPb that = (TrashQueryRequestPb) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(TrashQueryRequestPb.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java index a0ed02ec2..c3766d56e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java @@ -4,25 
+4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateAlertRequest.UpdateAlertRequestSerializer.class) +@JsonDeserialize(using = UpdateAlertRequest.UpdateAlertRequestDeserializer.class) public class UpdateAlertRequest { /** */ - @JsonProperty("alert") private UpdateAlertRequestAlert alert; /** * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the * alert's display name conflicts with an existing alert's display name. */ - @JsonProperty("auto_resolve_display_name") private Boolean autoResolveDisplayName; /** */ - @JsonIgnore private String id; + private String id; /** * The field mask must be a single string, with multiple fields separated by commas (no spaces). @@ -35,7 +43,6 @@ public class UpdateAlertRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("update_mask") private String updateMask; public UpdateAlertRequest setAlert(UpdateAlertRequestAlert alert) { @@ -99,4 +106,44 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateAlertRequestPb toPb() { + UpdateAlertRequestPb pb = new UpdateAlertRequestPb(); + pb.setAlert(alert); + pb.setAutoResolveDisplayName(autoResolveDisplayName); + pb.setId(id); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateAlertRequest fromPb(UpdateAlertRequestPb pb) { + UpdateAlertRequest model = new UpdateAlertRequest(); + model.setAlert(pb.getAlert()); + model.setAutoResolveDisplayName(pb.getAutoResolveDisplayName()); + model.setId(pb.getId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateAlertRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateAlertRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAlertRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAlertRequestDeserializer extends JsonDeserializer { + @Override + public UpdateAlertRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAlertRequestPb pb = mapper.readValue(p, UpdateAlertRequestPb.class); + return UpdateAlertRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java index cae7b393b..bd1bdd14c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateAlertRequestAlert.UpdateAlertRequestAlertSerializer.class) +@JsonDeserialize(using = UpdateAlertRequestAlert.UpdateAlertRequestAlertDeserializer.class) public class UpdateAlertRequestAlert { /** Trigger conditions of the alert. */ - @JsonProperty("condition") private AlertCondition condition; /** @@ -18,7 +28,6 @@ public class UpdateAlertRequestAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_body") private String customBody; /** @@ -27,30 +36,24 @@ public class UpdateAlertRequestAlert { * *

[here]: https://docs.databricks.com/sql/user/alerts/index.html */ - @JsonProperty("custom_subject") private String customSubject; /** The display name of the alert. */ - @JsonProperty("display_name") private String displayName; /** Whether to notify alert subscribers when alert returns back to normal. */ - @JsonProperty("notify_on_ok") private Boolean notifyOnOk; /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** UUID of the query attached to the alert. */ - @JsonProperty("query_id") private String queryId; /** * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it * can be triggered again. If 0 or not specified, the alert will not be triggered again. */ - @JsonProperty("seconds_to_retrigger") private Long secondsToRetrigger; public UpdateAlertRequestAlert setCondition(AlertCondition condition) { @@ -166,4 +169,55 @@ public String toString() { .add("secondsToRetrigger", secondsToRetrigger) .toString(); } + + UpdateAlertRequestAlertPb toPb() { + UpdateAlertRequestAlertPb pb = new UpdateAlertRequestAlertPb(); + pb.setCondition(condition); + pb.setCustomBody(customBody); + pb.setCustomSubject(customSubject); + pb.setDisplayName(displayName); + pb.setNotifyOnOk(notifyOnOk); + pb.setOwnerUserName(ownerUserName); + pb.setQueryId(queryId); + pb.setSecondsToRetrigger(secondsToRetrigger); + + return pb; + } + + static UpdateAlertRequestAlert fromPb(UpdateAlertRequestAlertPb pb) { + UpdateAlertRequestAlert model = new UpdateAlertRequestAlert(); + model.setCondition(pb.getCondition()); + model.setCustomBody(pb.getCustomBody()); + model.setCustomSubject(pb.getCustomSubject()); + model.setDisplayName(pb.getDisplayName()); + model.setNotifyOnOk(pb.getNotifyOnOk()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setQueryId(pb.getQueryId()); + model.setSecondsToRetrigger(pb.getSecondsToRetrigger()); + + return model; + 
} + + public static class UpdateAlertRequestAlertSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateAlertRequestAlert value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAlertRequestAlertPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAlertRequestAlertDeserializer + extends JsonDeserializer { + @Override + public UpdateAlertRequestAlert deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAlertRequestAlertPb pb = mapper.readValue(p, UpdateAlertRequestAlertPb.class); + return UpdateAlertRequestAlert.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlertPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlertPb.java new file mode 100755 index 000000000..6e18da8e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlertPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateAlertRequestAlertPb { + @JsonProperty("condition") + private AlertCondition condition; + + @JsonProperty("custom_body") + private String customBody; + + @JsonProperty("custom_subject") + private String customSubject; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("seconds_to_retrigger") + private Long secondsToRetrigger; + + public UpdateAlertRequestAlertPb setCondition(AlertCondition condition) { + this.condition = condition; + return this; + } + + public AlertCondition getCondition() { + return condition; + } + + public UpdateAlertRequestAlertPb setCustomBody(String customBody) { + this.customBody = customBody; + return this; + } + + public String getCustomBody() { + return customBody; + } + + public UpdateAlertRequestAlertPb setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public UpdateAlertRequestAlertPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateAlertRequestAlertPb setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + + public UpdateAlertRequestAlertPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public UpdateAlertRequestAlertPb 
setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + + public UpdateAlertRequestAlertPb setSecondsToRetrigger(Long secondsToRetrigger) { + this.secondsToRetrigger = secondsToRetrigger; + return this; + } + + public Long getSecondsToRetrigger() { + return secondsToRetrigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAlertRequestAlertPb that = (UpdateAlertRequestAlertPb) o; + return Objects.equals(condition, that.condition) + && Objects.equals(customBody, that.customBody) + && Objects.equals(customSubject, that.customSubject) + && Objects.equals(displayName, that.displayName) + && Objects.equals(notifyOnOk, that.notifyOnOk) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(queryId, that.queryId) + && Objects.equals(secondsToRetrigger, that.secondsToRetrigger); + } + + @Override + public int hashCode() { + return Objects.hash( + condition, + customBody, + customSubject, + displayName, + notifyOnOk, + ownerUserName, + queryId, + secondsToRetrigger); + } + + @Override + public String toString() { + return new ToStringer(UpdateAlertRequestAlertPb.class) + .add("condition", condition) + .add("customBody", customBody) + .add("customSubject", customSubject) + .add("displayName", displayName) + .add("notifyOnOk", notifyOnOk) + .add("ownerUserName", ownerUserName) + .add("queryId", queryId) + .add("secondsToRetrigger", secondsToRetrigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestPb.java new file mode 100755 index 000000000..63ee46dcb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by 
Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateAlertRequestPb { + @JsonProperty("alert") + private UpdateAlertRequestAlert alert; + + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + + @JsonIgnore private String id; + + @JsonProperty("update_mask") + private String updateMask; + + public UpdateAlertRequestPb setAlert(UpdateAlertRequestAlert alert) { + this.alert = alert; + return this; + } + + public UpdateAlertRequestAlert getAlert() { + return alert; + } + + public UpdateAlertRequestPb setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + + public UpdateAlertRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateAlertRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAlertRequestPb that = (UpdateAlertRequestPb) o; + return Objects.equals(alert, that.alert) + && Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName) + && Objects.equals(id, that.id) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(alert, autoResolveDisplayName, id, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateAlertRequestPb.class) + .add("alert", alert) + 
.add("autoResolveDisplayName", autoResolveDisplayName) + .add("id", id) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java index 1e43290d8..bdaa96ec4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java @@ -3,21 +3,29 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Update an alert */ @Generated +@JsonSerialize(using = UpdateAlertV2Request.UpdateAlertV2RequestSerializer.class) +@JsonDeserialize(using = UpdateAlertV2Request.UpdateAlertV2RequestDeserializer.class) public class UpdateAlertV2Request { /** */ - @JsonProperty("alert") private AlertV2 alert; /** UUID identifying the alert. */ - @JsonIgnore private String id; + private String id; /** * The field mask must be a single string, with multiple fields separated by commas (no spaces). 
@@ -30,8 +38,6 @@ public class UpdateAlertV2Request { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonIgnore - @QueryParam("update_mask") private String updateMask; public UpdateAlertV2Request setAlert(AlertV2 alert) { @@ -84,4 +90,44 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateAlertV2RequestPb toPb() { + UpdateAlertV2RequestPb pb = new UpdateAlertV2RequestPb(); + pb.setAlert(alert); + pb.setId(id); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateAlertV2Request fromPb(UpdateAlertV2RequestPb pb) { + UpdateAlertV2Request model = new UpdateAlertV2Request(); + model.setAlert(pb.getAlert()); + model.setId(pb.getId()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateAlertV2RequestSerializer extends JsonSerializer { + @Override + public void serialize( + UpdateAlertV2Request value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateAlertV2RequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateAlertV2RequestDeserializer + extends JsonDeserializer { + @Override + public UpdateAlertV2Request deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateAlertV2RequestPb pb = mapper.readValue(p, UpdateAlertV2RequestPb.class); + return UpdateAlertV2Request.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2RequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2RequestPb.java new file mode 100755 index 000000000..097cc955f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2RequestPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update an alert */ +@Generated +class UpdateAlertV2RequestPb { + @JsonProperty("alert") + private AlertV2 alert; + + @JsonIgnore private String id; + + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateAlertV2RequestPb setAlert(AlertV2 alert) { + this.alert = alert; + return this; + } + + public AlertV2 getAlert() { + return alert; + } + + public UpdateAlertV2RequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateAlertV2RequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAlertV2RequestPb that = (UpdateAlertV2RequestPb) o; + return Objects.equals(alert, that.alert) + && Objects.equals(id, that.id) + && Objects.equals(updateMask, that.updateMask); + } + + 
@Override + public int hashCode() { + return Objects.hash(alert, id, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateAlertV2RequestPb.class) + .add("alert", alert) + .add("id", id) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java index 46be75273..7794a0c37 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateQueryRequest.UpdateQueryRequestSerializer.class) +@JsonDeserialize(using = UpdateQueryRequest.UpdateQueryRequestDeserializer.class) public class UpdateQueryRequest { /** * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the * alert's display name conflicts with an existing alert's display name. 
*/ - @JsonProperty("auto_resolve_display_name") private Boolean autoResolveDisplayName; /** */ - @JsonIgnore private String id; + private String id; /** */ - @JsonProperty("query") private UpdateQueryRequestQuery query; /** @@ -35,7 +43,6 @@ public class UpdateQueryRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("update_mask") private String updateMask; public UpdateQueryRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) { @@ -99,4 +106,44 @@ public String toString() { .add("updateMask", updateMask) .toString(); } + + UpdateQueryRequestPb toPb() { + UpdateQueryRequestPb pb = new UpdateQueryRequestPb(); + pb.setAutoResolveDisplayName(autoResolveDisplayName); + pb.setId(id); + pb.setQuery(query); + pb.setUpdateMask(updateMask); + + return pb; + } + + static UpdateQueryRequest fromPb(UpdateQueryRequestPb pb) { + UpdateQueryRequest model = new UpdateQueryRequest(); + model.setAutoResolveDisplayName(pb.getAutoResolveDisplayName()); + model.setId(pb.getId()); + model.setQuery(pb.getQuery()); + model.setUpdateMask(pb.getUpdateMask()); + + return model; + } + + public static class UpdateQueryRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateQueryRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateQueryRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateQueryRequestDeserializer extends JsonDeserializer { + @Override + public UpdateQueryRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateQueryRequestPb pb = mapper.readValue(p, UpdateQueryRequestPb.class); + return UpdateQueryRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestPb.java new file mode 100755 index 000000000..b77847c02 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateQueryRequestPb { + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + + @JsonIgnore private String id; + + @JsonProperty("query") + private UpdateQueryRequestQuery query; + + @JsonProperty("update_mask") + private String updateMask; + + public UpdateQueryRequestPb setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + + public UpdateQueryRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateQueryRequestPb setQuery(UpdateQueryRequestQuery query) { + this.query = query; + return this; + } + + public UpdateQueryRequestQuery getQuery() { + return query; + } + + public UpdateQueryRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + 
if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateQueryRequestPb that = (UpdateQueryRequestPb) o; + return Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName) + && Objects.equals(id, that.id) + && Objects.equals(query, that.query) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(autoResolveDisplayName, id, query, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateQueryRequestPb.class) + .add("autoResolveDisplayName", autoResolveDisplayName) + .add("id", id) + .add("query", query) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java index a0d9ac14e..6a1112c9f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java @@ -4,58 +4,58 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = 
UpdateQueryRequestQuery.UpdateQueryRequestQuerySerializer.class) +@JsonDeserialize(using = UpdateQueryRequestQuery.UpdateQueryRequestQueryDeserializer.class) public class UpdateQueryRequestQuery { /** Whether to apply a 1000 row limit to the query result. */ - @JsonProperty("apply_auto_limit") private Boolean applyAutoLimit; /** Name of the catalog where this query will be executed. */ - @JsonProperty("catalog") private String catalog; /** * General description that conveys additional information about this query such as usage notes. */ - @JsonProperty("description") private String description; /** * Display name of the query that appears in list views, widget headings, and on the query page. */ - @JsonProperty("display_name") private String displayName; /** Username of the user that owns the query. */ - @JsonProperty("owner_user_name") private String ownerUserName; /** List of query parameter definitions. */ - @JsonProperty("parameters") private Collection parameters; /** Text of the query to be run. */ - @JsonProperty("query_text") private String queryText; /** Sets the "Run as" role for the object. */ - @JsonProperty("run_as_mode") private RunAsMode runAsMode; /** Name of the schema where this query will be executed. */ - @JsonProperty("schema") private String schema; /** */ - @JsonProperty("tags") private Collection tags; /** ID of the SQL warehouse attached to the query. 
*/ - @JsonProperty("warehouse_id") private String warehouseId; public UpdateQueryRequestQuery setApplyAutoLimit(Boolean applyAutoLimit) { @@ -207,4 +207,61 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + UpdateQueryRequestQueryPb toPb() { + UpdateQueryRequestQueryPb pb = new UpdateQueryRequestQueryPb(); + pb.setApplyAutoLimit(applyAutoLimit); + pb.setCatalog(catalog); + pb.setDescription(description); + pb.setDisplayName(displayName); + pb.setOwnerUserName(ownerUserName); + pb.setParameters(parameters); + pb.setQueryText(queryText); + pb.setRunAsMode(runAsMode); + pb.setSchema(schema); + pb.setTags(tags); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static UpdateQueryRequestQuery fromPb(UpdateQueryRequestQueryPb pb) { + UpdateQueryRequestQuery model = new UpdateQueryRequestQuery(); + model.setApplyAutoLimit(pb.getApplyAutoLimit()); + model.setCatalog(pb.getCatalog()); + model.setDescription(pb.getDescription()); + model.setDisplayName(pb.getDisplayName()); + model.setOwnerUserName(pb.getOwnerUserName()); + model.setParameters(pb.getParameters()); + model.setQueryText(pb.getQueryText()); + model.setRunAsMode(pb.getRunAsMode()); + model.setSchema(pb.getSchema()); + model.setTags(pb.getTags()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class UpdateQueryRequestQuerySerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateQueryRequestQuery value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateQueryRequestQueryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateQueryRequestQueryDeserializer + extends JsonDeserializer { + @Override + public UpdateQueryRequestQuery deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateQueryRequestQueryPb pb = mapper.readValue(p, UpdateQueryRequestQueryPb.class); + return UpdateQueryRequestQuery.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQueryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQueryPb.java new file mode 100755 index 000000000..35b8f6a7e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQueryPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateQueryRequestQueryPb { + @JsonProperty("apply_auto_limit") + private Boolean applyAutoLimit; + + @JsonProperty("catalog") + private String catalog; + + @JsonProperty("description") + private String description; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("owner_user_name") + private String ownerUserName; + + @JsonProperty("parameters") + private Collection parameters; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("run_as_mode") + private RunAsMode runAsMode; + + @JsonProperty("schema") + private String schema; + + @JsonProperty("tags") + private Collection tags; + + @JsonProperty("warehouse_id") + private String warehouseId; + + public UpdateQueryRequestQueryPb setApplyAutoLimit(Boolean applyAutoLimit) { + this.applyAutoLimit = applyAutoLimit; + return this; + } + + public Boolean getApplyAutoLimit() { + return applyAutoLimit; + } + + public UpdateQueryRequestQueryPb setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + 
return catalog; + } + + public UpdateQueryRequestQueryPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public UpdateQueryRequestQueryPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateQueryRequestQueryPb setOwnerUserName(String ownerUserName) { + this.ownerUserName = ownerUserName; + return this; + } + + public String getOwnerUserName() { + return ownerUserName; + } + + public UpdateQueryRequestQueryPb setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + public UpdateQueryRequestQueryPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public UpdateQueryRequestQueryPb setRunAsMode(RunAsMode runAsMode) { + this.runAsMode = runAsMode; + return this; + } + + public RunAsMode getRunAsMode() { + return runAsMode; + } + + public UpdateQueryRequestQueryPb setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public UpdateQueryRequestQueryPb setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public UpdateQueryRequestQueryPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateQueryRequestQueryPb that = (UpdateQueryRequestQueryPb) o; + return Objects.equals(applyAutoLimit, that.applyAutoLimit) + && Objects.equals(catalog, that.catalog) + && 
Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(ownerUserName, that.ownerUserName) + && Objects.equals(parameters, that.parameters) + && Objects.equals(queryText, that.queryText) + && Objects.equals(runAsMode, that.runAsMode) + && Objects.equals(schema, that.schema) + && Objects.equals(tags, that.tags) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + applyAutoLimit, + catalog, + description, + displayName, + ownerUserName, + parameters, + queryText, + runAsMode, + schema, + tags, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(UpdateQueryRequestQueryPb.class) + .add("applyAutoLimit", applyAutoLimit) + .add("catalog", catalog) + .add("description", description) + .add("displayName", displayName) + .add("ownerUserName", ownerUserName) + .add("parameters", parameters) + .add("queryText", queryText) + .add("runAsMode", runAsMode) + .add("schema", schema) + .add("tags", tags) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java index 2db9c2631..34ef35fac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateResponse.UpdateResponseSerializer.class) +@JsonDeserialize(using = UpdateResponse.UpdateResponseDeserializer.class) public class UpdateResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateResponse.class).toString(); } + + UpdateResponsePb toPb() { + UpdateResponsePb pb = new UpdateResponsePb(); + + return pb; + } + + static UpdateResponse fromPb(UpdateResponsePb pb) { + UpdateResponse model = new UpdateResponse(); + + return model; + } + + public static class UpdateResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateResponseDeserializer extends JsonDeserializer { + @Override + public UpdateResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateResponsePb pb = mapper.readValue(p, UpdateResponsePb.class); + return UpdateResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponsePb.java new file mode 100755 index 000000000..7a47804f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java index 1cf729a01..faa07bbee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateVisualizationRequest.UpdateVisualizationRequestSerializer.class) +@JsonDeserialize(using = UpdateVisualizationRequest.UpdateVisualizationRequestDeserializer.class) public class 
UpdateVisualizationRequest { /** */ - @JsonIgnore private String id; + private String id; /** * The field mask must be a single string, with multiple fields separated by commas (no spaces). @@ -24,11 +34,9 @@ public class UpdateVisualizationRequest { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. */ - @JsonProperty("update_mask") private String updateMask; /** */ - @JsonProperty("visualization") private UpdateVisualizationRequestVisualization visualization; public UpdateVisualizationRequest setId(String id) { @@ -82,4 +90,45 @@ public String toString() { .add("visualization", visualization) .toString(); } + + UpdateVisualizationRequestPb toPb() { + UpdateVisualizationRequestPb pb = new UpdateVisualizationRequestPb(); + pb.setId(id); + pb.setUpdateMask(updateMask); + pb.setVisualization(visualization); + + return pb; + } + + static UpdateVisualizationRequest fromPb(UpdateVisualizationRequestPb pb) { + UpdateVisualizationRequest model = new UpdateVisualizationRequest(); + model.setId(pb.getId()); + model.setUpdateMask(pb.getUpdateMask()); + model.setVisualization(pb.getVisualization()); + + return model; + } + + public static class UpdateVisualizationRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateVisualizationRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateVisualizationRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateVisualizationRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateVisualizationRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateVisualizationRequestPb pb = mapper.readValue(p, UpdateVisualizationRequestPb.class); + return UpdateVisualizationRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestPb.java new file mode 100755 index 000000000..9da02642a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateVisualizationRequestPb { + @JsonIgnore private String id; + + @JsonProperty("update_mask") + private String updateMask; + + @JsonProperty("visualization") + private UpdateVisualizationRequestVisualization visualization; + + public UpdateVisualizationRequestPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateVisualizationRequestPb setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + public UpdateVisualizationRequestPb setVisualization( + UpdateVisualizationRequestVisualization visualization) { + this.visualization = visualization; + return this; + } + + public UpdateVisualizationRequestVisualization getVisualization() { + return visualization; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateVisualizationRequestPb that = 
(UpdateVisualizationRequestPb) o; + return Objects.equals(id, that.id) + && Objects.equals(updateMask, that.updateMask) + && Objects.equals(visualization, that.visualization); + } + + @Override + public int hashCode() { + return Objects.hash(id, updateMask, visualization); + } + + @Override + public String toString() { + return new ToStringer(UpdateVisualizationRequestPb.class) + .add("id", id) + .add("updateMask", updateMask) + .add("visualization", visualization) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java index faf432786..9bd476ee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java @@ -4,31 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + UpdateVisualizationRequestVisualization.UpdateVisualizationRequestVisualizationSerializer + .class) +@JsonDeserialize( + using = + UpdateVisualizationRequestVisualization.UpdateVisualizationRequestVisualizationDeserializer + 
.class) public class UpdateVisualizationRequestVisualization { /** The display name of the visualization. */ - @JsonProperty("display_name") private String displayName; /** * The visualization options varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying visualization options directly. */ - @JsonProperty("serialized_options") private String serializedOptions; /** * The visualization query plan varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying the visualization query plan directly. */ - @JsonProperty("serialized_query_plan") private String serializedQueryPlan; /** The type of visualization: counter, table, funnel, and so on. */ - @JsonProperty("type") private String typeValue; public UpdateVisualizationRequestVisualization setDisplayName(String displayName) { @@ -93,4 +106,51 @@ public String toString() { .add("typeValue", typeValue) .toString(); } + + UpdateVisualizationRequestVisualizationPb toPb() { + UpdateVisualizationRequestVisualizationPb pb = new UpdateVisualizationRequestVisualizationPb(); + pb.setDisplayName(displayName); + pb.setSerializedOptions(serializedOptions); + pb.setSerializedQueryPlan(serializedQueryPlan); + pb.setType(typeValue); + + return pb; + } + + static UpdateVisualizationRequestVisualization fromPb( + UpdateVisualizationRequestVisualizationPb pb) { + UpdateVisualizationRequestVisualization model = new UpdateVisualizationRequestVisualization(); + model.setDisplayName(pb.getDisplayName()); + model.setSerializedOptions(pb.getSerializedOptions()); + model.setSerializedQueryPlan(pb.getSerializedQueryPlan()); + model.setType(pb.getType()); + + return model; + } + + public static class UpdateVisualizationRequestVisualizationSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateVisualizationRequestVisualization value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { 
+ UpdateVisualizationRequestVisualizationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateVisualizationRequestVisualizationDeserializer + extends JsonDeserializer { + @Override + public UpdateVisualizationRequestVisualization deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateVisualizationRequestVisualizationPb pb = + mapper.readValue(p, UpdateVisualizationRequestVisualizationPb.class); + return UpdateVisualizationRequestVisualization.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualizationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualizationPb.java new file mode 100755 index 000000000..2c86a68d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualizationPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateVisualizationRequestVisualizationPb { + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("serialized_options") + private String serializedOptions; + + @JsonProperty("serialized_query_plan") + private String serializedQueryPlan; + + @JsonProperty("type") + private String typeValue; + + public UpdateVisualizationRequestVisualizationPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateVisualizationRequestVisualizationPb setSerializedOptions(String serializedOptions) { + this.serializedOptions = serializedOptions; + return this; + } + + public String getSerializedOptions() { + return serializedOptions; + } + + public UpdateVisualizationRequestVisualizationPb setSerializedQueryPlan( + String serializedQueryPlan) { + this.serializedQueryPlan = serializedQueryPlan; + return this; + } + + public String getSerializedQueryPlan() { + return serializedQueryPlan; + } + + public UpdateVisualizationRequestVisualizationPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateVisualizationRequestVisualizationPb that = (UpdateVisualizationRequestVisualizationPb) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(serializedOptions, that.serializedOptions) + && Objects.equals(serializedQueryPlan, that.serializedQueryPlan) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, 
serializedOptions, serializedQueryPlan, typeValue); + } + + @Override + public String toString() { + return new ToStringer(UpdateVisualizationRequestVisualizationPb.class) + .add("displayName", displayName) + .add("serializedOptions", serializedOptions) + .add("serializedQueryPlan", serializedQueryPlan) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/User.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/User.java index 5d6d4d36f..5994341e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/User.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/User.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = User.UserSerializer.class) +@JsonDeserialize(using = User.UserDeserializer.class) public class User { /** */ - @JsonProperty("email") private String email; /** */ - @JsonProperty("id") private Long id; /** */ - @JsonProperty("name") private String name; public User setEmail(String email) { @@ -71,4 +79,41 @@ public String toString() { .add("name", name) .toString(); } + + UserPb toPb() { + UserPb pb = new UserPb(); + pb.setEmail(email); + pb.setId(id); + pb.setName(name); + + return pb; + } + + static 
User fromPb(UserPb pb) { + User model = new User(); + model.setEmail(pb.getEmail()); + model.setId(pb.getId()); + model.setName(pb.getName()); + + return model; + } + + public static class UserSerializer extends JsonSerializer { + @Override + public void serialize(User value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UserPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UserDeserializer extends JsonDeserializer { + @Override + public User deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UserPb pb = mapper.readValue(p, UserPb.class); + return User.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UserPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UserPb.java new file mode 100755 index 000000000..479038c38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UserPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UserPb { + @JsonProperty("email") + private String email; + + @JsonProperty("id") + private Long id; + + @JsonProperty("name") + private String name; + + public UserPb setEmail(String email) { + this.email = email; + return this; + } + + public String getEmail() { + return email; + } + + public UserPb setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public UserPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UserPb that = (UserPb) o; + return Objects.equals(email, that.email) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(email, id, name); + } + + @Override + public String toString() { + return new ToStringer(UserPb.class) + .add("email", email) + .add("id", id) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Visualization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Visualization.java index 0ef4000c6..92e384dd3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Visualization.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Visualization.java @@ -4,47 +4,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Visualization.VisualizationSerializer.class) +@JsonDeserialize(using = Visualization.VisualizationDeserializer.class) public class Visualization { /** The timestamp indicating when the visualization was created. */ - @JsonProperty("create_time") private String createTime; /** The display name of the visualization. */ - @JsonProperty("display_name") private String displayName; /** UUID identifying the visualization. */ - @JsonProperty("id") private String id; /** UUID of the query that the visualization is attached to. */ - @JsonProperty("query_id") private String queryId; /** * The visualization options varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying visualization options directly. */ - @JsonProperty("serialized_options") private String serializedOptions; /** * The visualization query plan varies widely from one visualization type to the next and is * unsupported. Databricks does not recommend modifying the visualization query plan directly. */ - @JsonProperty("serialized_query_plan") private String serializedQueryPlan; /** The type of visualization: counter, table, funnel, and so on. */ - @JsonProperty("type") private String typeValue; /** The timestamp indicating when the visualization was updated. 
*/ - @JsonProperty("update_time") private String updateTime; public Visualization setCreateTime(String createTime) { @@ -160,4 +163,51 @@ public String toString() { .add("updateTime", updateTime) .toString(); } + + VisualizationPb toPb() { + VisualizationPb pb = new VisualizationPb(); + pb.setCreateTime(createTime); + pb.setDisplayName(displayName); + pb.setId(id); + pb.setQueryId(queryId); + pb.setSerializedOptions(serializedOptions); + pb.setSerializedQueryPlan(serializedQueryPlan); + pb.setType(typeValue); + pb.setUpdateTime(updateTime); + + return pb; + } + + static Visualization fromPb(VisualizationPb pb) { + Visualization model = new Visualization(); + model.setCreateTime(pb.getCreateTime()); + model.setDisplayName(pb.getDisplayName()); + model.setId(pb.getId()); + model.setQueryId(pb.getQueryId()); + model.setSerializedOptions(pb.getSerializedOptions()); + model.setSerializedQueryPlan(pb.getSerializedQueryPlan()); + model.setType(pb.getType()); + model.setUpdateTime(pb.getUpdateTime()); + + return model; + } + + public static class VisualizationSerializer extends JsonSerializer { + @Override + public void serialize(Visualization value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VisualizationPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VisualizationDeserializer extends JsonDeserializer { + @Override + public Visualization deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VisualizationPb pb = mapper.readValue(p, VisualizationPb.class); + return Visualization.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/VisualizationPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/VisualizationPb.java new file mode 100755 index 000000000..79d3300ba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/VisualizationPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class VisualizationPb { + @JsonProperty("create_time") + private String createTime; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("id") + private String id; + + @JsonProperty("query_id") + private String queryId; + + @JsonProperty("serialized_options") + private String serializedOptions; + + @JsonProperty("serialized_query_plan") + private String serializedQueryPlan; + + @JsonProperty("type") + private String typeValue; + + @JsonProperty("update_time") + private String updateTime; + + public VisualizationPb setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public VisualizationPb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public VisualizationPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public VisualizationPb setQueryId(String queryId) { + this.queryId = queryId; + return this; + } + + public String getQueryId() { + return queryId; + } + 
+ public VisualizationPb setSerializedOptions(String serializedOptions) { + this.serializedOptions = serializedOptions; + return this; + } + + public String getSerializedOptions() { + return serializedOptions; + } + + public VisualizationPb setSerializedQueryPlan(String serializedQueryPlan) { + this.serializedQueryPlan = serializedQueryPlan; + return this; + } + + public String getSerializedQueryPlan() { + return serializedQueryPlan; + } + + public VisualizationPb setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + public VisualizationPb setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VisualizationPb that = (VisualizationPb) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(displayName, that.displayName) + && Objects.equals(id, that.id) + && Objects.equals(queryId, that.queryId) + && Objects.equals(serializedOptions, that.serializedOptions) + && Objects.equals(serializedQueryPlan, that.serializedQueryPlan) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + displayName, + id, + queryId, + serializedOptions, + serializedQueryPlan, + typeValue, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(VisualizationPb.class) + .add("createTime", createTime) + .add("displayName", displayName) + .add("id", id) + .add("queryId", queryId) + .add("serializedOptions", serializedOptions) + .add("serializedQueryPlan", serializedQueryPlan) + .add("typeValue", typeValue) + .add("updateTime", updateTime) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java index 1dc83f0f6..d4c43ef4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java @@ -4,25 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = WarehouseAccessControlRequest.WarehouseAccessControlRequestSerializer.class) +@JsonDeserialize( + using = WarehouseAccessControlRequest.WarehouseAccessControlRequestDeserializer.class) public class WarehouseAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public WarehouseAccessControlRequest setGroupName(String groupName) { @@ -87,4 +95,48 @@ public String toString() { .add("userName", userName) .toString(); 
} + + WarehouseAccessControlRequestPb toPb() { + WarehouseAccessControlRequestPb pb = new WarehouseAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static WarehouseAccessControlRequest fromPb(WarehouseAccessControlRequestPb pb) { + WarehouseAccessControlRequest model = new WarehouseAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class WarehouseAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + WarehouseAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehouseAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehouseAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public WarehouseAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehouseAccessControlRequestPb pb = + mapper.readValue(p, WarehouseAccessControlRequestPb.class); + return WarehouseAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequestPb.java new file mode 100755 index 000000000..871b9e2e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequestPb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WarehouseAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private WarehousePermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public WarehouseAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public WarehouseAccessControlRequestPb setPermissionLevel( + WarehousePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WarehousePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public WarehouseAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public 
WarehouseAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehouseAccessControlRequestPb that = (WarehouseAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(WarehouseAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponse.java index d267b6f64..5c5f5df4e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponse.java @@ -4,30 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = WarehouseAccessControlResponse.WarehouseAccessControlResponseSerializer.class) +@JsonDeserialize( + using = WarehouseAccessControlResponse.WarehouseAccessControlResponseDeserializer.class) public class WarehouseAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public WarehouseAccessControlResponse setAllPermissions( @@ -103,4 +111,50 @@ public String toString() { .add("userName", userName) .toString(); } + + WarehouseAccessControlResponsePb toPb() { + WarehouseAccessControlResponsePb pb = new WarehouseAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static WarehouseAccessControlResponse fromPb(WarehouseAccessControlResponsePb pb) { + WarehouseAccessControlResponse model = new WarehouseAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class WarehouseAccessControlResponseSerializer + 
extends JsonSerializer { + @Override + public void serialize( + WarehouseAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehouseAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehouseAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public WarehouseAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehouseAccessControlResponsePb pb = + mapper.readValue(p, WarehouseAccessControlResponsePb.class); + return WarehouseAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponsePb.java new file mode 100755 index 000000000..8090ad295 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlResponsePb.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WarehouseAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public WarehouseAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public WarehouseAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public WarehouseAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public WarehouseAccessControlResponsePb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public WarehouseAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehouseAccessControlResponsePb that = (WarehouseAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, 
that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(WarehouseAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java index e8fd1f68e..5c7797c6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WarehousePermission.WarehousePermissionSerializer.class) +@JsonDeserialize(using = 
WarehousePermission.WarehousePermissionDeserializer.class) public class WarehousePermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; public WarehousePermission setInherited(Boolean inherited) { @@ -72,4 +80,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + WarehousePermissionPb toPb() { + WarehousePermissionPb pb = new WarehousePermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static WarehousePermission fromPb(WarehousePermissionPb pb) { + WarehousePermission model = new WarehousePermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class WarehousePermissionSerializer extends JsonSerializer { + @Override + public void serialize(WarehousePermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehousePermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehousePermissionDeserializer + extends JsonDeserializer { + @Override + public WarehousePermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehousePermissionPb pb = mapper.readValue(p, WarehousePermissionPb.class); + return WarehousePermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionPb.java new file mode 100755 index 000000000..cacb8d935 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WarehousePermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private WarehousePermissionLevel permissionLevel; + + public WarehousePermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public WarehousePermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public WarehousePermissionPb setPermissionLevel(WarehousePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WarehousePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehousePermissionPb 
that = (WarehousePermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(WarehousePermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissions.java index c23d43f3f..5be77338c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WarehousePermissions.WarehousePermissionsSerializer.class) +@JsonDeserialize(using = WarehousePermissions.WarehousePermissionsDeserializer.class) public class WarehousePermissions { /** */ - 
@JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public WarehousePermissions setAccessControlList( @@ -73,4 +81,44 @@ public String toString() { .add("objectType", objectType) .toString(); } + + WarehousePermissionsPb toPb() { + WarehousePermissionsPb pb = new WarehousePermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static WarehousePermissions fromPb(WarehousePermissionsPb pb) { + WarehousePermissions model = new WarehousePermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class WarehousePermissionsSerializer extends JsonSerializer { + @Override + public void serialize( + WarehousePermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehousePermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehousePermissionsDeserializer + extends JsonDeserializer { + @Override + public WarehousePermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehousePermissionsPb pb = mapper.readValue(p, WarehousePermissionsPb.class); + return WarehousePermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java index 91241417b..55dd197ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = WarehousePermissionsDescription.WarehousePermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = WarehousePermissionsDescription.WarehousePermissionsDescriptionDeserializer.class) public class WarehousePermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; public WarehousePermissionsDescription setDescription(String description) { @@ -57,4 +68,44 @@ public String toString() { .add("permissionLevel", 
permissionLevel) .toString(); } + + WarehousePermissionsDescriptionPb toPb() { + WarehousePermissionsDescriptionPb pb = new WarehousePermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static WarehousePermissionsDescription fromPb(WarehousePermissionsDescriptionPb pb) { + WarehousePermissionsDescription model = new WarehousePermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class WarehousePermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + WarehousePermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehousePermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehousePermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public WarehousePermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehousePermissionsDescriptionPb pb = + mapper.readValue(p, WarehousePermissionsDescriptionPb.class); + return WarehousePermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescriptionPb.java new file mode 100755 index 000000000..aaeba3365 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WarehousePermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private WarehousePermissionLevel permissionLevel; + + public WarehousePermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public WarehousePermissionsDescriptionPb setPermissionLevel( + WarehousePermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WarehousePermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehousePermissionsDescriptionPb that = (WarehousePermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(WarehousePermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsPb.java new file mode 100755 index 000000000..017fc5267 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WarehousePermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public WarehousePermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public WarehousePermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public WarehousePermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehousePermissionsPb that = (WarehousePermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(WarehousePermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequest.java index 
f4cd0c4a7..2a6a2dd4f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WarehousePermissionsRequest.WarehousePermissionsRequestSerializer.class) +@JsonDeserialize(using = WarehousePermissionsRequest.WarehousePermissionsRequestDeserializer.class) public class WarehousePermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The SQL warehouse for which to get or manage permissions. 
*/ - @JsonIgnore private String warehouseId; + private String warehouseId; public WarehousePermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("warehouseId", warehouseId) .toString(); } + + WarehousePermissionsRequestPb toPb() { + WarehousePermissionsRequestPb pb = new WarehousePermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setWarehouseId(warehouseId); + + return pb; + } + + static WarehousePermissionsRequest fromPb(WarehousePermissionsRequestPb pb) { + WarehousePermissionsRequest model = new WarehousePermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setWarehouseId(pb.getWarehouseId()); + + return model; + } + + public static class WarehousePermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + WarehousePermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehousePermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehousePermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public WarehousePermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehousePermissionsRequestPb pb = mapper.readValue(p, WarehousePermissionsRequestPb.class); + return WarehousePermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequestPb.java new file mode 100755 index 000000000..88a13d204 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WarehousePermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String warehouseId; + + public WarehousePermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public WarehousePermissionsRequestPb setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehousePermissionsRequestPb that = (WarehousePermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return 
Objects.hash(accessControlList, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(WarehousePermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java index 562866b2e..71925fe0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java @@ -4,20 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = WarehouseTypePair.WarehouseTypePairSerializer.class) +@JsonDeserialize(using = WarehouseTypePair.WarehouseTypePairDeserializer.class) public class WarehouseTypePair { /** * If set to false the specific warehouse type will not be be allowed as a value for * warehouse_type in CreateWarehouse and EditWarehouse */ - @JsonProperty("enabled") private Boolean enabled; /** Warehouse type: `PRO` or `CLASSIC`. 
*/ - @JsonProperty("warehouse_type") private WarehouseTypePairWarehouseType warehouseType; public WarehouseTypePair setEnabled(Boolean enabled) { @@ -59,4 +68,40 @@ public String toString() { .add("warehouseType", warehouseType) .toString(); } + + WarehouseTypePairPb toPb() { + WarehouseTypePairPb pb = new WarehouseTypePairPb(); + pb.setEnabled(enabled); + pb.setWarehouseType(warehouseType); + + return pb; + } + + static WarehouseTypePair fromPb(WarehouseTypePairPb pb) { + WarehouseTypePair model = new WarehouseTypePair(); + model.setEnabled(pb.getEnabled()); + model.setWarehouseType(pb.getWarehouseType()); + + return model; + } + + public static class WarehouseTypePairSerializer extends JsonSerializer { + @Override + public void serialize(WarehouseTypePair value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WarehouseTypePairPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WarehouseTypePairDeserializer extends JsonDeserializer { + @Override + public WarehouseTypePair deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WarehouseTypePairPb pb = mapper.readValue(p, WarehouseTypePairPb.class); + return WarehouseTypePair.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairPb.java new file mode 100755 index 000000000..5768553be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WarehouseTypePairPb { + @JsonProperty("enabled") + private Boolean enabled; + + @JsonProperty("warehouse_type") + private WarehouseTypePairWarehouseType warehouseType; + + public WarehouseTypePairPb setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public WarehouseTypePairPb setWarehouseType(WarehouseTypePairWarehouseType warehouseType) { + this.warehouseType = warehouseType; + return this; + } + + public WarehouseTypePairWarehouseType getWarehouseType() { + return warehouseType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WarehouseTypePairPb that = (WarehouseTypePairPb) o; + return Objects.equals(enabled, that.enabled) + && Objects.equals(warehouseType, that.warehouseType); + } + + @Override + public int hashCode() { + return Objects.hash(enabled, warehouseType); + } + + @Override + public String toString() { + return new ToStringer(WarehouseTypePairPb.class) + .add("enabled", enabled) + .add("warehouseType", warehouseType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java index d1fb0fda2..0fb06c3f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java @@ -21,7 +21,7 @@ public CreateWarehouseResponse create(CreateWarehouseRequest request) { String path = "/api/2.0/sql/warehouses"; try { Request req = new Request("POST", path, apiClient.serialize(request)); 
- ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateWarehouseResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteWarehouseRequest request) { String path = String.format("/api/2.0/sql/warehouses/%s", request.getId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteWarehouseResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public void edit(EditWarehouseRequest request) { String path = String.format("/api/2.0/sql/warehouses/%s/edit", request.getId()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EditWarehouseResponse.class); @@ -62,7 +62,7 @@ public GetWarehouseResponse get(GetWarehouseRequest request) { String path = String.format("/api/2.0/sql/warehouses/%s", request.getId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetWarehouseResponse.class); } catch (IOException e) { @@ -78,7 +78,7 @@ public GetWarehousePermissionLevelsResponse getPermissionLevels( "/api/2.0/permissions/warehouses/%s/permissionLevels", request.getWarehouseId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetWarehousePermissionLevelsResponse.class); } catch (IOException e) { @@ -91,7 +91,7 @@ public WarehousePermissions 
getPermissions(GetWarehousePermissionsRequest reques String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WarehousePermissions.class); } catch (IOException e) { @@ -116,7 +116,7 @@ public ListWarehousesResponse list(ListWarehousesRequest request) { String path = "/api/2.0/sql/warehouses"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListWarehousesResponse.class); } catch (IOException e) { @@ -129,7 +129,7 @@ public WarehousePermissions setPermissions(WarehousePermissionsRequest request) String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, WarehousePermissions.class); @@ -143,7 +143,7 @@ public void setWorkspaceWarehouseConfig(SetWorkspaceWarehouseConfigRequest reque String path = "/api/2.0/sql/config/warehouses"; try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, SetWorkspaceWarehouseConfigResponse.class); @@ -157,7 +157,7 @@ public void start(StartRequest request) { String path = String.format("/api/2.0/sql/warehouses/%s/start", request.getId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + 
ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, StartWarehouseResponse.class); } catch (IOException e) { @@ -170,7 +170,7 @@ public void stop(StopRequest request) { String path = String.format("/api/2.0/sql/warehouses/%s/stop", request.getId()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, StopWarehouseResponse.class); } catch (IOException e) { @@ -183,7 +183,7 @@ public WarehousePermissions updatePermissions(WarehousePermissionsRequest reques String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, WarehousePermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java index 442092020..430267ac8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Widget.WidgetSerializer.class) +@JsonDeserialize(using = Widget.WidgetDeserializer.class) public class Widget { /** The unique ID for this widget. */ - @JsonProperty("id") private String id; /** */ - @JsonProperty("options") private WidgetOptions options; /** @@ -23,11 +32,9 @@ public class Widget { * create a new one with a POST request to the same endpoint. Databricks does not recommend * constructing ad-hoc visualizations entirely in JSON. */ - @JsonProperty("visualization") private LegacyVisualization visualization; /** Unused field. */ - @JsonProperty("width") private Long width; public Widget setId(String id) { @@ -91,4 +98,43 @@ public String toString() { .add("width", width) .toString(); } + + WidgetPb toPb() { + WidgetPb pb = new WidgetPb(); + pb.setId(id); + pb.setOptions(options); + pb.setVisualization(visualization); + pb.setWidth(width); + + return pb; + } + + static Widget fromPb(WidgetPb pb) { + Widget model = new Widget(); + model.setId(pb.getId()); + model.setOptions(pb.getOptions()); + model.setVisualization(pb.getVisualization()); + model.setWidth(pb.getWidth()); + + return model; + } + + public static class WidgetSerializer extends JsonSerializer { + @Override + public void serialize(Widget value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WidgetPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WidgetDeserializer extends JsonDeserializer { + @Override + public Widget deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WidgetPb pb = mapper.readValue(p, WidgetPb.class); + return Widget.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java index 45a832fe8..de7e6c28d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java @@ -4,43 +4,47 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = WidgetOptions.WidgetOptionsSerializer.class) +@JsonDeserialize(using = WidgetOptions.WidgetOptionsDeserializer.class) public class WidgetOptions { /** Timestamp when this object was created */ - @JsonProperty("created_at") private String createdAt; /** Custom description of the widget */ - @JsonProperty("description") private String description; /** Whether this widget is hidden on the dashboard. */ - @JsonProperty("isHidden") private Boolean isHidden; /** * How parameters used by the visualization in this widget relate to other widgets on the * dashboard. Databricks does not recommend modifying this definition in JSON. 
*/ - @JsonProperty("parameterMappings") private Object parameterMappings; /** * Coordinates of this widget on a dashboard. This portion of the API changes frequently and is * unsupported. */ - @JsonProperty("position") private WidgetPosition position; /** Custom title of the widget */ - @JsonProperty("title") private String title; /** Timestamp of the last time this object was updated. */ - @JsonProperty("updated_at") private String updatedAt; public WidgetOptions setCreatedAt(String createdAt) { @@ -138,4 +142,49 @@ public String toString() { .add("updatedAt", updatedAt) .toString(); } + + WidgetOptionsPb toPb() { + WidgetOptionsPb pb = new WidgetOptionsPb(); + pb.setCreatedAt(createdAt); + pb.setDescription(description); + pb.setIsHidden(isHidden); + pb.setParameterMappings(parameterMappings); + pb.setPosition(position); + pb.setTitle(title); + pb.setUpdatedAt(updatedAt); + + return pb; + } + + static WidgetOptions fromPb(WidgetOptionsPb pb) { + WidgetOptions model = new WidgetOptions(); + model.setCreatedAt(pb.getCreatedAt()); + model.setDescription(pb.getDescription()); + model.setIsHidden(pb.getIsHidden()); + model.setParameterMappings(pb.getParameterMappings()); + model.setPosition(pb.getPosition()); + model.setTitle(pb.getTitle()); + model.setUpdatedAt(pb.getUpdatedAt()); + + return model; + } + + public static class WidgetOptionsSerializer extends JsonSerializer { + @Override + public void serialize(WidgetOptions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WidgetOptionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WidgetOptionsDeserializer extends JsonDeserializer { + @Override + public WidgetOptions deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WidgetOptionsPb pb = mapper.readValue(p, WidgetOptionsPb.class); + return WidgetOptions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptionsPb.java new file mode 100755 index 000000000..474360d75 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptionsPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WidgetOptionsPb { + @JsonProperty("created_at") + private String createdAt; + + @JsonProperty("description") + private String description; + + @JsonProperty("isHidden") + private Boolean isHidden; + + @JsonProperty("parameterMappings") + private Object parameterMappings; + + @JsonProperty("position") + private WidgetPosition position; + + @JsonProperty("title") + private String title; + + @JsonProperty("updated_at") + private String updatedAt; + + public WidgetOptionsPb setCreatedAt(String createdAt) { + this.createdAt = createdAt; + return this; + } + + public String getCreatedAt() { + return createdAt; + } + + public WidgetOptionsPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public WidgetOptionsPb setIsHidden(Boolean isHidden) { + this.isHidden = isHidden; + return this; + } + + public Boolean getIsHidden() { + return isHidden; + } + + public WidgetOptionsPb setParameterMappings(Object parameterMappings) { + this.parameterMappings = parameterMappings; + return this; + } + + public Object getParameterMappings() { + return 
parameterMappings; + } + + public WidgetOptionsPb setPosition(WidgetPosition position) { + this.position = position; + return this; + } + + public WidgetPosition getPosition() { + return position; + } + + public WidgetOptionsPb setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + public WidgetOptionsPb setUpdatedAt(String updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public String getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WidgetOptionsPb that = (WidgetOptionsPb) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(description, that.description) + && Objects.equals(isHidden, that.isHidden) + && Objects.equals(parameterMappings, that.parameterMappings) + && Objects.equals(position, that.position) + && Objects.equals(title, that.title) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, description, isHidden, parameterMappings, position, title, updatedAt); + } + + @Override + public String toString() { + return new ToStringer(WidgetOptionsPb.class) + .add("createdAt", createdAt) + .add("description", description) + .add("isHidden", isHidden) + .add("parameterMappings", parameterMappings) + .add("position", position) + .add("title", title) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPb.java new file mode 100755 index 000000000..cac9978f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WidgetPb { + @JsonProperty("id") + private String id; + + @JsonProperty("options") + private WidgetOptions options; + + @JsonProperty("visualization") + private LegacyVisualization visualization; + + @JsonProperty("width") + private Long width; + + public WidgetPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public WidgetPb setOptions(WidgetOptions options) { + this.options = options; + return this; + } + + public WidgetOptions getOptions() { + return options; + } + + public WidgetPb setVisualization(LegacyVisualization visualization) { + this.visualization = visualization; + return this; + } + + public LegacyVisualization getVisualization() { + return visualization; + } + + public WidgetPb setWidth(Long width) { + this.width = width; + return this; + } + + public Long getWidth() { + return width; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WidgetPb that = (WidgetPb) o; + return Objects.equals(id, that.id) + && Objects.equals(options, that.options) + && Objects.equals(visualization, that.visualization) + && Objects.equals(width, that.width); + } + + @Override + public int hashCode() { + return Objects.hash(id, options, visualization, width); + } + + @Override + public String toString() { + return new ToStringer(WidgetPb.class) + .add("id", id) + .add("options", options) + .add("visualization", visualization) + .add("width", width) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java index e114e5830..9c1aadbb3 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,25 +21,22 @@ * unsupported. */ @Generated +@JsonSerialize(using = WidgetPosition.WidgetPositionSerializer.class) +@JsonDeserialize(using = WidgetPosition.WidgetPositionDeserializer.class) public class WidgetPosition { /** reserved for internal use */ - @JsonProperty("autoHeight") private Boolean autoHeight; /** column in the dashboard grid. Values start with 0 */ - @JsonProperty("col") private Long col; /** row in the dashboard grid. 
Values start with 0 */ - @JsonProperty("row") private Long row; /** width of the widget measured in dashboard grid cells */ - @JsonProperty("sizeX") private Long sizeX; /** height of the widget measured in dashboard grid cells */ - @JsonProperty("sizeY") private Long sizeY; public WidgetPosition setAutoHeight(Boolean autoHeight) { @@ -105,4 +111,46 @@ public String toString() { .add("sizeY", sizeY) .toString(); } + + WidgetPositionPb toPb() { + WidgetPositionPb pb = new WidgetPositionPb(); + pb.setAutoHeight(autoHeight); + pb.setCol(col); + pb.setRow(row); + pb.setSizeX(sizeX); + pb.setSizeY(sizeY); + + return pb; + } + + static WidgetPosition fromPb(WidgetPositionPb pb) { + WidgetPosition model = new WidgetPosition(); + model.setAutoHeight(pb.getAutoHeight()); + model.setCol(pb.getCol()); + model.setRow(pb.getRow()); + model.setSizeX(pb.getSizeX()); + model.setSizeY(pb.getSizeY()); + + return model; + } + + public static class WidgetPositionSerializer extends JsonSerializer { + @Override + public void serialize(WidgetPosition value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WidgetPositionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WidgetPositionDeserializer extends JsonDeserializer { + @Override + public WidgetPosition deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WidgetPositionPb pb = mapper.readValue(p, WidgetPositionPb.class); + return WidgetPosition.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPositionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPositionPb.java new file mode 100755 index 000000000..a22655134 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPositionPb.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Coordinates of this widget on a dashboard. This portion of the API changes frequently and is + * unsupported. + */ +@Generated +class WidgetPositionPb { + @JsonProperty("autoHeight") + private Boolean autoHeight; + + @JsonProperty("col") + private Long col; + + @JsonProperty("row") + private Long row; + + @JsonProperty("sizeX") + private Long sizeX; + + @JsonProperty("sizeY") + private Long sizeY; + + public WidgetPositionPb setAutoHeight(Boolean autoHeight) { + this.autoHeight = autoHeight; + return this; + } + + public Boolean getAutoHeight() { + return autoHeight; + } + + public WidgetPositionPb setCol(Long col) { + this.col = col; + return this; + } + + public Long getCol() { + return col; + } + + public WidgetPositionPb setRow(Long row) { + this.row = row; + return this; + } + + public Long getRow() { + return row; + } + + public WidgetPositionPb setSizeX(Long sizeX) { + this.sizeX = sizeX; + return this; + } + + public Long getSizeX() { + return sizeX; + } + + public WidgetPositionPb setSizeY(Long sizeY) { + this.sizeY = sizeY; + return this; + } + + public Long getSizeY() { + return sizeY; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WidgetPositionPb that = (WidgetPositionPb) o; + return Objects.equals(autoHeight, that.autoHeight) + && Objects.equals(col, that.col) + && Objects.equals(row, that.row) + && Objects.equals(sizeX, that.sizeX) + && Objects.equals(sizeY, that.sizeY); + } + + @Override + public int hashCode() { + return Objects.hash(autoHeight, col, row, sizeX, sizeY); + } + + @Override + public String toString() { + return new ToStringer(WidgetPositionPb.class) + .add("autoHeight", autoHeight) + .add("col", col) + .add("row", row) + .add("sizeX", sizeX) + .add("sizeY", sizeY) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java index e403d7e5a..4247a8271 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ColumnInfo.ColumnInfoSerializer.class) +@JsonDeserialize(using = ColumnInfo.ColumnInfoDeserializer.class) 
public class ColumnInfo { /** Name of the column. */ - @JsonProperty("name") private String name; public ColumnInfo setName(String name) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(ColumnInfo.class).add("name", name).toString(); } + + ColumnInfoPb toPb() { + ColumnInfoPb pb = new ColumnInfoPb(); + pb.setName(name); + + return pb; + } + + static ColumnInfo fromPb(ColumnInfoPb pb) { + ColumnInfo model = new ColumnInfo(); + model.setName(pb.getName()); + + return model; + } + + public static class ColumnInfoSerializer extends JsonSerializer { + @Override + public void serialize(ColumnInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ColumnInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ColumnInfoDeserializer extends JsonDeserializer { + @Override + public ColumnInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ColumnInfoPb pb = mapper.readValue(p, ColumnInfoPb.class); + return ColumnInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfoPb.java new file mode 100755 index 000000000..1aa88a6bb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfoPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ColumnInfoPb { + @JsonProperty("name") + private String name; + + public ColumnInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ColumnInfoPb that = (ColumnInfoPb) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(ColumnInfoPb.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Converters.java new file mode 100755 index 000000000..6c66bc9a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.vectorsearch; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that
the seconds component is within the range protobuf Duration supports + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that the seconds component is within the range protobuf Timestamp supports + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List<String> fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List<String> fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java index 433feb92d..065caf46f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateEndpoint.CreateEndpointSerializer.class) +@JsonDeserialize(using = CreateEndpoint.CreateEndpointDeserializer.class) public class CreateEndpoint { /** The budget policy id to be applied */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** Type of endpoint */ - @JsonProperty("endpoint_type") private EndpointType endpointType; /** Name of the vector search endpoint */ - @JsonProperty("name") private String name; public CreateEndpoint setBudgetPolicyId(String budgetPolicyId) { @@ -71,4 +79,42 @@ public String toString() { .add("name", name) .toString(); } + + CreateEndpointPb toPb() { + CreateEndpointPb pb = new CreateEndpointPb(); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setEndpointType(endpointType); + pb.setName(name); + + return pb; + } + + static CreateEndpoint fromPb(CreateEndpointPb pb) { + CreateEndpoint model = new CreateEndpoint(); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setEndpointType(pb.getEndpointType()); + model.setName(pb.getName()); + + return model; + } + + public static class CreateEndpointSerializer extends JsonSerializer { + @Override + public void serialize(CreateEndpoint value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateEndpointPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateEndpointDeserializer extends 
JsonDeserializer { + @Override + public CreateEndpoint deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateEndpointPb pb = mapper.readValue(p, CreateEndpointPb.class); + return CreateEndpoint.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpointPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpointPb.java new file mode 100755 index 000000000..73b4a5dac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpointPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateEndpointPb { + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + @JsonProperty("name") + private String name; + + public CreateEndpointPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreateEndpointPb setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public CreateEndpointPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
CreateEndpointPb that = (CreateEndpointPb) o; + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(budgetPolicyId, endpointType, name); + } + + @Override + public String toString() { + return new ToStringer(CreateEndpointPb.class) + .add("budgetPolicyId", budgetPolicyId) + .add("endpointType", endpointType) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java index 0856cbced..61a6811ff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateVectorIndexRequest.CreateVectorIndexRequestSerializer.class) +@JsonDeserialize(using = CreateVectorIndexRequest.CreateVectorIndexRequestDeserializer.class) public class CreateVectorIndexRequest { /** Specification for Delta Sync 
Index. Required if `index_type` is `DELTA_SYNC`. */ - @JsonProperty("delta_sync_index_spec") private DeltaSyncVectorIndexSpecRequest deltaSyncIndexSpec; /** Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. */ - @JsonProperty("direct_access_index_spec") private DirectAccessVectorIndexSpec directAccessIndexSpec; /** Name of the endpoint to be used for serving the index */ - @JsonProperty("endpoint_name") private String endpointName; /** @@ -28,15 +36,12 @@ public class CreateVectorIndexRequest { * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages * index updates. */ - @JsonProperty("index_type") private VectorIndexType indexType; /** Name of the index */ - @JsonProperty("name") private String name; /** Primary key of the index */ - @JsonProperty("primary_key") private String primaryKey; public CreateVectorIndexRequest setDeltaSyncIndexSpec( @@ -125,4 +130,51 @@ public String toString() { .add("primaryKey", primaryKey) .toString(); } + + CreateVectorIndexRequestPb toPb() { + CreateVectorIndexRequestPb pb = new CreateVectorIndexRequestPb(); + pb.setDeltaSyncIndexSpec(deltaSyncIndexSpec); + pb.setDirectAccessIndexSpec(directAccessIndexSpec); + pb.setEndpointName(endpointName); + pb.setIndexType(indexType); + pb.setName(name); + pb.setPrimaryKey(primaryKey); + + return pb; + } + + static CreateVectorIndexRequest fromPb(CreateVectorIndexRequestPb pb) { + CreateVectorIndexRequest model = new CreateVectorIndexRequest(); + model.setDeltaSyncIndexSpec(pb.getDeltaSyncIndexSpec()); + model.setDirectAccessIndexSpec(pb.getDirectAccessIndexSpec()); + model.setEndpointName(pb.getEndpointName()); + model.setIndexType(pb.getIndexType()); + model.setName(pb.getName()); + model.setPrimaryKey(pb.getPrimaryKey()); + + return model; + } + + public static class CreateVectorIndexRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateVectorIndexRequest value, 
JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateVectorIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateVectorIndexRequestDeserializer + extends JsonDeserializer { + @Override + public CreateVectorIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateVectorIndexRequestPb pb = mapper.readValue(p, CreateVectorIndexRequestPb.class); + return CreateVectorIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequestPb.java new file mode 100755 index 000000000..d305193ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequestPb.java @@ -0,0 +1,116 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateVectorIndexRequestPb { + @JsonProperty("delta_sync_index_spec") + private DeltaSyncVectorIndexSpecRequest deltaSyncIndexSpec; + + @JsonProperty("direct_access_index_spec") + private DirectAccessVectorIndexSpec directAccessIndexSpec; + + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonProperty("index_type") + private VectorIndexType indexType; + + @JsonProperty("name") + private String name; + + @JsonProperty("primary_key") + private String primaryKey; + + public CreateVectorIndexRequestPb setDeltaSyncIndexSpec( + DeltaSyncVectorIndexSpecRequest deltaSyncIndexSpec) { + this.deltaSyncIndexSpec = deltaSyncIndexSpec; + return this; + } + + public DeltaSyncVectorIndexSpecRequest getDeltaSyncIndexSpec() { + return deltaSyncIndexSpec; + } + + public CreateVectorIndexRequestPb setDirectAccessIndexSpec( + DirectAccessVectorIndexSpec directAccessIndexSpec) { + this.directAccessIndexSpec = directAccessIndexSpec; + return this; + } + + public DirectAccessVectorIndexSpec getDirectAccessIndexSpec() { + return directAccessIndexSpec; + } + + public CreateVectorIndexRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public CreateVectorIndexRequestPb setIndexType(VectorIndexType indexType) { + this.indexType = indexType; + return this; + } + + public VectorIndexType getIndexType() { + return indexType; + } + + public CreateVectorIndexRequestPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateVectorIndexRequestPb setPrimaryKey(String primaryKey) { + this.primaryKey = primaryKey; + return this; + } + + public 
String getPrimaryKey() { + return primaryKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateVectorIndexRequestPb that = (CreateVectorIndexRequestPb) o; + return Objects.equals(deltaSyncIndexSpec, that.deltaSyncIndexSpec) + && Objects.equals(directAccessIndexSpec, that.directAccessIndexSpec) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexType, that.indexType) + && Objects.equals(name, that.name) + && Objects.equals(primaryKey, that.primaryKey); + } + + @Override + public int hashCode() { + return Objects.hash( + deltaSyncIndexSpec, directAccessIndexSpec, endpointName, indexType, name, primaryKey); + } + + @Override + public String toString() { + return new ToStringer(CreateVectorIndexRequestPb.class) + .add("deltaSyncIndexSpec", deltaSyncIndexSpec) + .add("directAccessIndexSpec", directAccessIndexSpec) + .add("endpointName", endpointName) + .add("indexType", indexType) + .add("name", name) + .add("primaryKey", primaryKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTag.java index 1736987e3..6d2b8b1d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTag.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CustomTag.CustomTagSerializer.class) +@JsonDeserialize(using = CustomTag.CustomTagDeserializer.class) public class CustomTag { /** Key field for a vector search endpoint tag. */ - @JsonProperty("key") private String key; /** [Optional] Value field for a vector search endpoint tag. */ - @JsonProperty("value") private String value; public CustomTag setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(CustomTag.class).add("key", key).add("value", value).toString(); } + + CustomTagPb toPb() { + CustomTagPb pb = new CustomTagPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static CustomTag fromPb(CustomTagPb pb) { + CustomTag model = new CustomTag(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class CustomTagSerializer extends JsonSerializer { + @Override + public void serialize(CustomTag value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CustomTagPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CustomTagDeserializer extends JsonDeserializer { + @Override + public CustomTag deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CustomTagPb pb = mapper.readValue(p, CustomTagPb.class); + return CustomTag.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTagPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTagPb.java new file mode 100755 index 000000000..437ad929b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CustomTagPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CustomTagPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public CustomTagPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public CustomTagPb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomTagPb that = (CustomTagPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(CustomTagPb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java index 3173fd54a..97b6260a9 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteDataResult.DeleteDataResultSerializer.class) +@JsonDeserialize(using = DeleteDataResult.DeleteDataResultDeserializer.class) public class DeleteDataResult { /** List of primary keys for rows that failed to process. */ - @JsonProperty("failed_primary_keys") private Collection failedPrimaryKeys; /** Count of successfully processed rows. 
*/ - @JsonProperty("success_row_count") private Long successRowCount; public DeleteDataResult setFailedPrimaryKeys(Collection failedPrimaryKeys) { @@ -57,4 +66,40 @@ public String toString() { .add("successRowCount", successRowCount) .toString(); } + + DeleteDataResultPb toPb() { + DeleteDataResultPb pb = new DeleteDataResultPb(); + pb.setFailedPrimaryKeys(failedPrimaryKeys); + pb.setSuccessRowCount(successRowCount); + + return pb; + } + + static DeleteDataResult fromPb(DeleteDataResultPb pb) { + DeleteDataResult model = new DeleteDataResult(); + model.setFailedPrimaryKeys(pb.getFailedPrimaryKeys()); + model.setSuccessRowCount(pb.getSuccessRowCount()); + + return model; + } + + public static class DeleteDataResultSerializer extends JsonSerializer { + @Override + public void serialize(DeleteDataResult value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDataResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDataResultDeserializer extends JsonDeserializer { + @Override + public DeleteDataResult deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDataResultPb pb = mapper.readValue(p, DeleteDataResultPb.class); + return DeleteDataResult.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResultPb.java new file mode 100755 index 000000000..9e4db3bf3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResultPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DeleteDataResultPb { + @JsonProperty("failed_primary_keys") + private Collection failedPrimaryKeys; + + @JsonProperty("success_row_count") + private Long successRowCount; + + public DeleteDataResultPb setFailedPrimaryKeys(Collection failedPrimaryKeys) { + this.failedPrimaryKeys = failedPrimaryKeys; + return this; + } + + public Collection getFailedPrimaryKeys() { + return failedPrimaryKeys; + } + + public DeleteDataResultPb setSuccessRowCount(Long successRowCount) { + this.successRowCount = successRowCount; + return this; + } + + public Long getSuccessRowCount() { + return successRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDataResultPb that = (DeleteDataResultPb) o; + return Objects.equals(failedPrimaryKeys, that.failedPrimaryKeys) + && Objects.equals(successRowCount, that.successRowCount); + } + + @Override + public int hashCode() { + return Objects.hash(failedPrimaryKeys, successRowCount); + } + + @Override + public String toString() { + return new ToStringer(DeleteDataResultPb.class) + .add("failedPrimaryKeys", failedPrimaryKeys) + .add("successRowCount", successRowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java index aa5783a57..9ca0bf5e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java @@ -3,21 +3,30 @@ package com.databricks.sdk.service.vectorsearch; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Delete data from index */ @Generated +@JsonSerialize(using = DeleteDataVectorIndexRequest.DeleteDataVectorIndexRequestSerializer.class) +@JsonDeserialize( + using = DeleteDataVectorIndexRequest.DeleteDataVectorIndexRequestDeserializer.class) public class DeleteDataVectorIndexRequest { /** Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. */ - @JsonIgnore private String indexName; + private String indexName; /** List of primary keys for the data to be deleted. 
*/ - @JsonIgnore - @QueryParam("primary_keys") private Collection primaryKeys; public DeleteDataVectorIndexRequest setIndexName(String indexName) { @@ -59,4 +68,43 @@ public String toString() { .add("primaryKeys", primaryKeys) .toString(); } + + DeleteDataVectorIndexRequestPb toPb() { + DeleteDataVectorIndexRequestPb pb = new DeleteDataVectorIndexRequestPb(); + pb.setIndexName(indexName); + pb.setPrimaryKeys(primaryKeys); + + return pb; + } + + static DeleteDataVectorIndexRequest fromPb(DeleteDataVectorIndexRequestPb pb) { + DeleteDataVectorIndexRequest model = new DeleteDataVectorIndexRequest(); + model.setIndexName(pb.getIndexName()); + model.setPrimaryKeys(pb.getPrimaryKeys()); + + return model; + } + + public static class DeleteDataVectorIndexRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDataVectorIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDataVectorIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDataVectorIndexRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteDataVectorIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDataVectorIndexRequestPb pb = mapper.readValue(p, DeleteDataVectorIndexRequestPb.class); + return DeleteDataVectorIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequestPb.java new file mode 100755 index 000000000..0ddcf36fa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +/** Delete data from index */ +@Generated +class DeleteDataVectorIndexRequestPb { + @JsonIgnore private String indexName; + + @JsonIgnore + @QueryParam("primary_keys") + private Collection primaryKeys; + + public DeleteDataVectorIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + public DeleteDataVectorIndexRequestPb setPrimaryKeys(Collection primaryKeys) { + this.primaryKeys = primaryKeys; + return this; + } + + public Collection getPrimaryKeys() { + return primaryKeys; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDataVectorIndexRequestPb that = (DeleteDataVectorIndexRequestPb) o; + return Objects.equals(indexName, that.indexName) + && Objects.equals(primaryKeys, that.primaryKeys); + } + + @Override + public int hashCode() { + return 
Objects.hash(indexName, primaryKeys); + } + + @Override + public String toString() { + return new ToStringer(DeleteDataVectorIndexRequestPb.class) + .add("indexName", indexName) + .add("primaryKeys", primaryKeys) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java index cb623de18..7cba12e9f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteDataVectorIndexResponse.DeleteDataVectorIndexResponseSerializer.class) +@JsonDeserialize( + using = DeleteDataVectorIndexResponse.DeleteDataVectorIndexResponseDeserializer.class) public class DeleteDataVectorIndexResponse { /** Result of the upsert or delete operation. */ - @JsonProperty("result") private DeleteDataResult result; /** Status of the delete operation. 
*/ - @JsonProperty("status") private DeleteDataStatus status; public DeleteDataVectorIndexResponse setResult(DeleteDataResult result) { @@ -55,4 +65,44 @@ public String toString() { .add("status", status) .toString(); } + + DeleteDataVectorIndexResponsePb toPb() { + DeleteDataVectorIndexResponsePb pb = new DeleteDataVectorIndexResponsePb(); + pb.setResult(result); + pb.setStatus(status); + + return pb; + } + + static DeleteDataVectorIndexResponse fromPb(DeleteDataVectorIndexResponsePb pb) { + DeleteDataVectorIndexResponse model = new DeleteDataVectorIndexResponse(); + model.setResult(pb.getResult()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class DeleteDataVectorIndexResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteDataVectorIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteDataVectorIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDataVectorIndexResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteDataVectorIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteDataVectorIndexResponsePb pb = + mapper.readValue(p, DeleteDataVectorIndexResponsePb.class); + return DeleteDataVectorIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponsePb.java new file mode 100755 index 000000000..cbee1bae5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteDataVectorIndexResponsePb { + @JsonProperty("result") + private DeleteDataResult result; + + @JsonProperty("status") + private DeleteDataStatus status; + + public DeleteDataVectorIndexResponsePb setResult(DeleteDataResult result) { + this.result = result; + return this; + } + + public DeleteDataResult getResult() { + return result; + } + + public DeleteDataVectorIndexResponsePb setStatus(DeleteDataStatus status) { + this.status = status; + return this; + } + + public DeleteDataStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDataVectorIndexResponsePb that = (DeleteDataVectorIndexResponsePb) o; + return Objects.equals(result, that.result) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(result, status); + } + + @Override + public String toString() { + return new 
ToStringer(DeleteDataVectorIndexResponsePb.class) + .add("result", result) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java index 43a984847..6d5eae521 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an endpoint */ @Generated +@JsonSerialize(using = DeleteEndpointRequest.DeleteEndpointRequestSerializer.class) +@JsonDeserialize(using = DeleteEndpointRequest.DeleteEndpointRequestDeserializer.class) public class DeleteEndpointRequest { /** Name of the vector search endpoint */ - @JsonIgnore private String endpointName; + private String endpointName; public DeleteEndpointRequest setEndpointName(String endpointName) { this.endpointName = endpointName; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteEndpointRequest.class).add("endpointName", endpointName).toString(); } + + DeleteEndpointRequestPb toPb() { + 
DeleteEndpointRequestPb pb = new DeleteEndpointRequestPb(); + pb.setEndpointName(endpointName); + + return pb; + } + + static DeleteEndpointRequest fromPb(DeleteEndpointRequestPb pb) { + DeleteEndpointRequest model = new DeleteEndpointRequest(); + model.setEndpointName(pb.getEndpointName()); + + return model; + } + + public static class DeleteEndpointRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteEndpointRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteEndpointRequestPb pb = mapper.readValue(p, DeleteEndpointRequestPb.class); + return DeleteEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequestPb.java new file mode 100755 index 000000000..ca2eee9b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an endpoint */ +@Generated +class DeleteEndpointRequestPb { + @JsonIgnore private String endpointName; + + public DeleteEndpointRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteEndpointRequestPb that = (DeleteEndpointRequestPb) o; + return Objects.equals(endpointName, that.endpointName); + } + + @Override + public int hashCode() { + return Objects.hash(endpointName); + } + + @Override + public String toString() { + return new ToStringer(DeleteEndpointRequestPb.class) + .add("endpointName", endpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java index 1dd487c16..486c01375 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteEndpointResponse.DeleteEndpointResponseSerializer.class) +@JsonDeserialize(using = DeleteEndpointResponse.DeleteEndpointResponseDeserializer.class) public class DeleteEndpointResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteEndpointResponse.class).toString(); } + + DeleteEndpointResponsePb toPb() { + DeleteEndpointResponsePb pb = new DeleteEndpointResponsePb(); + + return pb; + } + + static DeleteEndpointResponse fromPb(DeleteEndpointResponsePb pb) { + DeleteEndpointResponse model = new DeleteEndpointResponse(); + + return model; + } + + public static class DeleteEndpointResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteEndpointResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteEndpointResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteEndpointResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteEndpointResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteEndpointResponsePb pb = mapper.readValue(p, DeleteEndpointResponsePb.class); + return DeleteEndpointResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponsePb.java new file mode 100755 index 000000000..08193b77e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteEndpointResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteEndpointResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java index b4114582b..1caad8ecf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete an index */ @Generated +@JsonSerialize(using = DeleteIndexRequest.DeleteIndexRequestSerializer.class) +@JsonDeserialize(using = DeleteIndexRequest.DeleteIndexRequestDeserializer.class) public class DeleteIndexRequest { /** Name of the index */ - @JsonIgnore private String indexName; + private String indexName; public DeleteIndexRequest setIndexName(String indexName) { this.indexName = indexName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteIndexRequest.class).add("indexName", indexName).toString(); } + + DeleteIndexRequestPb toPb() { + DeleteIndexRequestPb pb = new DeleteIndexRequestPb(); + pb.setIndexName(indexName); + + return pb; + } + + static DeleteIndexRequest fromPb(DeleteIndexRequestPb pb) { + DeleteIndexRequest model = new DeleteIndexRequest(); + model.setIndexName(pb.getIndexName()); + + return model; + } + + public static class DeleteIndexRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteIndexRequestDeserializer extends JsonDeserializer { + @Override + public DeleteIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteIndexRequestPb pb = mapper.readValue(p, DeleteIndexRequestPb.class); + return DeleteIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequestPb.java new file mode 100755 index 000000000..648960d04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete an index */ +@Generated +class DeleteIndexRequestPb { + @JsonIgnore private String indexName; + + public DeleteIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteIndexRequestPb that = (DeleteIndexRequestPb) o; + return Objects.equals(indexName, that.indexName); + } + + @Override + public int hashCode() { + return Objects.hash(indexName); + } + + @Override + public String toString() { + return new ToStringer(DeleteIndexRequestPb.class).add("indexName", indexName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java index 0acf66aac..e61da0862 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteIndexResponse.DeleteIndexResponseSerializer.class) +@JsonDeserialize(using = DeleteIndexResponse.DeleteIndexResponseDeserializer.class) public class DeleteIndexResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteIndexResponse.class).toString(); } + + DeleteIndexResponsePb toPb() { + DeleteIndexResponsePb pb = new DeleteIndexResponsePb(); + + return pb; + } + + static DeleteIndexResponse fromPb(DeleteIndexResponsePb pb) { + DeleteIndexResponse model = new DeleteIndexResponse(); + + return model; + } + + public static class DeleteIndexResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteIndexResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteIndexResponsePb pb = mapper.readValue(p, DeleteIndexResponsePb.class); + return DeleteIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponsePb.java new file mode 100755 index 000000000..6cc926e18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteIndexResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteIndexResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java index e893e2b55..11b1f2fc9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java @@ -4,33 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; 
+import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = DeltaSyncVectorIndexSpecRequest.DeltaSyncVectorIndexSpecRequestSerializer.class) +@JsonDeserialize( + using = DeltaSyncVectorIndexSpecRequest.DeltaSyncVectorIndexSpecRequestDeserializer.class) public class DeltaSyncVectorIndexSpecRequest { /** * [Optional] Select the columns to sync with the vector index. If you leave this field blank, all * columns from the source table are synced with the index. The primary key column and embedding * source column or embedding vector column are always synced. */ - @JsonProperty("columns_to_sync") private Collection columnsToSync; /** The columns that contain the embedding source. */ - @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; /** The columns that contain the embedding vectors. */ - @JsonProperty("embedding_vector_columns") private Collection embeddingVectorColumns; /** * [Optional] Name of the Delta table to sync the vector index contents and computed embeddings * to. */ - @JsonProperty("embedding_writeback_table") private String embeddingWritebackTable; /** @@ -40,11 +49,9 @@ public class DeltaSyncVectorIndexSpecRequest { * `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it * arrives in the source table to keep vector index fresh. */ - @JsonProperty("pipeline_type") private PipelineType pipelineType; /** The name of the source table. 
*/ - @JsonProperty("source_table") private String sourceTable; public DeltaSyncVectorIndexSpecRequest setColumnsToSync(Collection columnsToSync) { @@ -139,4 +146,52 @@ public String toString() { .add("sourceTable", sourceTable) .toString(); } + + DeltaSyncVectorIndexSpecRequestPb toPb() { + DeltaSyncVectorIndexSpecRequestPb pb = new DeltaSyncVectorIndexSpecRequestPb(); + pb.setColumnsToSync(columnsToSync); + pb.setEmbeddingSourceColumns(embeddingSourceColumns); + pb.setEmbeddingVectorColumns(embeddingVectorColumns); + pb.setEmbeddingWritebackTable(embeddingWritebackTable); + pb.setPipelineType(pipelineType); + pb.setSourceTable(sourceTable); + + return pb; + } + + static DeltaSyncVectorIndexSpecRequest fromPb(DeltaSyncVectorIndexSpecRequestPb pb) { + DeltaSyncVectorIndexSpecRequest model = new DeltaSyncVectorIndexSpecRequest(); + model.setColumnsToSync(pb.getColumnsToSync()); + model.setEmbeddingSourceColumns(pb.getEmbeddingSourceColumns()); + model.setEmbeddingVectorColumns(pb.getEmbeddingVectorColumns()); + model.setEmbeddingWritebackTable(pb.getEmbeddingWritebackTable()); + model.setPipelineType(pb.getPipelineType()); + model.setSourceTable(pb.getSourceTable()); + + return model; + } + + public static class DeltaSyncVectorIndexSpecRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeltaSyncVectorIndexSpecRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSyncVectorIndexSpecRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSyncVectorIndexSpecRequestDeserializer + extends JsonDeserializer { + @Override + public DeltaSyncVectorIndexSpecRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSyncVectorIndexSpecRequestPb pb = + mapper.readValue(p, DeltaSyncVectorIndexSpecRequestPb.class); + return DeltaSyncVectorIndexSpecRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequestPb.java new file mode 100755 index 000000000..140c834d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequestPb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DeltaSyncVectorIndexSpecRequestPb { + @JsonProperty("columns_to_sync") + private Collection columnsToSync; + + @JsonProperty("embedding_source_columns") + private Collection embeddingSourceColumns; + + @JsonProperty("embedding_vector_columns") + private Collection embeddingVectorColumns; + + @JsonProperty("embedding_writeback_table") + private String embeddingWritebackTable; + + @JsonProperty("pipeline_type") + private PipelineType pipelineType; + + @JsonProperty("source_table") + private String sourceTable; + + public DeltaSyncVectorIndexSpecRequestPb setColumnsToSync(Collection columnsToSync) { + this.columnsToSync = columnsToSync; + return this; + } + + public Collection getColumnsToSync() { + return columnsToSync; + } + + public DeltaSyncVectorIndexSpecRequestPb setEmbeddingSourceColumns( + Collection embeddingSourceColumns) { + this.embeddingSourceColumns = embeddingSourceColumns; + return this; + } + + public Collection getEmbeddingSourceColumns() { + return 
embeddingSourceColumns; + } + + public DeltaSyncVectorIndexSpecRequestPb setEmbeddingVectorColumns( + Collection embeddingVectorColumns) { + this.embeddingVectorColumns = embeddingVectorColumns; + return this; + } + + public Collection getEmbeddingVectorColumns() { + return embeddingVectorColumns; + } + + public DeltaSyncVectorIndexSpecRequestPb setEmbeddingWritebackTable( + String embeddingWritebackTable) { + this.embeddingWritebackTable = embeddingWritebackTable; + return this; + } + + public String getEmbeddingWritebackTable() { + return embeddingWritebackTable; + } + + public DeltaSyncVectorIndexSpecRequestPb setPipelineType(PipelineType pipelineType) { + this.pipelineType = pipelineType; + return this; + } + + public PipelineType getPipelineType() { + return pipelineType; + } + + public DeltaSyncVectorIndexSpecRequestPb setSourceTable(String sourceTable) { + this.sourceTable = sourceTable; + return this; + } + + public String getSourceTable() { + return sourceTable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSyncVectorIndexSpecRequestPb that = (DeltaSyncVectorIndexSpecRequestPb) o; + return Objects.equals(columnsToSync, that.columnsToSync) + && Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) + && Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable) + && Objects.equals(pipelineType, that.pipelineType) + && Objects.equals(sourceTable, that.sourceTable); + } + + @Override + public int hashCode() { + return Objects.hash( + columnsToSync, + embeddingSourceColumns, + embeddingVectorColumns, + embeddingWritebackTable, + pipelineType, + sourceTable); + } + + @Override + public String toString() { + return new ToStringer(DeltaSyncVectorIndexSpecRequestPb.class) + .add("columnsToSync", columnsToSync) + .add("embeddingSourceColumns", 
embeddingSourceColumns) + .add("embeddingVectorColumns", embeddingVectorColumns) + .add("embeddingWritebackTable", embeddingWritebackTable) + .add("pipelineType", pipelineType) + .add("sourceTable", sourceTable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java index 9f2f17700..b9e205c0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java @@ -4,29 +4,38 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = DeltaSyncVectorIndexSpecResponse.DeltaSyncVectorIndexSpecResponseSerializer.class) +@JsonDeserialize( + using = DeltaSyncVectorIndexSpecResponse.DeltaSyncVectorIndexSpecResponseDeserializer.class) public class DeltaSyncVectorIndexSpecResponse { /** The columns that contain the embedding source. */ - @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; /** The columns that contain the embedding vectors. 
*/ - @JsonProperty("embedding_vector_columns") private Collection embeddingVectorColumns; /** * [Optional] Name of the Delta table to sync the vector index contents and computed embeddings * to. */ - @JsonProperty("embedding_writeback_table") private String embeddingWritebackTable; /** The ID of the pipeline that is used to sync the index. */ - @JsonProperty("pipeline_id") private String pipelineId; /** @@ -36,11 +45,9 @@ public class DeltaSyncVectorIndexSpecResponse { * `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it * arrives in the source table to keep vector index fresh. */ - @JsonProperty("pipeline_type") private PipelineType pipelineType; /** The name of the source table. */ - @JsonProperty("source_table") private String sourceTable; public DeltaSyncVectorIndexSpecResponse setEmbeddingSourceColumns( @@ -135,4 +142,52 @@ public String toString() { .add("sourceTable", sourceTable) .toString(); } + + DeltaSyncVectorIndexSpecResponsePb toPb() { + DeltaSyncVectorIndexSpecResponsePb pb = new DeltaSyncVectorIndexSpecResponsePb(); + pb.setEmbeddingSourceColumns(embeddingSourceColumns); + pb.setEmbeddingVectorColumns(embeddingVectorColumns); + pb.setEmbeddingWritebackTable(embeddingWritebackTable); + pb.setPipelineId(pipelineId); + pb.setPipelineType(pipelineType); + pb.setSourceTable(sourceTable); + + return pb; + } + + static DeltaSyncVectorIndexSpecResponse fromPb(DeltaSyncVectorIndexSpecResponsePb pb) { + DeltaSyncVectorIndexSpecResponse model = new DeltaSyncVectorIndexSpecResponse(); + model.setEmbeddingSourceColumns(pb.getEmbeddingSourceColumns()); + model.setEmbeddingVectorColumns(pb.getEmbeddingVectorColumns()); + model.setEmbeddingWritebackTable(pb.getEmbeddingWritebackTable()); + model.setPipelineId(pb.getPipelineId()); + model.setPipelineType(pb.getPipelineType()); + model.setSourceTable(pb.getSourceTable()); + + return model; + } + + public static class DeltaSyncVectorIndexSpecResponseSerializer + extends 
JsonSerializer { + @Override + public void serialize( + DeltaSyncVectorIndexSpecResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeltaSyncVectorIndexSpecResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeltaSyncVectorIndexSpecResponseDeserializer + extends JsonDeserializer { + @Override + public DeltaSyncVectorIndexSpecResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeltaSyncVectorIndexSpecResponsePb pb = + mapper.readValue(p, DeltaSyncVectorIndexSpecResponsePb.class); + return DeltaSyncVectorIndexSpecResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponsePb.java new file mode 100755 index 000000000..aa42d1ef2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponsePb.java @@ -0,0 +1,123 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DeltaSyncVectorIndexSpecResponsePb { + @JsonProperty("embedding_source_columns") + private Collection embeddingSourceColumns; + + @JsonProperty("embedding_vector_columns") + private Collection embeddingVectorColumns; + + @JsonProperty("embedding_writeback_table") + private String embeddingWritebackTable; + + @JsonProperty("pipeline_id") + private String pipelineId; + + @JsonProperty("pipeline_type") + private PipelineType pipelineType; + + @JsonProperty("source_table") + private String sourceTable; + + public DeltaSyncVectorIndexSpecResponsePb setEmbeddingSourceColumns( + Collection embeddingSourceColumns) { + this.embeddingSourceColumns = embeddingSourceColumns; + return this; + } + + public Collection getEmbeddingSourceColumns() { + return embeddingSourceColumns; + } + + public DeltaSyncVectorIndexSpecResponsePb setEmbeddingVectorColumns( + Collection embeddingVectorColumns) { + this.embeddingVectorColumns = embeddingVectorColumns; + return this; + } + + public Collection getEmbeddingVectorColumns() { + return embeddingVectorColumns; + } + + public DeltaSyncVectorIndexSpecResponsePb setEmbeddingWritebackTable( + String embeddingWritebackTable) { + this.embeddingWritebackTable = embeddingWritebackTable; + return this; + } + + public String getEmbeddingWritebackTable() { + return embeddingWritebackTable; + } + + public DeltaSyncVectorIndexSpecResponsePb setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + public DeltaSyncVectorIndexSpecResponsePb setPipelineType(PipelineType pipelineType) { + this.pipelineType = pipelineType; + return this; + } + + public PipelineType getPipelineType() { + 
return pipelineType; + } + + public DeltaSyncVectorIndexSpecResponsePb setSourceTable(String sourceTable) { + this.sourceTable = sourceTable; + return this; + } + + public String getSourceTable() { + return sourceTable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSyncVectorIndexSpecResponsePb that = (DeltaSyncVectorIndexSpecResponsePb) o; + return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) + && Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable) + && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(pipelineType, that.pipelineType) + && Objects.equals(sourceTable, that.sourceTable); + } + + @Override + public int hashCode() { + return Objects.hash( + embeddingSourceColumns, + embeddingVectorColumns, + embeddingWritebackTable, + pipelineId, + pipelineType, + sourceTable); + } + + @Override + public String toString() { + return new ToStringer(DeltaSyncVectorIndexSpecResponsePb.class) + .add("embeddingSourceColumns", embeddingSourceColumns) + .add("embeddingVectorColumns", embeddingVectorColumns) + .add("embeddingWritebackTable", embeddingWritebackTable) + .add("pipelineId", pipelineId) + .add("pipelineType", pipelineType) + .add("sourceTable", sourceTable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java index 579c22ce3..8c6e7ed60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = DirectAccessVectorIndexSpec.DirectAccessVectorIndexSpecSerializer.class) +@JsonDeserialize(using = DirectAccessVectorIndexSpec.DirectAccessVectorIndexSpecDeserializer.class) public class DirectAccessVectorIndexSpec { /** The columns that contain the embedding source. The format should be array[double]. */ - @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; /** The columns that contain the embedding vectors. The format should be array[double]. */ - @JsonProperty("embedding_vector_columns") private Collection embeddingVectorColumns; /** @@ -23,7 +32,6 @@ public class DirectAccessVectorIndexSpec { * `double`, `boolean`, `string`, `date`, `timestamp`. Supported types for vector column: * `array`, `array`,`. 
*/ - @JsonProperty("schema_json") private String schemaJson; public DirectAccessVectorIndexSpec setEmbeddingSourceColumns( @@ -78,4 +86,45 @@ public String toString() { .add("schemaJson", schemaJson) .toString(); } + + DirectAccessVectorIndexSpecPb toPb() { + DirectAccessVectorIndexSpecPb pb = new DirectAccessVectorIndexSpecPb(); + pb.setEmbeddingSourceColumns(embeddingSourceColumns); + pb.setEmbeddingVectorColumns(embeddingVectorColumns); + pb.setSchemaJson(schemaJson); + + return pb; + } + + static DirectAccessVectorIndexSpec fromPb(DirectAccessVectorIndexSpecPb pb) { + DirectAccessVectorIndexSpec model = new DirectAccessVectorIndexSpec(); + model.setEmbeddingSourceColumns(pb.getEmbeddingSourceColumns()); + model.setEmbeddingVectorColumns(pb.getEmbeddingVectorColumns()); + model.setSchemaJson(pb.getSchemaJson()); + + return model; + } + + public static class DirectAccessVectorIndexSpecSerializer + extends JsonSerializer { + @Override + public void serialize( + DirectAccessVectorIndexSpec value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DirectAccessVectorIndexSpecPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DirectAccessVectorIndexSpecDeserializer + extends JsonDeserializer { + @Override + public DirectAccessVectorIndexSpec deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DirectAccessVectorIndexSpecPb pb = mapper.readValue(p, DirectAccessVectorIndexSpecPb.class); + return DirectAccessVectorIndexSpec.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpecPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpecPb.java new file mode 100755 index 000000000..752ba5ad5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpecPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class DirectAccessVectorIndexSpecPb { + @JsonProperty("embedding_source_columns") + private Collection embeddingSourceColumns; + + @JsonProperty("embedding_vector_columns") + private Collection embeddingVectorColumns; + + @JsonProperty("schema_json") + private String schemaJson; + + public DirectAccessVectorIndexSpecPb setEmbeddingSourceColumns( + Collection embeddingSourceColumns) { + this.embeddingSourceColumns = embeddingSourceColumns; + return this; + } + + public Collection getEmbeddingSourceColumns() { + return embeddingSourceColumns; + } + + public DirectAccessVectorIndexSpecPb setEmbeddingVectorColumns( + Collection embeddingVectorColumns) { + this.embeddingVectorColumns = embeddingVectorColumns; + return this; + } + + public Collection getEmbeddingVectorColumns() { + return embeddingVectorColumns; + } + + public DirectAccessVectorIndexSpecPb setSchemaJson(String schemaJson) { + this.schemaJson = schemaJson; + return this; + } + + public String getSchemaJson() { + return schemaJson; + } + 
+ @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DirectAccessVectorIndexSpecPb that = (DirectAccessVectorIndexSpecPb) o; + return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) + && Objects.equals(schemaJson, that.schemaJson); + } + + @Override + public int hashCode() { + return Objects.hash(embeddingSourceColumns, embeddingVectorColumns, schemaJson); + } + + @Override + public String toString() { + return new ToStringer(DirectAccessVectorIndexSpecPb.class) + .add("embeddingSourceColumns", embeddingSourceColumns) + .add("embeddingVectorColumns", embeddingVectorColumns) + .add("schemaJson", schemaJson) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java index 3ef8c00ec..a437421b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = EmbeddingSourceColumn.EmbeddingSourceColumnSerializer.class) +@JsonDeserialize(using = EmbeddingSourceColumn.EmbeddingSourceColumnDeserializer.class) public class EmbeddingSourceColumn { /** Name of the embedding model endpoint */ - @JsonProperty("embedding_model_endpoint_name") private String embeddingModelEndpointName; /** Name of the column */ - @JsonProperty("name") private String name; public EmbeddingSourceColumn setEmbeddingModelEndpointName(String embeddingModelEndpointName) { @@ -56,4 +65,43 @@ public String toString() { .add("name", name) .toString(); } + + EmbeddingSourceColumnPb toPb() { + EmbeddingSourceColumnPb pb = new EmbeddingSourceColumnPb(); + pb.setEmbeddingModelEndpointName(embeddingModelEndpointName); + pb.setName(name); + + return pb; + } + + static EmbeddingSourceColumn fromPb(EmbeddingSourceColumnPb pb) { + EmbeddingSourceColumn model = new EmbeddingSourceColumn(); + model.setEmbeddingModelEndpointName(pb.getEmbeddingModelEndpointName()); + model.setName(pb.getName()); + + return model; + } + + public static class EmbeddingSourceColumnSerializer + extends JsonSerializer { + @Override + public void serialize( + EmbeddingSourceColumn value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmbeddingSourceColumnPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmbeddingSourceColumnDeserializer + extends JsonDeserializer { + @Override + public EmbeddingSourceColumn deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmbeddingSourceColumnPb pb = mapper.readValue(p, EmbeddingSourceColumnPb.class); + return EmbeddingSourceColumn.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumnPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumnPb.java new file mode 100755 index 000000000..c5e82c69c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumnPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EmbeddingSourceColumnPb { + @JsonProperty("embedding_model_endpoint_name") + private String embeddingModelEndpointName; + + @JsonProperty("name") + private String name; + + public EmbeddingSourceColumnPb setEmbeddingModelEndpointName(String embeddingModelEndpointName) { + this.embeddingModelEndpointName = embeddingModelEndpointName; + return this; + } + + public String getEmbeddingModelEndpointName() { + return embeddingModelEndpointName; + } + + public EmbeddingSourceColumnPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EmbeddingSourceColumnPb that = (EmbeddingSourceColumnPb) o; + return Objects.equals(embeddingModelEndpointName, that.embeddingModelEndpointName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(embeddingModelEndpointName, name); + } + + @Override + public String toString() { + 
return new ToStringer(EmbeddingSourceColumnPb.class) + .add("embeddingModelEndpointName", embeddingModelEndpointName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java index dba295871..21b72703f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = EmbeddingVectorColumn.EmbeddingVectorColumnSerializer.class) +@JsonDeserialize(using = EmbeddingVectorColumn.EmbeddingVectorColumnDeserializer.class) public class EmbeddingVectorColumn { /** Dimension of the embedding vector */ - @JsonProperty("embedding_dimension") private Long embeddingDimension; /** Name of the column */ - @JsonProperty("name") private String name; public EmbeddingVectorColumn setEmbeddingDimension(Long embeddingDimension) { @@ -56,4 +65,43 @@ public String toString() { .add("name", name) .toString(); } + + EmbeddingVectorColumnPb toPb() { + EmbeddingVectorColumnPb pb = new 
EmbeddingVectorColumnPb(); + pb.setEmbeddingDimension(embeddingDimension); + pb.setName(name); + + return pb; + } + + static EmbeddingVectorColumn fromPb(EmbeddingVectorColumnPb pb) { + EmbeddingVectorColumn model = new EmbeddingVectorColumn(); + model.setEmbeddingDimension(pb.getEmbeddingDimension()); + model.setName(pb.getName()); + + return model; + } + + public static class EmbeddingVectorColumnSerializer + extends JsonSerializer { + @Override + public void serialize( + EmbeddingVectorColumn value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EmbeddingVectorColumnPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EmbeddingVectorColumnDeserializer + extends JsonDeserializer { + @Override + public EmbeddingVectorColumn deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EmbeddingVectorColumnPb pb = mapper.readValue(p, EmbeddingVectorColumnPb.class); + return EmbeddingVectorColumn.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumnPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumnPb.java new file mode 100755 index 000000000..efea4920b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumnPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class EmbeddingVectorColumnPb { + @JsonProperty("embedding_dimension") + private Long embeddingDimension; + + @JsonProperty("name") + private String name; + + public EmbeddingVectorColumnPb setEmbeddingDimension(Long embeddingDimension) { + this.embeddingDimension = embeddingDimension; + return this; + } + + public Long getEmbeddingDimension() { + return embeddingDimension; + } + + public EmbeddingVectorColumnPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EmbeddingVectorColumnPb that = (EmbeddingVectorColumnPb) o; + return Objects.equals(embeddingDimension, that.embeddingDimension) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(embeddingDimension, name); + } + + @Override + public String toString() { + return new ToStringer(EmbeddingVectorColumnPb.class) + .add("embeddingDimension", embeddingDimension) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java index f7b204ad1..956b8c26e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java @@ -4,54 +4,54 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = EndpointInfo.EndpointInfoSerializer.class) +@JsonDeserialize(using = EndpointInfo.EndpointInfoDeserializer.class) public class EndpointInfo { /** Timestamp of endpoint creation */ - @JsonProperty("creation_timestamp") private Long creationTimestamp; /** Creator of the endpoint */ - @JsonProperty("creator") private String creator; /** The custom tags assigned to the endpoint */ - @JsonProperty("custom_tags") private Collection customTags; /** The budget policy id applied to the endpoint */ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; /** Current status of the endpoint */ - @JsonProperty("endpoint_status") private EndpointStatus endpointStatus; /** Type of endpoint */ - @JsonProperty("endpoint_type") private EndpointType endpointType; /** Unique identifier of the endpoint */ - @JsonProperty("id") private String id; /** Timestamp of last update to the endpoint */ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; /** User who last updated the endpoint */ - @JsonProperty("last_updated_user") private String lastUpdatedUser; /** Name of the vector search endpoint */ - @JsonProperty("name") private String name; /** Number of indexes on the endpoint */ - @JsonProperty("num_indexes") private Long numIndexes; public EndpointInfo setCreationTimestamp(Long creationTimestamp) { @@ -203,4 +203,57 
@@ public String toString() { .add("numIndexes", numIndexes) .toString(); } + + EndpointInfoPb toPb() { + EndpointInfoPb pb = new EndpointInfoPb(); + pb.setCreationTimestamp(creationTimestamp); + pb.setCreator(creator); + pb.setCustomTags(customTags); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + pb.setEndpointStatus(endpointStatus); + pb.setEndpointType(endpointType); + pb.setId(id); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + pb.setLastUpdatedUser(lastUpdatedUser); + pb.setName(name); + pb.setNumIndexes(numIndexes); + + return pb; + } + + static EndpointInfo fromPb(EndpointInfoPb pb) { + EndpointInfo model = new EndpointInfo(); + model.setCreationTimestamp(pb.getCreationTimestamp()); + model.setCreator(pb.getCreator()); + model.setCustomTags(pb.getCustomTags()); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + model.setEndpointStatus(pb.getEndpointStatus()); + model.setEndpointType(pb.getEndpointType()); + model.setId(pb.getId()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + model.setLastUpdatedUser(pb.getLastUpdatedUser()); + model.setName(pb.getName()); + model.setNumIndexes(pb.getNumIndexes()); + + return model; + } + + public static class EndpointInfoSerializer extends JsonSerializer { + @Override + public void serialize(EndpointInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointInfoDeserializer extends JsonDeserializer { + @Override + public EndpointInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointInfoPb pb = mapper.readValue(p, EndpointInfoPb.class); + return EndpointInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfoPb.java new file mode 100755 index 000000000..60b8bcd3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfoPb.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class EndpointInfoPb { + @JsonProperty("creation_timestamp") + private Long creationTimestamp; + + @JsonProperty("creator") + private String creator; + + @JsonProperty("custom_tags") + private Collection customTags; + + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + @JsonProperty("endpoint_status") + private EndpointStatus endpointStatus; + + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + @JsonProperty("id") + private String id; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + @JsonProperty("last_updated_user") + private String lastUpdatedUser; + + @JsonProperty("name") + private String name; + + @JsonProperty("num_indexes") + private Long numIndexes; + + public EndpointInfoPb setCreationTimestamp(Long creationTimestamp) { + this.creationTimestamp = creationTimestamp; + return this; + } + + public Long getCreationTimestamp() { + return creationTimestamp; + } + + public EndpointInfoPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String 
getCreator() { + return creator; + } + + public EndpointInfoPb setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + public EndpointInfoPb setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public EndpointInfoPb setEndpointStatus(EndpointStatus endpointStatus) { + this.endpointStatus = endpointStatus; + return this; + } + + public EndpointStatus getEndpointStatus() { + return endpointStatus; + } + + public EndpointInfoPb setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public EndpointInfoPb setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public EndpointInfoPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public EndpointInfoPb setLastUpdatedUser(String lastUpdatedUser) { + this.lastUpdatedUser = lastUpdatedUser; + return this; + } + + public String getLastUpdatedUser() { + return lastUpdatedUser; + } + + public EndpointInfoPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EndpointInfoPb setNumIndexes(Long numIndexes) { + this.numIndexes = numIndexes; + return this; + } + + public Long getNumIndexes() { + return numIndexes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointInfoPb that = (EndpointInfoPb) o; + return Objects.equals(creationTimestamp, that.creationTimestamp) + && 
Objects.equals(creator, that.creator) + && Objects.equals(customTags, that.customTags) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(endpointStatus, that.endpointStatus) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(id, that.id) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(lastUpdatedUser, that.lastUpdatedUser) + && Objects.equals(name, that.name) + && Objects.equals(numIndexes, that.numIndexes); + } + + @Override + public int hashCode() { + return Objects.hash( + creationTimestamp, + creator, + customTags, + effectiveBudgetPolicyId, + endpointStatus, + endpointType, + id, + lastUpdatedTimestamp, + lastUpdatedUser, + name, + numIndexes); + } + + @Override + public String toString() { + return new ToStringer(EndpointInfoPb.class) + .add("creationTimestamp", creationTimestamp) + .add("creator", creator) + .add("customTags", customTags) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("endpointStatus", endpointStatus) + .add("endpointType", endpointType) + .add("id", id) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("lastUpdatedUser", lastUpdatedUser) + .add("name", name) + .add("numIndexes", numIndexes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java index 7b452f74a..c1cca2347 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Status information of an endpoint */ @Generated +@JsonSerialize(using = EndpointStatus.EndpointStatusSerializer.class) +@JsonDeserialize(using = EndpointStatus.EndpointStatusDeserializer.class) public class EndpointStatus { /** Additional status message */ - @JsonProperty("message") private String message; /** Current state of the endpoint */ - @JsonProperty("state") private EndpointStatusState state; public EndpointStatus setMessage(String message) { @@ -56,4 +65,40 @@ public String toString() { .add("state", state) .toString(); } + + EndpointStatusPb toPb() { + EndpointStatusPb pb = new EndpointStatusPb(); + pb.setMessage(message); + pb.setState(state); + + return pb; + } + + static EndpointStatus fromPb(EndpointStatusPb pb) { + EndpointStatus model = new EndpointStatus(); + model.setMessage(pb.getMessage()); + model.setState(pb.getState()); + + return model; + } + + public static class EndpointStatusSerializer extends JsonSerializer { + @Override + public void serialize(EndpointStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + EndpointStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class EndpointStatusDeserializer extends JsonDeserializer { + @Override + public EndpointStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + EndpointStatusPb pb = mapper.readValue(p, EndpointStatusPb.class); + return EndpointStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusPb.java new file mode 100755 index 000000000..3558b468c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status information of an endpoint */ +@Generated +class EndpointStatusPb { + @JsonProperty("message") + private String message; + + @JsonProperty("state") + private EndpointStatusState state; + + public EndpointStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public EndpointStatusPb setState(EndpointStatusState state) { + this.state = state; + return this; + } + + public EndpointStatusState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointStatusPb that = (EndpointStatusPb) o; + return Objects.equals(message, that.message) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(message, state); + } + + @Override + public String toString() { + return new ToStringer(EndpointStatusPb.class) + .add("message", message) + .add("state", state) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java index 3ae0561cf..f5919f0b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an endpoint */ @Generated +@JsonSerialize(using = GetEndpointRequest.GetEndpointRequestSerializer.class) +@JsonDeserialize(using = GetEndpointRequest.GetEndpointRequestDeserializer.class) public class GetEndpointRequest { /** Name of the endpoint */ - @JsonIgnore private String endpointName; + private String endpointName; public GetEndpointRequest setEndpointName(String endpointName) { this.endpointName = endpointName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetEndpointRequest.class).add("endpointName", endpointName).toString(); } + + GetEndpointRequestPb toPb() { + GetEndpointRequestPb pb = new GetEndpointRequestPb(); + pb.setEndpointName(endpointName); + + return pb; + } + + static GetEndpointRequest fromPb(GetEndpointRequestPb pb) { + 
GetEndpointRequest model = new GetEndpointRequest(); + model.setEndpointName(pb.getEndpointName()); + + return model; + } + + public static class GetEndpointRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetEndpointRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetEndpointRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetEndpointRequestDeserializer extends JsonDeserializer { + @Override + public GetEndpointRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetEndpointRequestPb pb = mapper.readValue(p, GetEndpointRequestPb.class); + return GetEndpointRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequestPb.java new file mode 100755 index 000000000..1c0981f0d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an endpoint */ +@Generated +class GetEndpointRequestPb { + @JsonIgnore private String endpointName; + + public GetEndpointRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEndpointRequestPb that = (GetEndpointRequestPb) o; + return Objects.equals(endpointName, that.endpointName); + } + + @Override + public int hashCode() { + return Objects.hash(endpointName); + } + + @Override + public String toString() { + return new ToStringer(GetEndpointRequestPb.class).add("endpointName", endpointName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java index be598b85b..c962f6fba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get an index */ @Generated +@JsonSerialize(using = GetIndexRequest.GetIndexRequestSerializer.class) +@JsonDeserialize(using = GetIndexRequest.GetIndexRequestDeserializer.class) public class GetIndexRequest { /** Name of the index */ - @JsonIgnore private String indexName; + private String indexName; public GetIndexRequest setIndexName(String indexName) { this.indexName = indexName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetIndexRequest.class).add("indexName", indexName).toString(); } + + GetIndexRequestPb toPb() { + GetIndexRequestPb pb = new GetIndexRequestPb(); + pb.setIndexName(indexName); + + return pb; + } + + static GetIndexRequest fromPb(GetIndexRequestPb pb) { + GetIndexRequest model = new GetIndexRequest(); + model.setIndexName(pb.getIndexName()); + + return model; + } + + public static class GetIndexRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetIndexRequestDeserializer extends JsonDeserializer { + @Override + public GetIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetIndexRequestPb pb = mapper.readValue(p, GetIndexRequestPb.class); + return GetIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequestPb.java new file mode 100755 index 000000000..e0665209e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get an index */ +@Generated +class GetIndexRequestPb { + @JsonIgnore private String indexName; + + public GetIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIndexRequestPb that = (GetIndexRequestPb) o; + return Objects.equals(indexName, that.indexName); + } + + @Override + public int hashCode() { + return Objects.hash(indexName); + } + + @Override + public String toString() { + return new ToStringer(GetIndexRequestPb.class).add("indexName", indexName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java index 6fd008732..4fb8e0409 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListEndpointResponse.ListEndpointResponseSerializer.class) +@JsonDeserialize(using = ListEndpointResponse.ListEndpointResponseDeserializer.class) public class ListEndpointResponse { /** An array of Endpoint objects */ - @JsonProperty("endpoints") private Collection endpoints; /** * A token that can be used to get the next page of results. If not present, there are no more * results to show. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; public ListEndpointResponse setEndpoints(Collection endpoints) { @@ -60,4 +69,42 @@ public String toString() { .add("nextPageToken", nextPageToken) .toString(); } + + ListEndpointResponsePb toPb() { + ListEndpointResponsePb pb = new ListEndpointResponsePb(); + pb.setEndpoints(endpoints); + pb.setNextPageToken(nextPageToken); + + return pb; + } + + static ListEndpointResponse fromPb(ListEndpointResponsePb pb) { + ListEndpointResponse model = new ListEndpointResponse(); + model.setEndpoints(pb.getEndpoints()); + model.setNextPageToken(pb.getNextPageToken()); + + return model; + } + + public static class ListEndpointResponseSerializer extends JsonSerializer { + @Override + public void serialize( + ListEndpointResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListEndpointResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListEndpointResponseDeserializer + extends JsonDeserializer { + @Override + public ListEndpointResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListEndpointResponsePb pb = mapper.readValue(p, ListEndpointResponsePb.class); + return ListEndpointResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponsePb.java new file mode 100755 index 000000000..26ab70288 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListEndpointResponsePb { + @JsonProperty("endpoints") + private Collection endpoints; + + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListEndpointResponsePb setEndpoints(Collection endpoints) { + this.endpoints = endpoints; + return this; + } + + public Collection getEndpoints() { + return endpoints; + } + + public ListEndpointResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListEndpointResponsePb that = (ListEndpointResponsePb) o; + return Objects.equals(endpoints, that.endpoints) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(endpoints, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListEndpointResponsePb.class) + .add("endpoints", endpoints) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java index 27e8ac8da..32827f629 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.vectorsearch; import com.databricks.sdk.support.Generated; -import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List all endpoints */ @Generated +@JsonSerialize(using = ListEndpointsRequest.ListEndpointsRequestSerializer.class) +@JsonDeserialize(using = ListEndpointsRequest.ListEndpointsRequestDeserializer.class) public class ListEndpointsRequest { /** Token for pagination */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListEndpointsRequest setPageToken(String pageToken) { @@ -42,4 +50,40 @@ public int hashCode() { public String toString() { return new ToStringer(ListEndpointsRequest.class).add("pageToken", pageToken).toString(); } + + ListEndpointsRequestPb toPb() { + ListEndpointsRequestPb pb = new ListEndpointsRequestPb(); + pb.setPageToken(pageToken); + + return pb; + } + + static ListEndpointsRequest fromPb(ListEndpointsRequestPb pb) { + ListEndpointsRequest model = new ListEndpointsRequest(); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListEndpointsRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListEndpointsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListEndpointsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListEndpointsRequestDeserializer + extends 
JsonDeserializer { + @Override + public ListEndpointsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListEndpointsRequestPb pb = mapper.readValue(p, ListEndpointsRequestPb.class); + return ListEndpointsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequestPb.java new file mode 100755 index 000000000..a4b139ff3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List all endpoints */ +@Generated +class ListEndpointsRequestPb { + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListEndpointsRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListEndpointsRequestPb that = (ListEndpointsRequestPb) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListEndpointsRequestPb.class).add("pageToken", pageToken).toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java index fd77629dd..7486ce970 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.vectorsearch; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List indexes */ @Generated +@JsonSerialize(using = ListIndexesRequest.ListIndexesRequestSerializer.class) +@JsonDeserialize(using = ListIndexesRequest.ListIndexesRequestDeserializer.class) public class ListIndexesRequest { /** Name of the endpoint */ - @JsonIgnore - @QueryParam("endpoint_name") private String endpointName; /** Token for pagination */ - @JsonIgnore - @QueryParam("page_token") private String pageToken; public ListIndexesRequest setEndpointName(String endpointName) { @@ -60,4 +66,40 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + ListIndexesRequestPb toPb() { + ListIndexesRequestPb pb = new ListIndexesRequestPb(); + pb.setEndpointName(endpointName); + 
pb.setPageToken(pageToken); + + return pb; + } + + static ListIndexesRequest fromPb(ListIndexesRequestPb pb) { + ListIndexesRequest model = new ListIndexesRequest(); + model.setEndpointName(pb.getEndpointName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class ListIndexesRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListIndexesRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListIndexesRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListIndexesRequestDeserializer extends JsonDeserializer { + @Override + public ListIndexesRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListIndexesRequestPb pb = mapper.readValue(p, ListIndexesRequestPb.class); + return ListIndexesRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequestPb.java new file mode 100755 index 000000000..bb8a1c013 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequestPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List indexes */ +@Generated +class ListIndexesRequestPb { + @JsonIgnore + @QueryParam("endpoint_name") + private String endpointName; + + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListIndexesRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public ListIndexesRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListIndexesRequestPb that = (ListIndexesRequestPb) o; + return Objects.equals(endpointName, that.endpointName) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(endpointName, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListIndexesRequestPb.class) + .add("endpointName", endpointName) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java index f26fda411..24a223dfc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListValue.ListValueSerializer.class) +@JsonDeserialize(using = ListValue.ListValueDeserializer.class) public class ListValue { /** Repeated field of dynamically typed values. */ - @JsonProperty("values") private Collection values; public ListValue setValues(Collection values) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(ListValue.class).add("values", values).toString(); } + + ListValuePb toPb() { + ListValuePb pb = new ListValuePb(); + pb.setValues(values); + + return pb; + } + + static ListValue fromPb(ListValuePb pb) { + ListValue model = new ListValue(); + model.setValues(pb.getValues()); + + return model; + } + + public static class ListValueSerializer extends JsonSerializer { + @Override + public void serialize(ListValue value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListValueDeserializer extends JsonDeserializer { + @Override + public ListValue deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListValuePb pb = mapper.readValue(p, ListValuePb.class); + return ListValue.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValuePb.java new file mode 100755 index 000000000..8f84fe979 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValuePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListValuePb { + @JsonProperty("values") + private Collection values; + + public ListValuePb setValues(Collection values) { + this.values = values; + return this; + } + + public Collection getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListValuePb that = (ListValuePb) o; + return Objects.equals(values, that.values); + } + + @Override + public int hashCode() { + return Objects.hash(values); + } + + @Override + public String toString() { + return new ToStringer(ListValuePb.class).add("values", values).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java index 657df2e84..d3eee7512 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListVectorIndexesResponse.ListVectorIndexesResponseSerializer.class) +@JsonDeserialize(using = ListVectorIndexesResponse.ListVectorIndexesResponseDeserializer.class) public class ListVectorIndexesResponse { /** * A token that can be used to get the next page of results. If not present, there are no more * results to show. 
*/ - @JsonProperty("next_page_token") private String nextPageToken; /** */ - @JsonProperty("vector_indexes") private Collection vectorIndexes; public ListVectorIndexesResponse setNextPageToken(String nextPageToken) { @@ -60,4 +69,43 @@ public String toString() { .add("vectorIndexes", vectorIndexes) .toString(); } + + ListVectorIndexesResponsePb toPb() { + ListVectorIndexesResponsePb pb = new ListVectorIndexesResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setVectorIndexes(vectorIndexes); + + return pb; + } + + static ListVectorIndexesResponse fromPb(ListVectorIndexesResponsePb pb) { + ListVectorIndexesResponse model = new ListVectorIndexesResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setVectorIndexes(pb.getVectorIndexes()); + + return model; + } + + public static class ListVectorIndexesResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListVectorIndexesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListVectorIndexesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListVectorIndexesResponseDeserializer + extends JsonDeserializer { + @Override + public ListVectorIndexesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListVectorIndexesResponsePb pb = mapper.readValue(p, ListVectorIndexesResponsePb.class); + return ListVectorIndexesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponsePb.java new file mode 100755 index 000000000..fc9ec0913 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListVectorIndexesResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("vector_indexes") + private Collection vectorIndexes; + + public ListVectorIndexesResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListVectorIndexesResponsePb setVectorIndexes(Collection vectorIndexes) { + this.vectorIndexes = vectorIndexes; + return this; + } + + public Collection getVectorIndexes() { + return vectorIndexes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListVectorIndexesResponsePb that = (ListVectorIndexesResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(vectorIndexes, that.vectorIndexes); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, vectorIndexes); + } + + 
@Override + public String toString() { + return new ToStringer(ListVectorIndexesResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("vectorIndexes", vectorIndexes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntry.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntry.java index 28f38cfcd..b670c253e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntry.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntry.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Key-value pair. */ @Generated +@JsonSerialize(using = MapStringValueEntry.MapStringValueEntrySerializer.class) +@JsonDeserialize(using = MapStringValueEntry.MapStringValueEntryDeserializer.class) public class MapStringValueEntry { /** Column name. */ - @JsonProperty("key") private String key; /** Column value, nullable. 
*/ - @JsonProperty("value") private Value value; public MapStringValueEntry setKey(String key) { @@ -53,4 +62,41 @@ public int hashCode() { public String toString() { return new ToStringer(MapStringValueEntry.class).add("key", key).add("value", value).toString(); } + + MapStringValueEntryPb toPb() { + MapStringValueEntryPb pb = new MapStringValueEntryPb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static MapStringValueEntry fromPb(MapStringValueEntryPb pb) { + MapStringValueEntry model = new MapStringValueEntry(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class MapStringValueEntrySerializer extends JsonSerializer { + @Override + public void serialize(MapStringValueEntry value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MapStringValueEntryPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MapStringValueEntryDeserializer + extends JsonDeserializer { + @Override + public MapStringValueEntry deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MapStringValueEntryPb pb = mapper.readValue(p, MapStringValueEntryPb.class); + return MapStringValueEntry.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntryPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntryPb.java new file mode 100755 index 000000000..ada7315f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntryPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Key-value pair. */ +@Generated +class MapStringValueEntryPb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private Value value; + + public MapStringValueEntryPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public MapStringValueEntryPb setValue(Value value) { + this.value = value; + return this; + } + + public Value getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MapStringValueEntryPb that = (MapStringValueEntryPb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(MapStringValueEntryPb.class) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java index a44466f2f..be46c3ba5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = MiniVectorIndex.MiniVectorIndexSerializer.class) +@JsonDeserialize(using = MiniVectorIndex.MiniVectorIndexDeserializer.class) public class MiniVectorIndex { /** The user who created the index. */ - @JsonProperty("creator") private String creator; /** Name of the endpoint associated with the index */ - @JsonProperty("endpoint_name") private String endpointName; /** @@ -24,15 +33,12 @@ public class MiniVectorIndex { * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages * index updates. */ - @JsonProperty("index_type") private VectorIndexType indexType; /** Name of the index */ - @JsonProperty("name") private String name; /** Primary key of the index */ - @JsonProperty("primary_key") private String primaryKey; public MiniVectorIndex setCreator(String creator) { @@ -107,4 +113,46 @@ public String toString() { .add("primaryKey", primaryKey) .toString(); } + + MiniVectorIndexPb toPb() { + MiniVectorIndexPb pb = new MiniVectorIndexPb(); + pb.setCreator(creator); + pb.setEndpointName(endpointName); + pb.setIndexType(indexType); + pb.setName(name); + pb.setPrimaryKey(primaryKey); + + return pb; + } + + static MiniVectorIndex fromPb(MiniVectorIndexPb pb) { + MiniVectorIndex model = new MiniVectorIndex(); + model.setCreator(pb.getCreator()); + model.setEndpointName(pb.getEndpointName()); + model.setIndexType(pb.getIndexType()); + model.setName(pb.getName()); + model.setPrimaryKey(pb.getPrimaryKey()); + + return model; + } + + public static class MiniVectorIndexSerializer extends JsonSerializer { + 
@Override + public void serialize(MiniVectorIndex value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MiniVectorIndexPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MiniVectorIndexDeserializer extends JsonDeserializer { + @Override + public MiniVectorIndex deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MiniVectorIndexPb pb = mapper.readValue(p, MiniVectorIndexPb.class); + return MiniVectorIndex.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndexPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndexPb.java new file mode 100755 index 000000000..8a24c3290 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndexPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MiniVectorIndexPb { + @JsonProperty("creator") + private String creator; + + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonProperty("index_type") + private VectorIndexType indexType; + + @JsonProperty("name") + private String name; + + @JsonProperty("primary_key") + private String primaryKey; + + public MiniVectorIndexPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public MiniVectorIndexPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public MiniVectorIndexPb setIndexType(VectorIndexType indexType) { + this.indexType = indexType; + return this; + } + + public VectorIndexType getIndexType() { + return indexType; + } + + public MiniVectorIndexPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public MiniVectorIndexPb setPrimaryKey(String primaryKey) { + this.primaryKey = primaryKey; + return this; + } + + public String getPrimaryKey() { + return primaryKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MiniVectorIndexPb that = (MiniVectorIndexPb) o; + return Objects.equals(creator, that.creator) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexType, that.indexType) + && Objects.equals(name, that.name) + && Objects.equals(primaryKey, that.primaryKey); + } + + @Override + public int hashCode() { + return Objects.hash(creator, endpointName, indexType, name, primaryKey); + } + + @Override + public String 
toString() { + return new ToStringer(MiniVectorIndexPb.class) + .add("creator", creator) + .add("endpointName", endpointName) + .add("indexType", indexType) + .add("name", name) + .add("primaryKey", primaryKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java index c1d571df8..c645b8ce4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = PatchEndpointBudgetPolicyRequest.PatchEndpointBudgetPolicyRequestSerializer.class) +@JsonDeserialize( + using = PatchEndpointBudgetPolicyRequest.PatchEndpointBudgetPolicyRequestDeserializer.class) public class PatchEndpointBudgetPolicyRequest { /** The budget policy id to be applied */ - @JsonProperty("budget_policy_id") private String budgetPolicyId; /** Name of the vector search endpoint */ - @JsonIgnore private String 
endpointName; + private String endpointName; public PatchEndpointBudgetPolicyRequest setBudgetPolicyId(String budgetPolicyId) { this.budgetPolicyId = budgetPolicyId; @@ -56,4 +67,44 @@ public String toString() { .add("endpointName", endpointName) .toString(); } + + PatchEndpointBudgetPolicyRequestPb toPb() { + PatchEndpointBudgetPolicyRequestPb pb = new PatchEndpointBudgetPolicyRequestPb(); + pb.setBudgetPolicyId(budgetPolicyId); + pb.setEndpointName(endpointName); + + return pb; + } + + static PatchEndpointBudgetPolicyRequest fromPb(PatchEndpointBudgetPolicyRequestPb pb) { + PatchEndpointBudgetPolicyRequest model = new PatchEndpointBudgetPolicyRequest(); + model.setBudgetPolicyId(pb.getBudgetPolicyId()); + model.setEndpointName(pb.getEndpointName()); + + return model; + } + + public static class PatchEndpointBudgetPolicyRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + PatchEndpointBudgetPolicyRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchEndpointBudgetPolicyRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchEndpointBudgetPolicyRequestDeserializer + extends JsonDeserializer { + @Override + public PatchEndpointBudgetPolicyRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchEndpointBudgetPolicyRequestPb pb = + mapper.readValue(p, PatchEndpointBudgetPolicyRequestPb.class); + return PatchEndpointBudgetPolicyRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequestPb.java new file mode 100755 index 000000000..caf500805 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PatchEndpointBudgetPolicyRequestPb { + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + @JsonIgnore private String endpointName; + + public PatchEndpointBudgetPolicyRequestPb setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public PatchEndpointBudgetPolicyRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PatchEndpointBudgetPolicyRequestPb that = (PatchEndpointBudgetPolicyRequestPb) o; + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(endpointName, that.endpointName); + } + + @Override + public int 
hashCode() { + return Objects.hash(budgetPolicyId, endpointName); + } + + @Override + public String toString() { + return new ToStringer(PatchEndpointBudgetPolicyRequestPb.class) + .add("budgetPolicyId", budgetPolicyId) + .add("endpointName", endpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java index 2a0e6337b..745566e7c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java @@ -4,13 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = PatchEndpointBudgetPolicyResponse.PatchEndpointBudgetPolicyResponseSerializer.class) +@JsonDeserialize( + using = PatchEndpointBudgetPolicyResponse.PatchEndpointBudgetPolicyResponseDeserializer.class) public class PatchEndpointBudgetPolicyResponse { /** The budget policy applied to the vector search endpoint. 
*/ - @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; public PatchEndpointBudgetPolicyResponse setEffectiveBudgetPolicyId( @@ -42,4 +54,42 @@ public String toString() { .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .toString(); } + + PatchEndpointBudgetPolicyResponsePb toPb() { + PatchEndpointBudgetPolicyResponsePb pb = new PatchEndpointBudgetPolicyResponsePb(); + pb.setEffectiveBudgetPolicyId(effectiveBudgetPolicyId); + + return pb; + } + + static PatchEndpointBudgetPolicyResponse fromPb(PatchEndpointBudgetPolicyResponsePb pb) { + PatchEndpointBudgetPolicyResponse model = new PatchEndpointBudgetPolicyResponse(); + model.setEffectiveBudgetPolicyId(pb.getEffectiveBudgetPolicyId()); + + return model; + } + + public static class PatchEndpointBudgetPolicyResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + PatchEndpointBudgetPolicyResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PatchEndpointBudgetPolicyResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PatchEndpointBudgetPolicyResponseDeserializer + extends JsonDeserializer { + @Override + public PatchEndpointBudgetPolicyResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PatchEndpointBudgetPolicyResponsePb pb = + mapper.readValue(p, PatchEndpointBudgetPolicyResponsePb.class); + return PatchEndpointBudgetPolicyResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponsePb.java new file mode 100755 index 000000000..6b616c5b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PatchEndpointBudgetPolicyResponsePb { + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + public PatchEndpointBudgetPolicyResponsePb setEffectiveBudgetPolicyId( + String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PatchEndpointBudgetPolicyResponsePb that = (PatchEndpointBudgetPolicyResponsePb) o; + return Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(effectiveBudgetPolicyId); + } + + @Override + public String toString() { + return new ToStringer(PatchEndpointBudgetPolicyResponsePb.class) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .toString(); + } +} 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequest.java index e2606c848..82f022c27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequest.java @@ -4,22 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Request payload for getting next page of results. */ @Generated +@JsonSerialize( + using = QueryVectorIndexNextPageRequest.QueryVectorIndexNextPageRequestSerializer.class) +@JsonDeserialize( + using = QueryVectorIndexNextPageRequest.QueryVectorIndexNextPageRequestDeserializer.class) public class QueryVectorIndexNextPageRequest { /** Name of the endpoint. */ - @JsonProperty("endpoint_name") private String endpointName; /** Name of the vector index to query. */ - @JsonIgnore private String indexName; + private String indexName; /** Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API. 
*/ - @JsonProperty("page_token") private String pageToken; public QueryVectorIndexNextPageRequest setEndpointName(String endpointName) { @@ -72,4 +82,46 @@ public String toString() { .add("pageToken", pageToken) .toString(); } + + QueryVectorIndexNextPageRequestPb toPb() { + QueryVectorIndexNextPageRequestPb pb = new QueryVectorIndexNextPageRequestPb(); + pb.setEndpointName(endpointName); + pb.setIndexName(indexName); + pb.setPageToken(pageToken); + + return pb; + } + + static QueryVectorIndexNextPageRequest fromPb(QueryVectorIndexNextPageRequestPb pb) { + QueryVectorIndexNextPageRequest model = new QueryVectorIndexNextPageRequest(); + model.setEndpointName(pb.getEndpointName()); + model.setIndexName(pb.getIndexName()); + model.setPageToken(pb.getPageToken()); + + return model; + } + + public static class QueryVectorIndexNextPageRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + QueryVectorIndexNextPageRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryVectorIndexNextPageRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryVectorIndexNextPageRequestDeserializer + extends JsonDeserializer { + @Override + public QueryVectorIndexNextPageRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryVectorIndexNextPageRequestPb pb = + mapper.readValue(p, QueryVectorIndexNextPageRequestPb.class); + return QueryVectorIndexNextPageRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequestPb.java new file mode 100755 index 000000000..466ed08fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Request payload for getting next page of results. 
*/ +@Generated +class QueryVectorIndexNextPageRequestPb { + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonIgnore private String indexName; + + @JsonProperty("page_token") + private String pageToken; + + public QueryVectorIndexNextPageRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public QueryVectorIndexNextPageRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + public QueryVectorIndexNextPageRequestPb setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryVectorIndexNextPageRequestPb that = (QueryVectorIndexNextPageRequestPb) o; + return Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexName, that.indexName) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(endpointName, indexName, pageToken); + } + + @Override + public String toString() { + return new ToStringer(QueryVectorIndexNextPageRequestPb.class) + .add("endpointName", endpointName) + .add("indexName", indexName) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java index e07f748b7..41de29911 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java @@ -4,19 +4,27 
@@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = QueryVectorIndexRequest.QueryVectorIndexRequestSerializer.class) +@JsonDeserialize(using = QueryVectorIndexRequest.QueryVectorIndexRequestDeserializer.class) public class QueryVectorIndexRequest { /** List of column names to include in the response. */ - @JsonProperty("columns") private Collection columns; /** Column names used to retrieve data to send to the reranker. */ - @JsonProperty("columns_to_rerank") private Collection columnsToRerank; /** @@ -28,33 +36,27 @@ public class QueryVectorIndexRequest { * `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater * than equal to 5. - `{"id": 5}`: Filter for id equal to 5. */ - @JsonProperty("filters_json") private String filtersJson; /** Name of the vector index to query. */ - @JsonIgnore private String indexName; + private String indexName; /** Number of results to return. Defaults to 10. */ - @JsonProperty("num_results") private Long numResults; /** Query text. Required for Delta Sync Index using model endpoint. */ - @JsonProperty("query_text") private String queryText; /** The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`. 
*/ - @JsonProperty("query_type") private String queryType; /** * Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed * vectors. */ - @JsonProperty("query_vector") private Collection queryVector; /** Threshold for the approximate nearest neighbor search. Defaults to 0.0. */ - @JsonProperty("score_threshold") private Double scoreThreshold; public QueryVectorIndexRequest setColumns(Collection columns) { @@ -182,4 +184,57 @@ public String toString() { .add("scoreThreshold", scoreThreshold) .toString(); } + + QueryVectorIndexRequestPb toPb() { + QueryVectorIndexRequestPb pb = new QueryVectorIndexRequestPb(); + pb.setColumns(columns); + pb.setColumnsToRerank(columnsToRerank); + pb.setFiltersJson(filtersJson); + pb.setIndexName(indexName); + pb.setNumResults(numResults); + pb.setQueryText(queryText); + pb.setQueryType(queryType); + pb.setQueryVector(queryVector); + pb.setScoreThreshold(scoreThreshold); + + return pb; + } + + static QueryVectorIndexRequest fromPb(QueryVectorIndexRequestPb pb) { + QueryVectorIndexRequest model = new QueryVectorIndexRequest(); + model.setColumns(pb.getColumns()); + model.setColumnsToRerank(pb.getColumnsToRerank()); + model.setFiltersJson(pb.getFiltersJson()); + model.setIndexName(pb.getIndexName()); + model.setNumResults(pb.getNumResults()); + model.setQueryText(pb.getQueryText()); + model.setQueryType(pb.getQueryType()); + model.setQueryVector(pb.getQueryVector()); + model.setScoreThreshold(pb.getScoreThreshold()); + + return model; + } + + public static class QueryVectorIndexRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + QueryVectorIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryVectorIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryVectorIndexRequestDeserializer + extends JsonDeserializer { + @Override + public QueryVectorIndexRequest 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryVectorIndexRequestPb pb = mapper.readValue(p, QueryVectorIndexRequestPb.class); + return QueryVectorIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequestPb.java new file mode 100755 index 000000000..9a916ad19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequestPb.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class QueryVectorIndexRequestPb { + @JsonProperty("columns") + private Collection columns; + + @JsonProperty("columns_to_rerank") + private Collection columnsToRerank; + + @JsonProperty("filters_json") + private String filtersJson; + + @JsonIgnore private String indexName; + + @JsonProperty("num_results") + private Long numResults; + + @JsonProperty("query_text") + private String queryText; + + @JsonProperty("query_type") + private String queryType; + + @JsonProperty("query_vector") + private Collection queryVector; + + @JsonProperty("score_threshold") + private Double scoreThreshold; + + public QueryVectorIndexRequestPb setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public QueryVectorIndexRequestPb 
setColumnsToRerank(Collection columnsToRerank) { + this.columnsToRerank = columnsToRerank; + return this; + } + + public Collection getColumnsToRerank() { + return columnsToRerank; + } + + public QueryVectorIndexRequestPb setFiltersJson(String filtersJson) { + this.filtersJson = filtersJson; + return this; + } + + public String getFiltersJson() { + return filtersJson; + } + + public QueryVectorIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + public QueryVectorIndexRequestPb setNumResults(Long numResults) { + this.numResults = numResults; + return this; + } + + public Long getNumResults() { + return numResults; + } + + public QueryVectorIndexRequestPb setQueryText(String queryText) { + this.queryText = queryText; + return this; + } + + public String getQueryText() { + return queryText; + } + + public QueryVectorIndexRequestPb setQueryType(String queryType) { + this.queryType = queryType; + return this; + } + + public String getQueryType() { + return queryType; + } + + public QueryVectorIndexRequestPb setQueryVector(Collection queryVector) { + this.queryVector = queryVector; + return this; + } + + public Collection getQueryVector() { + return queryVector; + } + + public QueryVectorIndexRequestPb setScoreThreshold(Double scoreThreshold) { + this.scoreThreshold = scoreThreshold; + return this; + } + + public Double getScoreThreshold() { + return scoreThreshold; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryVectorIndexRequestPb that = (QueryVectorIndexRequestPb) o; + return Objects.equals(columns, that.columns) + && Objects.equals(columnsToRerank, that.columnsToRerank) + && Objects.equals(filtersJson, that.filtersJson) + && Objects.equals(indexName, that.indexName) + && Objects.equals(numResults, that.numResults) + && Objects.equals(queryText, 
that.queryText) + && Objects.equals(queryType, that.queryType) + && Objects.equals(queryVector, that.queryVector) + && Objects.equals(scoreThreshold, that.scoreThreshold); + } + + @Override + public int hashCode() { + return Objects.hash( + columns, + columnsToRerank, + filtersJson, + indexName, + numResults, + queryText, + queryType, + queryVector, + scoreThreshold); + } + + @Override + public String toString() { + return new ToStringer(QueryVectorIndexRequestPb.class) + .add("columns", columns) + .add("columnsToRerank", columnsToRerank) + .add("filtersJson", filtersJson) + .add("indexName", indexName) + .add("numResults", numResults) + .add("queryText", queryText) + .add("queryType", queryType) + .add("queryVector", queryVector) + .add("scoreThreshold", scoreThreshold) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java index c47a848af..dc8d20111 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import 
java.util.Objects; @Generated +@JsonSerialize(using = QueryVectorIndexResponse.QueryVectorIndexResponseSerializer.class) +@JsonDeserialize(using = QueryVectorIndexResponse.QueryVectorIndexResponseDeserializer.class) public class QueryVectorIndexResponse { /** Metadata about the result set. */ - @JsonProperty("manifest") private ResultManifest manifest; /** @@ -19,11 +29,9 @@ public class QueryVectorIndexResponse { * Empty value means no more results. The maximum number of results that can be returned is * 10,000. */ - @JsonProperty("next_page_token") private String nextPageToken; /** Data returned in the query result. */ - @JsonProperty("result") private ResultData result; public QueryVectorIndexResponse setManifest(ResultManifest manifest) { @@ -76,4 +84,45 @@ public String toString() { .add("result", result) .toString(); } + + QueryVectorIndexResponsePb toPb() { + QueryVectorIndexResponsePb pb = new QueryVectorIndexResponsePb(); + pb.setManifest(manifest); + pb.setNextPageToken(nextPageToken); + pb.setResult(result); + + return pb; + } + + static QueryVectorIndexResponse fromPb(QueryVectorIndexResponsePb pb) { + QueryVectorIndexResponse model = new QueryVectorIndexResponse(); + model.setManifest(pb.getManifest()); + model.setNextPageToken(pb.getNextPageToken()); + model.setResult(pb.getResult()); + + return model; + } + + public static class QueryVectorIndexResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + QueryVectorIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + QueryVectorIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class QueryVectorIndexResponseDeserializer + extends JsonDeserializer { + @Override + public QueryVectorIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + QueryVectorIndexResponsePb pb = mapper.readValue(p, QueryVectorIndexResponsePb.class); + return QueryVectorIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponsePb.java new file mode 100755 index 000000000..f37365563 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class QueryVectorIndexResponsePb { + @JsonProperty("manifest") + private ResultManifest manifest; + + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("result") + private ResultData result; + + public QueryVectorIndexResponsePb setManifest(ResultManifest manifest) { + this.manifest = manifest; + return this; + } + + public ResultManifest getManifest() { + return manifest; + } + + public QueryVectorIndexResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public QueryVectorIndexResponsePb setResult(ResultData result) { + this.result = result; + return this; + } + + public ResultData getResult() { + return result; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryVectorIndexResponsePb that = (QueryVectorIndexResponsePb) o; + return Objects.equals(manifest, that.manifest) + && 
Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(manifest, nextPageToken, result); + } + + @Override + public String toString() { + return new ToStringer(QueryVectorIndexResponsePb.class) + .add("manifest", manifest) + .add("nextPageToken", nextPageToken) + .add("result", result) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java index 76b6bf9ac..50a9483ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Data returned in the query result. */ @Generated +@JsonSerialize(using = ResultData.ResultDataSerializer.class) +@JsonDeserialize(using = ResultData.ResultDataDeserializer.class) public class ResultData { /** Data rows returned in the query. */ - @JsonProperty("data_array") private Collection> dataArray; /** Number of rows in the result set. 
*/ - @JsonProperty("row_count") private Long rowCount; public ResultData setDataArray(Collection> dataArray) { @@ -57,4 +66,39 @@ public String toString() { .add("rowCount", rowCount) .toString(); } + + ResultDataPb toPb() { + ResultDataPb pb = new ResultDataPb(); + pb.setDataArray(dataArray); + pb.setRowCount(rowCount); + + return pb; + } + + static ResultData fromPb(ResultDataPb pb) { + ResultData model = new ResultData(); + model.setDataArray(pb.getDataArray()); + model.setRowCount(pb.getRowCount()); + + return model; + } + + public static class ResultDataSerializer extends JsonSerializer { + @Override + public void serialize(ResultData value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultDataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultDataDeserializer extends JsonDeserializer { + @Override + public ResultData deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultDataPb pb = mapper.readValue(p, ResultDataPb.class); + return ResultData.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultDataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultDataPb.java new file mode 100755 index 000000000..3182ab6e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultDataPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Data returned in the query result. 
*/ +@Generated +class ResultDataPb { + @JsonProperty("data_array") + private Collection> dataArray; + + @JsonProperty("row_count") + private Long rowCount; + + public ResultDataPb setDataArray(Collection> dataArray) { + this.dataArray = dataArray; + return this; + } + + public Collection> getDataArray() { + return dataArray; + } + + public ResultDataPb setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultDataPb that = (ResultDataPb) o; + return Objects.equals(dataArray, that.dataArray) && Objects.equals(rowCount, that.rowCount); + } + + @Override + public int hashCode() { + return Objects.hash(dataArray, rowCount); + } + + @Override + public String toString() { + return new ToStringer(ResultDataPb.class) + .add("dataArray", dataArray) + .add("rowCount", rowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java index bf365fb38..cbee5cc02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Metadata about the result set. */ @Generated +@JsonSerialize(using = ResultManifest.ResultManifestSerializer.class) +@JsonDeserialize(using = ResultManifest.ResultManifestDeserializer.class) public class ResultManifest { /** Number of columns in the result set. */ - @JsonProperty("column_count") private Long columnCount; /** Information about each column in the result set. */ - @JsonProperty("columns") private Collection columns; public ResultManifest setColumnCount(Long columnCount) { @@ -57,4 +66,40 @@ public String toString() { .add("columns", columns) .toString(); } + + ResultManifestPb toPb() { + ResultManifestPb pb = new ResultManifestPb(); + pb.setColumnCount(columnCount); + pb.setColumns(columns); + + return pb; + } + + static ResultManifest fromPb(ResultManifestPb pb) { + ResultManifest model = new ResultManifest(); + model.setColumnCount(pb.getColumnCount()); + model.setColumns(pb.getColumns()); + + return model; + } + + public static class ResultManifestSerializer extends JsonSerializer { + @Override + public void serialize(ResultManifest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ResultManifestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ResultManifestDeserializer extends JsonDeserializer { + @Override + public ResultManifest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ResultManifestPb pb = mapper.readValue(p, ResultManifestPb.class); + return ResultManifest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifestPb.java new file mode 100755 index 000000000..468bcf267 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Metadata about the result set. */ +@Generated +class ResultManifestPb { + @JsonProperty("column_count") + private Long columnCount; + + @JsonProperty("columns") + private Collection columns; + + public ResultManifestPb setColumnCount(Long columnCount) { + this.columnCount = columnCount; + return this; + } + + public Long getColumnCount() { + return columnCount; + } + + public ResultManifestPb setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ResultManifestPb that = (ResultManifestPb) o; + return Objects.equals(columnCount, that.columnCount) && Objects.equals(columns, that.columns); + } + + @Override + public int hashCode() { + return Objects.hash(columnCount, columns); + } + + @Override + public String toString() { + return new ToStringer(ResultManifestPb.class) + .add("columnCount", columnCount) + .add("columns", columns) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java index 19ad5ae64..1fe09ffa9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ScanVectorIndexRequest.ScanVectorIndexRequestSerializer.class) +@JsonDeserialize(using = ScanVectorIndexRequest.ScanVectorIndexRequestDeserializer.class) public class ScanVectorIndexRequest { /** Name of the vector index to scan. */ - @JsonIgnore private String indexName; + private String indexName; /** Primary key of the last entry returned in the previous scan. */ - @JsonProperty("last_primary_key") private String lastPrimaryKey; /** Number of results to return. Defaults to 10. 
*/ - @JsonProperty("num_results") private Long numResults; public ScanVectorIndexRequest setIndexName(String indexName) { @@ -71,4 +79,45 @@ public String toString() { .add("numResults", numResults) .toString(); } + + ScanVectorIndexRequestPb toPb() { + ScanVectorIndexRequestPb pb = new ScanVectorIndexRequestPb(); + pb.setIndexName(indexName); + pb.setLastPrimaryKey(lastPrimaryKey); + pb.setNumResults(numResults); + + return pb; + } + + static ScanVectorIndexRequest fromPb(ScanVectorIndexRequestPb pb) { + ScanVectorIndexRequest model = new ScanVectorIndexRequest(); + model.setIndexName(pb.getIndexName()); + model.setLastPrimaryKey(pb.getLastPrimaryKey()); + model.setNumResults(pb.getNumResults()); + + return model; + } + + public static class ScanVectorIndexRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + ScanVectorIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ScanVectorIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ScanVectorIndexRequestDeserializer + extends JsonDeserializer { + @Override + public ScanVectorIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ScanVectorIndexRequestPb pb = mapper.readValue(p, ScanVectorIndexRequestPb.class); + return ScanVectorIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequestPb.java new file mode 100755 index 000000000..1b938dd1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ScanVectorIndexRequestPb { + @JsonIgnore private String indexName; + + @JsonProperty("last_primary_key") + private String lastPrimaryKey; + + @JsonProperty("num_results") + private Long numResults; + + public ScanVectorIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + public ScanVectorIndexRequestPb setLastPrimaryKey(String lastPrimaryKey) { + this.lastPrimaryKey = lastPrimaryKey; + return this; + } + + public String getLastPrimaryKey() { + return lastPrimaryKey; + } + + public ScanVectorIndexRequestPb setNumResults(Long numResults) { + this.numResults = numResults; + return this; + } + + public Long getNumResults() { + return numResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ScanVectorIndexRequestPb that = (ScanVectorIndexRequestPb) o; + return Objects.equals(indexName, 
that.indexName) + && Objects.equals(lastPrimaryKey, that.lastPrimaryKey) + && Objects.equals(numResults, that.numResults); + } + + @Override + public int hashCode() { + return Objects.hash(indexName, lastPrimaryKey, numResults); + } + + @Override + public String toString() { + return new ToStringer(ScanVectorIndexRequestPb.class) + .add("indexName", indexName) + .add("lastPrimaryKey", lastPrimaryKey) + .add("numResults", numResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java index b341eeebb..6bd0f9598 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Response to a scan vector index request. 
*/ @Generated +@JsonSerialize(using = ScanVectorIndexResponse.ScanVectorIndexResponseSerializer.class) +@JsonDeserialize(using = ScanVectorIndexResponse.ScanVectorIndexResponseDeserializer.class) public class ScanVectorIndexResponse { /** List of data entries */ - @JsonProperty("data") private Collection data; /** Primary key of the last entry. */ - @JsonProperty("last_primary_key") private String lastPrimaryKey; public ScanVectorIndexResponse setData(Collection data) { @@ -57,4 +66,43 @@ public String toString() { .add("lastPrimaryKey", lastPrimaryKey) .toString(); } + + ScanVectorIndexResponsePb toPb() { + ScanVectorIndexResponsePb pb = new ScanVectorIndexResponsePb(); + pb.setData(data); + pb.setLastPrimaryKey(lastPrimaryKey); + + return pb; + } + + static ScanVectorIndexResponse fromPb(ScanVectorIndexResponsePb pb) { + ScanVectorIndexResponse model = new ScanVectorIndexResponse(); + model.setData(pb.getData()); + model.setLastPrimaryKey(pb.getLastPrimaryKey()); + + return model; + } + + public static class ScanVectorIndexResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ScanVectorIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ScanVectorIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ScanVectorIndexResponseDeserializer + extends JsonDeserializer { + @Override + public ScanVectorIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ScanVectorIndexResponsePb pb = mapper.readValue(p, ScanVectorIndexResponsePb.class); + return ScanVectorIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponsePb.java new file mode 100755 index 000000000..19940f3fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponsePb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response to a scan vector index request. */ +@Generated +class ScanVectorIndexResponsePb { + @JsonProperty("data") + private Collection data; + + @JsonProperty("last_primary_key") + private String lastPrimaryKey; + + public ScanVectorIndexResponsePb setData(Collection data) { + this.data = data; + return this; + } + + public Collection getData() { + return data; + } + + public ScanVectorIndexResponsePb setLastPrimaryKey(String lastPrimaryKey) { + this.lastPrimaryKey = lastPrimaryKey; + return this; + } + + public String getLastPrimaryKey() { + return lastPrimaryKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ScanVectorIndexResponsePb that = (ScanVectorIndexResponsePb) o; + return Objects.equals(data, that.data) && Objects.equals(lastPrimaryKey, that.lastPrimaryKey); + } + + @Override + public int hashCode() { + return Objects.hash(data, lastPrimaryKey); + } + + @Override + public String toString() { + return new 
ToStringer(ScanVectorIndexResponsePb.class) + .add("data", data) + .add("lastPrimaryKey", lastPrimaryKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java index 7e8e17153..4a9a5c9f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = Struct.StructSerializer.class) +@JsonDeserialize(using = Struct.StructDeserializer.class) public class Struct { /** Data entry, corresponding to a row in a vector index. 
*/ - @JsonProperty("fields") private Collection fields; public Struct setFields(Collection fields) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(Struct.class).add("fields", fields).toString(); } + + StructPb toPb() { + StructPb pb = new StructPb(); + pb.setFields(fields); + + return pb; + } + + static Struct fromPb(StructPb pb) { + Struct model = new Struct(); + model.setFields(pb.getFields()); + + return model; + } + + public static class StructSerializer extends JsonSerializer { + @Override + public void serialize(Struct value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + StructPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class StructDeserializer extends JsonDeserializer { + @Override + public Struct deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + StructPb pb = mapper.readValue(p, StructPb.class); + return Struct.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/StructPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/StructPb.java new file mode 100755 index 000000000..4634fdd84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/StructPb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class StructPb { + @JsonProperty("fields") + private Collection fields; + + public StructPb setFields(Collection fields) { + this.fields = fields; + return this; + } + + public Collection getFields() { + return fields; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StructPb that = (StructPb) o; + return Objects.equals(fields, that.fields); + } + + @Override + public int hashCode() { + return Objects.hash(fields); + } + + @Override + public String toString() { + return new ToStringer(StructPb.class).add("fields", fields).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java index 06de13154..ca76291f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Synchronize an index */ @Generated +@JsonSerialize(using = SyncIndexRequest.SyncIndexRequestSerializer.class) +@JsonDeserialize(using = SyncIndexRequest.SyncIndexRequestDeserializer.class) public class SyncIndexRequest { /** Name of the vector index to synchronize. Must be a Delta Sync Index. */ - @JsonIgnore private String indexName; + private String indexName; public SyncIndexRequest setIndexName(String indexName) { this.indexName = indexName; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(SyncIndexRequest.class).add("indexName", indexName).toString(); } + + SyncIndexRequestPb toPb() { + SyncIndexRequestPb pb = new SyncIndexRequestPb(); + pb.setIndexName(indexName); + + return pb; + } + + static SyncIndexRequest fromPb(SyncIndexRequestPb pb) { + SyncIndexRequest model = new SyncIndexRequest(); + model.setIndexName(pb.getIndexName()); + + return model; + } + + public static class SyncIndexRequestSerializer extends JsonSerializer { + @Override + public void serialize(SyncIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncIndexRequestDeserializer extends JsonDeserializer { + @Override + public SyncIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncIndexRequestPb pb = mapper.readValue(p, SyncIndexRequestPb.class); + return SyncIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequestPb.java new file mode 100755 index 000000000..ee6e2d360 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Synchronize an index */ +@Generated +class SyncIndexRequestPb { + @JsonIgnore private String indexName; + + public SyncIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncIndexRequestPb that = (SyncIndexRequestPb) o; + return Objects.equals(indexName, that.indexName); + } + + @Override + public int hashCode() { + return Objects.hash(indexName); + } + + @Override + public String toString() { + return new ToStringer(SyncIndexRequestPb.class).add("indexName", indexName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java index f2c937b76..ea87da919 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SyncIndexResponse.SyncIndexResponseSerializer.class) +@JsonDeserialize(using = SyncIndexResponse.SyncIndexResponseDeserializer.class) public class SyncIndexResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(SyncIndexResponse.class).toString(); } + + SyncIndexResponsePb toPb() { + SyncIndexResponsePb pb = new SyncIndexResponsePb(); + + return pb; + } + + static SyncIndexResponse fromPb(SyncIndexResponsePb pb) { + SyncIndexResponse model = new SyncIndexResponse(); + + return model; + } + + public static class SyncIndexResponseSerializer extends JsonSerializer { + @Override + public void serialize(SyncIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SyncIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SyncIndexResponseDeserializer extends JsonDeserializer { + @Override + public SyncIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SyncIndexResponsePb pb = mapper.readValue(p, SyncIndexResponsePb.class); + return SyncIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponsePb.java new file mode 100755 index 000000000..ccef1a18a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class SyncIndexResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SyncIndexResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java index 3a246a96a..271507c5d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java @@ -4,19 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateEndpointCustomTagsRequest.UpdateEndpointCustomTagsRequestSerializer.class) +@JsonDeserialize( + using = UpdateEndpointCustomTagsRequest.UpdateEndpointCustomTagsRequestDeserializer.class) public class UpdateEndpointCustomTagsRequest { /** The new custom tags for the vector search endpoint */ - @JsonProperty("custom_tags") private Collection customTags; /** Name of the vector search endpoint */ - @JsonIgnore private String endpointName; + private String endpointName; public UpdateEndpointCustomTagsRequest setCustomTags(Collection customTags) { this.customTags = customTags; @@ -57,4 +68,44 @@ public String toString() { .add("endpointName", endpointName) .toString(); } + + UpdateEndpointCustomTagsRequestPb toPb() { + UpdateEndpointCustomTagsRequestPb pb = new UpdateEndpointCustomTagsRequestPb(); + pb.setCustomTags(customTags); + pb.setEndpointName(endpointName); + + return pb; + } + + static UpdateEndpointCustomTagsRequest fromPb(UpdateEndpointCustomTagsRequestPb pb) { + UpdateEndpointCustomTagsRequest model = new UpdateEndpointCustomTagsRequest(); + model.setCustomTags(pb.getCustomTags()); + model.setEndpointName(pb.getEndpointName()); + + return model; + } + + public static class UpdateEndpointCustomTagsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEndpointCustomTagsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException 
{ + UpdateEndpointCustomTagsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEndpointCustomTagsRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateEndpointCustomTagsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEndpointCustomTagsRequestPb pb = + mapper.readValue(p, UpdateEndpointCustomTagsRequestPb.class); + return UpdateEndpointCustomTagsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequestPb.java new file mode 100755 index 000000000..37c121754 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequestPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateEndpointCustomTagsRequestPb { + @JsonProperty("custom_tags") + private Collection customTags; + + @JsonIgnore private String endpointName; + + public UpdateEndpointCustomTagsRequestPb setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + public UpdateEndpointCustomTagsRequestPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEndpointCustomTagsRequestPb that = (UpdateEndpointCustomTagsRequestPb) o; + return Objects.equals(customTags, that.customTags) + && Objects.equals(endpointName, that.endpointName); + } + + @Override + public int hashCode() { + return Objects.hash(customTags, endpointName); + } + + @Override + public String toString() { + return new ToStringer(UpdateEndpointCustomTagsRequestPb.class) + .add("customTags", customTags) + .add("endpointName", endpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java index 99fee54ed..b4abac97c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java @@ -4,18 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = UpdateEndpointCustomTagsResponse.UpdateEndpointCustomTagsResponseSerializer.class) +@JsonDeserialize( + using = UpdateEndpointCustomTagsResponse.UpdateEndpointCustomTagsResponseDeserializer.class) public class UpdateEndpointCustomTagsResponse { /** All the custom tags that are applied to the vector search endpoint. */ - @JsonProperty("custom_tags") private Collection customTags; /** The name of the vector search endpoint whose custom tags were updated. 
*/ - @JsonProperty("name") private String name; public UpdateEndpointCustomTagsResponse setCustomTags(Collection customTags) { @@ -56,4 +67,44 @@ public String toString() { .add("name", name) .toString(); } + + UpdateEndpointCustomTagsResponsePb toPb() { + UpdateEndpointCustomTagsResponsePb pb = new UpdateEndpointCustomTagsResponsePb(); + pb.setCustomTags(customTags); + pb.setName(name); + + return pb; + } + + static UpdateEndpointCustomTagsResponse fromPb(UpdateEndpointCustomTagsResponsePb pb) { + UpdateEndpointCustomTagsResponse model = new UpdateEndpointCustomTagsResponse(); + model.setCustomTags(pb.getCustomTags()); + model.setName(pb.getName()); + + return model; + } + + public static class UpdateEndpointCustomTagsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateEndpointCustomTagsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateEndpointCustomTagsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateEndpointCustomTagsResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateEndpointCustomTagsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateEndpointCustomTagsResponsePb pb = + mapper.readValue(p, UpdateEndpointCustomTagsResponsePb.class); + return UpdateEndpointCustomTagsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponsePb.java new file mode 100755 index 000000000..dfbb2a272 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpdateEndpointCustomTagsResponsePb { + @JsonProperty("custom_tags") + private Collection customTags; + + @JsonProperty("name") + private String name; + + public UpdateEndpointCustomTagsResponsePb setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + public UpdateEndpointCustomTagsResponsePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEndpointCustomTagsResponsePb that = (UpdateEndpointCustomTagsResponsePb) o; + return Objects.equals(customTags, that.customTags) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(customTags, name); + } + + @Override + public String toString() { + return new 
ToStringer(UpdateEndpointCustomTagsResponsePb.class) + .add("customTags", customTags) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java index 6cc7222b6..c3bf641ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java @@ -4,18 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = UpsertDataResult.UpsertDataResultSerializer.class) +@JsonDeserialize(using = UpsertDataResult.UpsertDataResultDeserializer.class) public class UpsertDataResult { /** List of primary keys for rows that failed to process. */ - @JsonProperty("failed_primary_keys") private Collection failedPrimaryKeys; /** Count of successfully processed rows. 
*/ - @JsonProperty("success_row_count") private Long successRowCount; public UpsertDataResult setFailedPrimaryKeys(Collection failedPrimaryKeys) { @@ -57,4 +66,40 @@ public String toString() { .add("successRowCount", successRowCount) .toString(); } + + UpsertDataResultPb toPb() { + UpsertDataResultPb pb = new UpsertDataResultPb(); + pb.setFailedPrimaryKeys(failedPrimaryKeys); + pb.setSuccessRowCount(successRowCount); + + return pb; + } + + static UpsertDataResult fromPb(UpsertDataResultPb pb) { + UpsertDataResult model = new UpsertDataResult(); + model.setFailedPrimaryKeys(pb.getFailedPrimaryKeys()); + model.setSuccessRowCount(pb.getSuccessRowCount()); + + return model; + } + + public static class UpsertDataResultSerializer extends JsonSerializer { + @Override + public void serialize(UpsertDataResult value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpsertDataResultPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpsertDataResultDeserializer extends JsonDeserializer { + @Override + public UpsertDataResult deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpsertDataResultPb pb = mapper.readValue(p, UpsertDataResultPb.class); + return UpsertDataResult.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResultPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResultPb.java new file mode 100755 index 000000000..2a9fec768 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResultPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class UpsertDataResultPb { + @JsonProperty("failed_primary_keys") + private Collection failedPrimaryKeys; + + @JsonProperty("success_row_count") + private Long successRowCount; + + public UpsertDataResultPb setFailedPrimaryKeys(Collection failedPrimaryKeys) { + this.failedPrimaryKeys = failedPrimaryKeys; + return this; + } + + public Collection getFailedPrimaryKeys() { + return failedPrimaryKeys; + } + + public UpsertDataResultPb setSuccessRowCount(Long successRowCount) { + this.successRowCount = successRowCount; + return this; + } + + public Long getSuccessRowCount() { + return successRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpsertDataResultPb that = (UpsertDataResultPb) o; + return Objects.equals(failedPrimaryKeys, that.failedPrimaryKeys) + && Objects.equals(successRowCount, that.successRowCount); + } + + @Override + public int hashCode() { + return Objects.hash(failedPrimaryKeys, successRowCount); + } + + @Override + public String toString() { + return new ToStringer(UpsertDataResultPb.class) + .add("failedPrimaryKeys", failedPrimaryKeys) + .add("successRowCount", successRowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java index e2bc68471..948defd22 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java @@ -4,19 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpsertDataVectorIndexRequest.UpsertDataVectorIndexRequestSerializer.class) +@JsonDeserialize( + using = UpsertDataVectorIndexRequest.UpsertDataVectorIndexRequestDeserializer.class) public class UpsertDataVectorIndexRequest { /** * Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. */ - @JsonIgnore private String indexName; + private String indexName; /** JSON string representing the data to be upserted. 
*/ - @JsonProperty("inputs_json") private String inputsJson; public UpsertDataVectorIndexRequest setIndexName(String indexName) { @@ -57,4 +67,43 @@ public String toString() { .add("inputsJson", inputsJson) .toString(); } + + UpsertDataVectorIndexRequestPb toPb() { + UpsertDataVectorIndexRequestPb pb = new UpsertDataVectorIndexRequestPb(); + pb.setIndexName(indexName); + pb.setInputsJson(inputsJson); + + return pb; + } + + static UpsertDataVectorIndexRequest fromPb(UpsertDataVectorIndexRequestPb pb) { + UpsertDataVectorIndexRequest model = new UpsertDataVectorIndexRequest(); + model.setIndexName(pb.getIndexName()); + model.setInputsJson(pb.getInputsJson()); + + return model; + } + + public static class UpsertDataVectorIndexRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpsertDataVectorIndexRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpsertDataVectorIndexRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpsertDataVectorIndexRequestDeserializer + extends JsonDeserializer { + @Override + public UpsertDataVectorIndexRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpsertDataVectorIndexRequestPb pb = mapper.readValue(p, UpsertDataVectorIndexRequestPb.class); + return UpsertDataVectorIndexRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequestPb.java new file mode 100755 index 000000000..2623d2e19 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpsertDataVectorIndexRequestPb { + @JsonIgnore private String indexName; + + @JsonProperty("inputs_json") + private String inputsJson; + + public UpsertDataVectorIndexRequestPb setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + public UpsertDataVectorIndexRequestPb setInputsJson(String inputsJson) { + this.inputsJson = inputsJson; + return this; + } + + public String getInputsJson() { + return inputsJson; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpsertDataVectorIndexRequestPb that = (UpsertDataVectorIndexRequestPb) o; + return Objects.equals(indexName, that.indexName) && Objects.equals(inputsJson, that.inputsJson); + } + + @Override + public int hashCode() { + return Objects.hash(indexName, inputsJson); + } + + @Override + public String toString() { + return new 
ToStringer(UpsertDataVectorIndexRequestPb.class) + .add("indexName", indexName) + .add("inputsJson", inputsJson) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java index 4bf8cac17..8a12f5774 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpsertDataVectorIndexResponse.UpsertDataVectorIndexResponseSerializer.class) +@JsonDeserialize( + using = UpsertDataVectorIndexResponse.UpsertDataVectorIndexResponseDeserializer.class) public class UpsertDataVectorIndexResponse { /** Result of the upsert or delete operation. */ - @JsonProperty("result") private UpsertDataResult result; /** Status of the upsert operation. 
*/ - @JsonProperty("status") private UpsertDataStatus status; public UpsertDataVectorIndexResponse setResult(UpsertDataResult result) { @@ -55,4 +65,44 @@ public String toString() { .add("status", status) .toString(); } + + UpsertDataVectorIndexResponsePb toPb() { + UpsertDataVectorIndexResponsePb pb = new UpsertDataVectorIndexResponsePb(); + pb.setResult(result); + pb.setStatus(status); + + return pb; + } + + static UpsertDataVectorIndexResponse fromPb(UpsertDataVectorIndexResponsePb pb) { + UpsertDataVectorIndexResponse model = new UpsertDataVectorIndexResponse(); + model.setResult(pb.getResult()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class UpsertDataVectorIndexResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpsertDataVectorIndexResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpsertDataVectorIndexResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpsertDataVectorIndexResponseDeserializer + extends JsonDeserializer { + @Override + public UpsertDataVectorIndexResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpsertDataVectorIndexResponsePb pb = + mapper.readValue(p, UpsertDataVectorIndexResponsePb.class); + return UpsertDataVectorIndexResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponsePb.java new file mode 100755 index 000000000..02a07e233 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponsePb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpsertDataVectorIndexResponsePb { + @JsonProperty("result") + private UpsertDataResult result; + + @JsonProperty("status") + private UpsertDataStatus status; + + public UpsertDataVectorIndexResponsePb setResult(UpsertDataResult result) { + this.result = result; + return this; + } + + public UpsertDataResult getResult() { + return result; + } + + public UpsertDataVectorIndexResponsePb setStatus(UpsertDataStatus status) { + this.status = status; + return this; + } + + public UpsertDataStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpsertDataVectorIndexResponsePb that = (UpsertDataVectorIndexResponsePb) o; + return Objects.equals(result, that.result) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(result, status); + } + + @Override + public String toString() { + return new 
ToStringer(UpsertDataVectorIndexResponsePb.class) + .add("result", result) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java index 2c50b490a..921edb23d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java @@ -4,29 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Value.ValueSerializer.class) +@JsonDeserialize(using = Value.ValueDeserializer.class) public class Value { /** */ - @JsonProperty("bool_value") private Boolean boolValue; /** */ - @JsonProperty("list_value") private ListValue listValue; /** */ - @JsonProperty("number_value") private Double numberValue; /** */ - @JsonProperty("string_value") private String stringValue; /** */ - @JsonProperty("struct_value") private Struct structValue; public Value setBoolValue(Boolean boolValue) { @@ -101,4 +107,45 @@ public String toString() { .add("structValue", structValue) .toString(); } + + ValuePb toPb() { + ValuePb pb = new ValuePb(); + pb.setBoolValue(boolValue); + pb.setListValue(listValue); + 
pb.setNumberValue(numberValue); + pb.setStringValue(stringValue); + pb.setStructValue(structValue); + + return pb; + } + + static Value fromPb(ValuePb pb) { + Value model = new Value(); + model.setBoolValue(pb.getBoolValue()); + model.setListValue(pb.getListValue()); + model.setNumberValue(pb.getNumberValue()); + model.setStringValue(pb.getStringValue()); + model.setStructValue(pb.getStructValue()); + + return model; + } + + public static class ValueSerializer extends JsonSerializer { + @Override + public void serialize(Value value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ValuePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ValueDeserializer extends JsonDeserializer { + @Override + public Value deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ValuePb pb = mapper.readValue(p, ValuePb.class); + return Value.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ValuePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ValuePb.java new file mode 100755 index 000000000..46972b496 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ValuePb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ValuePb { + @JsonProperty("bool_value") + private Boolean boolValue; + + @JsonProperty("list_value") + private ListValue listValue; + + @JsonProperty("number_value") + private Double numberValue; + + @JsonProperty("string_value") + private String stringValue; + + @JsonProperty("struct_value") + private Struct structValue; + + public ValuePb setBoolValue(Boolean boolValue) { + this.boolValue = boolValue; + return this; + } + + public Boolean getBoolValue() { + return boolValue; + } + + public ValuePb setListValue(ListValue listValue) { + this.listValue = listValue; + return this; + } + + public ListValue getListValue() { + return listValue; + } + + public ValuePb setNumberValue(Double numberValue) { + this.numberValue = numberValue; + return this; + } + + public Double getNumberValue() { + return numberValue; + } + + public ValuePb setStringValue(String stringValue) { + this.stringValue = stringValue; + return this; + } + + public String getStringValue() { + return stringValue; + } + + public ValuePb setStructValue(Struct structValue) { + this.structValue = structValue; + return this; + } + + public Struct getStructValue() { + return structValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValuePb that = (ValuePb) o; + return Objects.equals(boolValue, that.boolValue) + && Objects.equals(listValue, that.listValue) + && Objects.equals(numberValue, that.numberValue) + && Objects.equals(stringValue, that.stringValue) + && Objects.equals(structValue, that.structValue); + } + + @Override + public int hashCode() { + return Objects.hash(boolValue, listValue, numberValue, stringValue, structValue); + } + + @Override + public 
String toString() { + return new ToStringer(ValuePb.class) + .add("boolValue", boolValue) + .add("listValue", listValue) + .add("numberValue", numberValue) + .add("stringValue", stringValue) + .add("structValue", structValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java index f5922390d..b536991e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = VectorIndex.VectorIndexSerializer.class) +@JsonDeserialize(using = VectorIndex.VectorIndexDeserializer.class) public class VectorIndex { /** The user who created the index. 
*/ - @JsonProperty("creator") private String creator; /** */ - @JsonProperty("delta_sync_index_spec") private DeltaSyncVectorIndexSpecResponse deltaSyncIndexSpec; /** */ - @JsonProperty("direct_access_index_spec") private DirectAccessVectorIndexSpec directAccessIndexSpec; /** Name of the endpoint associated with the index */ - @JsonProperty("endpoint_name") private String endpointName; /** @@ -32,19 +39,15 @@ public class VectorIndex { * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages * index updates. */ - @JsonProperty("index_type") private VectorIndexType indexType; /** Name of the index */ - @JsonProperty("name") private String name; /** Primary key of the index */ - @JsonProperty("primary_key") private String primaryKey; /** */ - @JsonProperty("status") private VectorIndexStatus status; public VectorIndex setCreator(String creator) { @@ -160,4 +163,51 @@ public String toString() { .add("status", status) .toString(); } + + VectorIndexPb toPb() { + VectorIndexPb pb = new VectorIndexPb(); + pb.setCreator(creator); + pb.setDeltaSyncIndexSpec(deltaSyncIndexSpec); + pb.setDirectAccessIndexSpec(directAccessIndexSpec); + pb.setEndpointName(endpointName); + pb.setIndexType(indexType); + pb.setName(name); + pb.setPrimaryKey(primaryKey); + pb.setStatus(status); + + return pb; + } + + static VectorIndex fromPb(VectorIndexPb pb) { + VectorIndex model = new VectorIndex(); + model.setCreator(pb.getCreator()); + model.setDeltaSyncIndexSpec(pb.getDeltaSyncIndexSpec()); + model.setDirectAccessIndexSpec(pb.getDirectAccessIndexSpec()); + model.setEndpointName(pb.getEndpointName()); + model.setIndexType(pb.getIndexType()); + model.setName(pb.getName()); + model.setPrimaryKey(pb.getPrimaryKey()); + model.setStatus(pb.getStatus()); + + return model; + } + + public static class VectorIndexSerializer extends JsonSerializer { + @Override + public void serialize(VectorIndex value, JsonGenerator gen, SerializerProvider provider) + throws 
IOException { + VectorIndexPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VectorIndexDeserializer extends JsonDeserializer { + @Override + public VectorIndex deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VectorIndexPb pb = mapper.readValue(p, VectorIndexPb.class); + return VectorIndex.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexPb.java new file mode 100755 index 000000000..300d4022b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexPb.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class VectorIndexPb { + @JsonProperty("creator") + private String creator; + + @JsonProperty("delta_sync_index_spec") + private DeltaSyncVectorIndexSpecResponse deltaSyncIndexSpec; + + @JsonProperty("direct_access_index_spec") + private DirectAccessVectorIndexSpec directAccessIndexSpec; + + @JsonProperty("endpoint_name") + private String endpointName; + + @JsonProperty("index_type") + private VectorIndexType indexType; + + @JsonProperty("name") + private String name; + + @JsonProperty("primary_key") + private String primaryKey; + + @JsonProperty("status") + private VectorIndexStatus status; + + public VectorIndexPb setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public 
VectorIndexPb setDeltaSyncIndexSpec(DeltaSyncVectorIndexSpecResponse deltaSyncIndexSpec) { + this.deltaSyncIndexSpec = deltaSyncIndexSpec; + return this; + } + + public DeltaSyncVectorIndexSpecResponse getDeltaSyncIndexSpec() { + return deltaSyncIndexSpec; + } + + public VectorIndexPb setDirectAccessIndexSpec(DirectAccessVectorIndexSpec directAccessIndexSpec) { + this.directAccessIndexSpec = directAccessIndexSpec; + return this; + } + + public DirectAccessVectorIndexSpec getDirectAccessIndexSpec() { + return directAccessIndexSpec; + } + + public VectorIndexPb setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public VectorIndexPb setIndexType(VectorIndexType indexType) { + this.indexType = indexType; + return this; + } + + public VectorIndexType getIndexType() { + return indexType; + } + + public VectorIndexPb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public VectorIndexPb setPrimaryKey(String primaryKey) { + this.primaryKey = primaryKey; + return this; + } + + public String getPrimaryKey() { + return primaryKey; + } + + public VectorIndexPb setStatus(VectorIndexStatus status) { + this.status = status; + return this; + } + + public VectorIndexStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VectorIndexPb that = (VectorIndexPb) o; + return Objects.equals(creator, that.creator) + && Objects.equals(deltaSyncIndexSpec, that.deltaSyncIndexSpec) + && Objects.equals(directAccessIndexSpec, that.directAccessIndexSpec) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexType, that.indexType) + && Objects.equals(name, that.name) + && Objects.equals(primaryKey, that.primaryKey) + && Objects.equals(status, that.status); + } + + @Override 
+ public int hashCode() { + return Objects.hash( + creator, + deltaSyncIndexSpec, + directAccessIndexSpec, + endpointName, + indexType, + name, + primaryKey, + status); + } + + @Override + public String toString() { + return new ToStringer(VectorIndexPb.class) + .add("creator", creator) + .add("deltaSyncIndexSpec", deltaSyncIndexSpec) + .add("directAccessIndexSpec", directAccessIndexSpec) + .add("endpointName", endpointName) + .add("indexType", indexType) + .add("name", name) + .add("primaryKey", primaryKey) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java index 83868a47b..26c79c294 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = VectorIndexStatus.VectorIndexStatusSerializer.class) +@JsonDeserialize(using = VectorIndexStatus.VectorIndexStatusDeserializer.class) public class VectorIndexStatus { /** Index API Url to be used to perform operations 
on the index */ - @JsonProperty("index_url") private String indexUrl; /** Number of rows indexed */ - @JsonProperty("indexed_row_count") private Long indexedRowCount; /** Message associated with the index status */ - @JsonProperty("message") private String message; /** Whether the index is ready for search */ - @JsonProperty("ready") private Boolean ready; public VectorIndexStatus setIndexUrl(String indexUrl) { @@ -86,4 +93,44 @@ public String toString() { .add("ready", ready) .toString(); } + + VectorIndexStatusPb toPb() { + VectorIndexStatusPb pb = new VectorIndexStatusPb(); + pb.setIndexUrl(indexUrl); + pb.setIndexedRowCount(indexedRowCount); + pb.setMessage(message); + pb.setReady(ready); + + return pb; + } + + static VectorIndexStatus fromPb(VectorIndexStatusPb pb) { + VectorIndexStatus model = new VectorIndexStatus(); + model.setIndexUrl(pb.getIndexUrl()); + model.setIndexedRowCount(pb.getIndexedRowCount()); + model.setMessage(pb.getMessage()); + model.setReady(pb.getReady()); + + return model; + } + + public static class VectorIndexStatusSerializer extends JsonSerializer { + @Override + public void serialize(VectorIndexStatus value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + VectorIndexStatusPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class VectorIndexStatusDeserializer extends JsonDeserializer { + @Override + public VectorIndexStatus deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + VectorIndexStatusPb pb = mapper.readValue(p, VectorIndexStatusPb.class); + return VectorIndexStatus.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatusPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatusPb.java new file mode 100755 index 000000000..22febf6c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatusPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class VectorIndexStatusPb { + @JsonProperty("index_url") + private String indexUrl; + + @JsonProperty("indexed_row_count") + private Long indexedRowCount; + + @JsonProperty("message") + private String message; + + @JsonProperty("ready") + private Boolean ready; + + public VectorIndexStatusPb setIndexUrl(String indexUrl) { + this.indexUrl = indexUrl; + return this; + } + + public String getIndexUrl() { + return indexUrl; + } + + public VectorIndexStatusPb setIndexedRowCount(Long indexedRowCount) { + this.indexedRowCount = indexedRowCount; + return this; + } + + public Long getIndexedRowCount() { + return indexedRowCount; + } + + public VectorIndexStatusPb setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public VectorIndexStatusPb setReady(Boolean ready) { + this.ready = ready; + return this; + } + + public Boolean getReady() { + return ready; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
VectorIndexStatusPb that = (VectorIndexStatusPb) o; + return Objects.equals(indexUrl, that.indexUrl) + && Objects.equals(indexedRowCount, that.indexedRowCount) + && Objects.equals(message, that.message) + && Objects.equals(ready, that.ready); + } + + @Override + public int hashCode() { + return Objects.hash(indexUrl, indexedRowCount, message, ready); + } + + @Override + public String toString() { + return new ToStringer(VectorIndexStatusPb.class) + .add("indexUrl", indexUrl) + .add("indexedRowCount", indexedRowCount) + .add("message", message) + .add("ready", ready) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java index a79ee43ba..c03a2b006 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java @@ -21,7 +21,7 @@ public EndpointInfo createEndpoint(CreateEndpoint request) { String path = "/api/2.0/vector-search/endpoints"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, EndpointInfo.class); @@ -35,7 +35,7 @@ public void deleteEndpoint(DeleteEndpointRequest request) { String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteEndpointResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public EndpointInfo 
getEndpoint(GetEndpointRequest request) { String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, EndpointInfo.class); } catch (IOException e) { @@ -61,7 +61,7 @@ public ListEndpointResponse listEndpoints(ListEndpointsRequest request) { String path = "/api/2.0/vector-search/endpoints"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListEndpointResponse.class); } catch (IOException e) { @@ -77,7 +77,7 @@ public PatchEndpointBudgetPolicyResponse updateEndpointBudgetPolicy( "/api/2.0/vector-search/endpoints/%s/budget-policy", request.getEndpointName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, PatchEndpointBudgetPolicyResponse.class); @@ -93,7 +93,7 @@ public UpdateEndpointCustomTagsResponse updateEndpointCustomTags( String.format("/api/2.0/vector-search/endpoints/%s/tags", request.getEndpointName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpdateEndpointCustomTagsResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java 
index 7933132f3..76c20db1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java @@ -21,7 +21,7 @@ public VectorIndex createIndex(CreateVectorIndexRequest request) { String path = "/api/2.0/vector-search/indexes"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, VectorIndex.class); @@ -36,7 +36,7 @@ public DeleteDataVectorIndexResponse deleteDataVectorIndex(DeleteDataVectorIndex String.format("/api/2.0/vector-search/indexes/%s/delete-data", request.getIndexName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, DeleteDataVectorIndexResponse.class); } catch (IOException e) { @@ -49,7 +49,7 @@ public void deleteIndex(DeleteIndexRequest request) { String path = String.format("/api/2.0/vector-search/indexes/%s", request.getIndexName()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteIndexResponse.class); } catch (IOException e) { @@ -62,7 +62,7 @@ public VectorIndex getIndex(GetIndexRequest request) { String path = String.format("/api/2.0/vector-search/indexes/%s", request.getIndexName()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, VectorIndex.class); } catch (IOException e) { @@ -75,7 +75,7 @@ public 
ListVectorIndexesResponse listIndexes(ListIndexesRequest request) { String path = "/api/2.0/vector-search/indexes"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListVectorIndexesResponse.class); } catch (IOException e) { @@ -88,7 +88,7 @@ public QueryVectorIndexResponse queryIndex(QueryVectorIndexRequest request) { String path = String.format("/api/2.0/vector-search/indexes/%s/query", request.getIndexName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, QueryVectorIndexResponse.class); @@ -103,7 +103,7 @@ public QueryVectorIndexResponse queryNextPage(QueryVectorIndexNextPageRequest re String.format("/api/2.0/vector-search/indexes/%s/query-next-page", request.getIndexName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, QueryVectorIndexResponse.class); @@ -117,7 +117,7 @@ public ScanVectorIndexResponse scanIndex(ScanVectorIndexRequest request) { String path = String.format("/api/2.0/vector-search/indexes/%s/scan", request.getIndexName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, ScanVectorIndexResponse.class); @@ -131,7 +131,7 @@ public void syncIndex(SyncIndexRequest request) { String path = 
String.format("/api/2.0/vector-search/indexes/%s/sync", request.getIndexName()); try { Request req = new Request("POST", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, SyncIndexResponse.class); } catch (IOException e) { @@ -145,7 +145,7 @@ public UpsertDataVectorIndexResponse upsertDataVectorIndex(UpsertDataVectorIndex String.format("/api/2.0/vector-search/indexes/%s/upsert-data", request.getIndexName()); try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, UpsertDataVectorIndexResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java index 932d9ace1..39f4078cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated 
+@JsonSerialize(using = AclItem.AclItemSerializer.class) +@JsonDeserialize(using = AclItem.AclItemDeserializer.class) public class AclItem { /** The permission level applied to the principal. */ - @JsonProperty("permission") private AclPermission permission; /** The principal in which the permission is applied. */ - @JsonProperty("principal") private String principal; public AclItem setPermission(AclPermission permission) { @@ -55,4 +64,39 @@ public String toString() { .add("principal", principal) .toString(); } + + AclItemPb toPb() { + AclItemPb pb = new AclItemPb(); + pb.setPermission(permission); + pb.setPrincipal(principal); + + return pb; + } + + static AclItem fromPb(AclItemPb pb) { + AclItem model = new AclItem(); + model.setPermission(pb.getPermission()); + model.setPrincipal(pb.getPrincipal()); + + return model; + } + + public static class AclItemSerializer extends JsonSerializer { + @Override + public void serialize(AclItem value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AclItemPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class AclItemDeserializer extends JsonDeserializer { + @Override + public AclItem deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AclItemPb pb = mapper.readValue(p, AclItemPb.class); + return AclItem.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItemPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItemPb.java new file mode 100755 index 000000000..efe89379a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItemPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AclItemPb { + @JsonProperty("permission") + private AclPermission permission; + + @JsonProperty("principal") + private String principal; + + public AclItemPb setPermission(AclPermission permission) { + this.permission = permission; + return this; + } + + public AclPermission getPermission() { + return permission; + } + + public AclItemPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AclItemPb that = (AclItemPb) o; + return Objects.equals(permission, that.permission) && Objects.equals(principal, that.principal); + } + + @Override + public int hashCode() { + return Objects.hash(permission, principal); + } + + @Override + public String toString() { + return new ToStringer(AclItemPb.class) + .add("permission", permission) + .add("principal", principal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java index a2acbeb21..f682b479f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java @@ -4,17 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = AzureKeyVaultSecretScopeMetadata.AzureKeyVaultSecretScopeMetadataSerializer.class) +@JsonDeserialize( + using = AzureKeyVaultSecretScopeMetadata.AzureKeyVaultSecretScopeMetadataDeserializer.class) public class AzureKeyVaultSecretScopeMetadata { /** The DNS of the KeyVault */ - @JsonProperty("dns_name") private String dnsName; /** The resource id of the azure KeyVault that user wants to associate the scope with. */ - @JsonProperty("resource_id") private String resourceId; public AzureKeyVaultSecretScopeMetadata setDnsName(String dnsName) { @@ -55,4 +66,44 @@ public String toString() { .add("resourceId", resourceId) .toString(); } + + AzureKeyVaultSecretScopeMetadataPb toPb() { + AzureKeyVaultSecretScopeMetadataPb pb = new AzureKeyVaultSecretScopeMetadataPb(); + pb.setDnsName(dnsName); + pb.setResourceId(resourceId); + + return pb; + } + + static AzureKeyVaultSecretScopeMetadata fromPb(AzureKeyVaultSecretScopeMetadataPb pb) { + AzureKeyVaultSecretScopeMetadata model = new AzureKeyVaultSecretScopeMetadata(); + model.setDnsName(pb.getDnsName()); + model.setResourceId(pb.getResourceId()); + + return model; + } + + public static class AzureKeyVaultSecretScopeMetadataSerializer + extends JsonSerializer { + @Override + public void serialize( + AzureKeyVaultSecretScopeMetadata value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + AzureKeyVaultSecretScopeMetadataPb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class AzureKeyVaultSecretScopeMetadataDeserializer + extends JsonDeserializer { + @Override + public AzureKeyVaultSecretScopeMetadata deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + AzureKeyVaultSecretScopeMetadataPb pb = + mapper.readValue(p, AzureKeyVaultSecretScopeMetadataPb.class); + return AzureKeyVaultSecretScopeMetadata.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadataPb.java new file mode 100755 index 000000000..c05c7e704 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadataPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class AzureKeyVaultSecretScopeMetadataPb { + @JsonProperty("dns_name") + private String dnsName; + + @JsonProperty("resource_id") + private String resourceId; + + public AzureKeyVaultSecretScopeMetadataPb setDnsName(String dnsName) { + this.dnsName = dnsName; + return this; + } + + public String getDnsName() { + return dnsName; + } + + public AzureKeyVaultSecretScopeMetadataPb setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureKeyVaultSecretScopeMetadataPb that = (AzureKeyVaultSecretScopeMetadataPb) o; + return Objects.equals(dnsName, that.dnsName) && Objects.equals(resourceId, that.resourceId); + } + + @Override + public int hashCode() { + return Objects.hash(dnsName, resourceId); + } + + @Override + public String toString() { + return new ToStringer(AzureKeyVaultSecretScopeMetadataPb.class) + .add("dnsName", dnsName) + .add("resourceId", resourceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Converters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Converters.java new file mode 100755 index 000000000..830f91d68 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Converters.java @@ -0,0 +1,59 @@ +package com.databricks.sdk.service.workspace; + +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; + +class Converters { + + private 
static final long MAX_SECONDS = 315576000000L; + private static final long MIN_SECONDS = -315576000000L; + + static Duration durationFromPb(com.google.protobuf.Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + return Duration.ofSeconds(duration.getSeconds(), duration.getNanos()); + } + + static com.google.protobuf.Duration durationToPb(Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); + // Validate that nanoseconds fit in an int + if (duration.getSeconds() < MIN_SECONDS || duration.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException("Duration seconds out of range: " + duration.getSeconds()); + } + return com.google.protobuf.Duration.newBuilder() + .setSeconds(duration.getSeconds()) + .setNanos(duration.getNano()) + .build(); + } + + static Timestamp instantToPb(Instant instant) { + Objects.requireNonNull(instant, "instant must not be null"); + return Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + } + + static Instant instantFromPb(Timestamp timestamp) { + Objects.requireNonNull(timestamp, "timestamp must not be null"); + // Validate that nanoseconds fit in an int + if (timestamp.getSeconds() < MIN_SECONDS || timestamp.getSeconds() > MAX_SECONDS) { + throw new IllegalArgumentException( + "Timestamp seconds out of range: " + timestamp.getSeconds()); + } + return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); + } + + static FieldMask fieldMaskToPb(List fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return FieldMask.newBuilder().addAllPaths(fieldMask).build(); + } + + static List fieldMaskFromPb(com.google.protobuf.FieldMask fieldMask) { + Objects.requireNonNull(fieldMask, "fieldMask must not be null"); + return fieldMask.getPathsList(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java index fb02f7bf7..fff9e7b9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java @@ -4,17 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCredentialsRequest.CreateCredentialsRequestSerializer.class) +@JsonDeserialize(using = CreateCredentialsRequest.CreateCredentialsRequestDeserializer.class) public class CreateCredentialsRequest { /** * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, * `gitLabEnterpriseEdition` and `awsCodeCommit`. */ - @JsonProperty("git_provider") private String gitProvider; /** @@ -25,7 +35,6 @@ public class CreateCredentialsRequest { * please see your provider's Personal Access Token authentication documentation to see what is * supported. */ - @JsonProperty("git_username") private String gitUsername; /** @@ -34,7 +43,6 @@ public class CreateCredentialsRequest { * *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html */ - @JsonProperty("personal_access_token") private String personalAccessToken; public CreateCredentialsRequest setGitProvider(String gitProvider) { @@ -87,4 +95,45 @@ public String toString() { .add("personalAccessToken", personalAccessToken) .toString(); } + + CreateCredentialsRequestPb toPb() { + CreateCredentialsRequestPb pb = new CreateCredentialsRequestPb(); + pb.setGitProvider(gitProvider); + pb.setGitUsername(gitUsername); + pb.setPersonalAccessToken(personalAccessToken); + + return pb; + } + + static CreateCredentialsRequest fromPb(CreateCredentialsRequestPb pb) { + CreateCredentialsRequest model = new CreateCredentialsRequest(); + model.setGitProvider(pb.getGitProvider()); + model.setGitUsername(pb.getGitUsername()); + model.setPersonalAccessToken(pb.getPersonalAccessToken()); + + return model; + } + + public static class CreateCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialsRequestPb pb = mapper.readValue(p, CreateCredentialsRequestPb.class); + return CreateCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequestPb.java new file mode 100755 index 000000000..c96805a93 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequestPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialsRequestPb { + @JsonProperty("git_provider") + private String gitProvider; + + @JsonProperty("git_username") + private String gitUsername; + + @JsonProperty("personal_access_token") + private String personalAccessToken; + + public CreateCredentialsRequestPb setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public CreateCredentialsRequestPb setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; + return this; + } + + public String getGitUsername() { + return gitUsername; + } + + public CreateCredentialsRequestPb setPersonalAccessToken(String personalAccessToken) { + this.personalAccessToken = personalAccessToken; + return this; + } + + public String getPersonalAccessToken() { + return personalAccessToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialsRequestPb that = (CreateCredentialsRequestPb) o; 
+ return Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername) + && Objects.equals(personalAccessToken, that.personalAccessToken); + } + + @Override + public int hashCode() { + return Objects.hash(gitProvider, gitUsername, personalAccessToken); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialsRequestPb.class) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .add("personalAccessToken", personalAccessToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java index d50aae0cb..66415aeea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateCredentialsResponse.CreateCredentialsResponseSerializer.class) +@JsonDeserialize(using = CreateCredentialsResponse.CreateCredentialsResponseDeserializer.class) public class CreateCredentialsResponse { /** ID of the 
credential object in the workspace. */ - @JsonProperty("credential_id") private Long credentialId; /** The Git provider associated with the credential. */ - @JsonProperty("git_provider") private String gitProvider; /** * The username or email provided with your Git provider account and associated with the * credential. */ - @JsonProperty("git_username") private String gitUsername; public CreateCredentialsResponse setCredentialId(Long credentialId) { @@ -74,4 +82,45 @@ public String toString() { .add("gitUsername", gitUsername) .toString(); } + + CreateCredentialsResponsePb toPb() { + CreateCredentialsResponsePb pb = new CreateCredentialsResponsePb(); + pb.setCredentialId(credentialId); + pb.setGitProvider(gitProvider); + pb.setGitUsername(gitUsername); + + return pb; + } + + static CreateCredentialsResponse fromPb(CreateCredentialsResponsePb pb) { + CreateCredentialsResponse model = new CreateCredentialsResponse(); + model.setCredentialId(pb.getCredentialId()); + model.setGitProvider(pb.getGitProvider()); + model.setGitUsername(pb.getGitUsername()); + + return model; + } + + public static class CreateCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + CreateCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public CreateCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateCredentialsResponsePb pb = mapper.readValue(p, CreateCredentialsResponsePb.class); + return CreateCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponsePb.java new file mode 100755 index 000000000..99dc08045 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateCredentialsResponsePb { + @JsonProperty("credential_id") + private Long credentialId; + + @JsonProperty("git_provider") + private String gitProvider; + + @JsonProperty("git_username") + private String gitUsername; + + public CreateCredentialsResponsePb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + public CreateCredentialsResponsePb setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public CreateCredentialsResponsePb setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; + return this; + } + + public String getGitUsername() { + return gitUsername; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialsResponsePb that = (CreateCredentialsResponsePb) o; + return Objects.equals(credentialId, 
that.credentialId) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId, gitProvider, gitUsername); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialsResponsePb.class) + .add("credentialId", credentialId) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java index 84cd6aa77..d1f1b5a75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRepoRequest.CreateRepoRequestSerializer.class) +@JsonDeserialize(using = CreateRepoRequest.CreateRepoRequestDeserializer.class) public class CreateRepoRequest { /** * Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. 
If * repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`. */ - @JsonProperty("path") private String path; /** @@ -21,18 +31,15 @@ public class CreateRepoRequest { * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, * `gitLabEnterpriseEdition` and `awsCodeCommit`. */ - @JsonProperty("provider") private String provider; /** * If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable * sparse checkout after the repo is created. */ - @JsonProperty("sparse_checkout") private SparseCheckout sparseCheckout; /** URL of the Git repository to be linked. */ - @JsonProperty("url") private String url; public CreateRepoRequest setPath(String path) { @@ -96,4 +103,44 @@ public String toString() { .add("url", url) .toString(); } + + CreateRepoRequestPb toPb() { + CreateRepoRequestPb pb = new CreateRepoRequestPb(); + pb.setPath(path); + pb.setProvider(provider); + pb.setSparseCheckout(sparseCheckout); + pb.setUrl(url); + + return pb; + } + + static CreateRepoRequest fromPb(CreateRepoRequestPb pb) { + CreateRepoRequest model = new CreateRepoRequest(); + model.setPath(pb.getPath()); + model.setProvider(pb.getProvider()); + model.setSparseCheckout(pb.getSparseCheckout()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class CreateRepoRequestSerializer extends JsonSerializer { + @Override + public void serialize(CreateRepoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRepoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRepoRequestDeserializer extends JsonDeserializer { + @Override + public CreateRepoRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRepoRequestPb pb = mapper.readValue(p, CreateRepoRequestPb.class); + return CreateRepoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequestPb.java new file mode 100755 index 000000000..043283935 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateRepoRequestPb { + @JsonProperty("path") + private String path; + + @JsonProperty("provider") + private String provider; + + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + @JsonProperty("url") + private String url; + + public CreateRepoRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public CreateRepoRequestPb setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public CreateRepoRequestPb setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public CreateRepoRequestPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRepoRequestPb that = (CreateRepoRequestPb) o; 
+ return Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(CreateRepoRequestPb.class) + .add("path", path) + .add("provider", provider) + .add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java index cb86465a7..f5e98f377 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateRepoResponse.CreateRepoResponseSerializer.class) +@JsonDeserialize(using = CreateRepoResponse.CreateRepoResponseDeserializer.class) public class CreateRepoResponse { /** Branch that the Git folder (repo) is checked out to. 
*/ - @JsonProperty("branch") private String branch; /** SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo). */ - @JsonProperty("head_commit_id") private String headCommitId; /** ID of the Git folder (repo) object in the workspace. */ - @JsonProperty("id") private Long id; /** Path of the Git folder (repo) in the workspace. */ - @JsonProperty("path") private String path; /** Git provider of the linked Git repository. */ - @JsonProperty("provider") private String provider; /** Sparse checkout settings for the Git folder (repo). */ - @JsonProperty("sparse_checkout") private SparseCheckout sparseCheckout; /** URL of the linked Git repository. */ - @JsonProperty("url") private String url; public CreateRepoResponse setBranch(String branch) { @@ -131,4 +135,50 @@ public String toString() { .add("url", url) .toString(); } + + CreateRepoResponsePb toPb() { + CreateRepoResponsePb pb = new CreateRepoResponsePb(); + pb.setBranch(branch); + pb.setHeadCommitId(headCommitId); + pb.setId(id); + pb.setPath(path); + pb.setProvider(provider); + pb.setSparseCheckout(sparseCheckout); + pb.setUrl(url); + + return pb; + } + + static CreateRepoResponse fromPb(CreateRepoResponsePb pb) { + CreateRepoResponse model = new CreateRepoResponse(); + model.setBranch(pb.getBranch()); + model.setHeadCommitId(pb.getHeadCommitId()); + model.setId(pb.getId()); + model.setPath(pb.getPath()); + model.setProvider(pb.getProvider()); + model.setSparseCheckout(pb.getSparseCheckout()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class CreateRepoResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateRepoResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateRepoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateRepoResponseDeserializer extends JsonDeserializer { + @Override + public CreateRepoResponse deserialize(JsonParser 
p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateRepoResponsePb pb = mapper.readValue(p, CreateRepoResponsePb.class); + return CreateRepoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponsePb.java new file mode 100755 index 000000000..3d5bd0874 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponsePb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateRepoResponsePb { + @JsonProperty("branch") + private String branch; + + @JsonProperty("head_commit_id") + private String headCommitId; + + @JsonProperty("id") + private Long id; + + @JsonProperty("path") + private String path; + + @JsonProperty("provider") + private String provider; + + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + @JsonProperty("url") + private String url; + + public CreateRepoResponsePb setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public CreateRepoResponsePb setHeadCommitId(String headCommitId) { + this.headCommitId = headCommitId; + return this; + } + + public String getHeadCommitId() { + return headCommitId; + } + + public CreateRepoResponsePb setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public CreateRepoResponsePb setPath(String path) { + this.path = path; + 
return this; + } + + public String getPath() { + return path; + } + + public CreateRepoResponsePb setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public CreateRepoResponsePb setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public CreateRepoResponsePb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRepoResponsePb that = (CreateRepoResponsePb) o; + return Objects.equals(branch, that.branch) + && Objects.equals(headCommitId, that.headCommitId) + && Objects.equals(id, that.id) + && Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(CreateRepoResponsePb.class) + .add("branch", branch) + .add("headCommitId", headCommitId) + .add("id", id) + .add("path", path) + .add("provider", provider) + .add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java index afee4ed59..3b544b634 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java @@ -4,27 +4,34 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateScope.CreateScopeSerializer.class) +@JsonDeserialize(using = CreateScope.CreateScopeDeserializer.class) public class CreateScope { /** The metadata for the secret scope if the type is `AZURE_KEYVAULT` */ - @JsonProperty("backend_azure_keyvault") private AzureKeyVaultSecretScopeMetadata backendAzureKeyvault; /** The principal that is initially granted `MANAGE` permission to the created scope. */ - @JsonProperty("initial_manage_principal") private String initialManagePrincipal; /** Scope name requested by the user. Scope names are unique. */ - @JsonProperty("scope") private String scope; /** * The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS` */ - @JsonProperty("scope_backend_type") private ScopeBackendType scopeBackendType; public CreateScope setBackendAzureKeyvault( @@ -89,4 +96,43 @@ public String toString() { .add("scopeBackendType", scopeBackendType) .toString(); } + + CreateScopePb toPb() { + CreateScopePb pb = new CreateScopePb(); + pb.setBackendAzureKeyvault(backendAzureKeyvault); + pb.setInitialManagePrincipal(initialManagePrincipal); + pb.setScope(scope); + pb.setScopeBackendType(scopeBackendType); + + return pb; + } + + static CreateScope fromPb(CreateScopePb pb) { + CreateScope model = new CreateScope(); + model.setBackendAzureKeyvault(pb.getBackendAzureKeyvault()); + model.setInitialManagePrincipal(pb.getInitialManagePrincipal()); + model.setScope(pb.getScope()); + model.setScopeBackendType(pb.getScopeBackendType()); + + return model; + } + + public static class CreateScopeSerializer extends JsonSerializer { + @Override + public void serialize(CreateScope value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateScopePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateScopeDeserializer extends JsonDeserializer { + @Override + public CreateScope deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateScopePb pb = mapper.readValue(p, CreateScopePb.class); + return CreateScope.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopePb.java new file mode 100755 index 000000000..52588f50a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopePb.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CreateScopePb { + @JsonProperty("backend_azure_keyvault") + private AzureKeyVaultSecretScopeMetadata backendAzureKeyvault; + + @JsonProperty("initial_manage_principal") + private String initialManagePrincipal; + + @JsonProperty("scope") + private String scope; + + @JsonProperty("scope_backend_type") + private ScopeBackendType scopeBackendType; + + public CreateScopePb setBackendAzureKeyvault( + AzureKeyVaultSecretScopeMetadata backendAzureKeyvault) { + this.backendAzureKeyvault = backendAzureKeyvault; + return this; + } + + public AzureKeyVaultSecretScopeMetadata getBackendAzureKeyvault() { + return backendAzureKeyvault; + } + + public CreateScopePb setInitialManagePrincipal(String initialManagePrincipal) { + this.initialManagePrincipal = initialManagePrincipal; + return this; + } + + public String getInitialManagePrincipal() { + return initialManagePrincipal; + } + + public CreateScopePb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + public CreateScopePb setScopeBackendType(ScopeBackendType scopeBackendType) { + this.scopeBackendType = 
scopeBackendType; + return this; + } + + public ScopeBackendType getScopeBackendType() { + return scopeBackendType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateScopePb that = (CreateScopePb) o; + return Objects.equals(backendAzureKeyvault, that.backendAzureKeyvault) + && Objects.equals(initialManagePrincipal, that.initialManagePrincipal) + && Objects.equals(scope, that.scope) + && Objects.equals(scopeBackendType, that.scopeBackendType); + } + + @Override + public int hashCode() { + return Objects.hash(backendAzureKeyvault, initialManagePrincipal, scope, scopeBackendType); + } + + @Override + public String toString() { + return new ToStringer(CreateScopePb.class) + .add("backendAzureKeyvault", backendAzureKeyvault) + .add("initialManagePrincipal", initialManagePrincipal) + .add("scope", scope) + .add("scopeBackendType", scopeBackendType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java index bbfc3ed9c..754e9464c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CreateScopeResponse.CreateScopeResponseSerializer.class) +@JsonDeserialize(using = CreateScopeResponse.CreateScopeResponseDeserializer.class) public class CreateScopeResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(CreateScopeResponse.class).toString(); } + + CreateScopeResponsePb toPb() { + CreateScopeResponsePb pb = new CreateScopeResponsePb(); + + return pb; + } + + static CreateScopeResponse fromPb(CreateScopeResponsePb pb) { + CreateScopeResponse model = new CreateScopeResponse(); + + return model; + } + + public static class CreateScopeResponseSerializer extends JsonSerializer { + @Override + public void serialize(CreateScopeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CreateScopeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CreateScopeResponseDeserializer + extends JsonDeserializer { + @Override + public CreateScopeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CreateScopeResponsePb pb = mapper.readValue(p, CreateScopeResponsePb.class); + return CreateScopeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponsePb.java new file mode 100755 index 000000000..1ede37ab5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class CreateScopeResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(CreateScopeResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java index ebb736500..b94fa3cc9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = CredentialInfo.CredentialInfoSerializer.class) +@JsonDeserialize(using = CredentialInfo.CredentialInfoDeserializer.class) public class CredentialInfo { /** ID of the credential object in the workspace. 
*/ - @JsonProperty("credential_id") private Long credentialId; /** The Git provider associated with the credential. */ - @JsonProperty("git_provider") private String gitProvider; /** * The username or email provided with your Git provider account and associated with the * credential. */ - @JsonProperty("git_username") private String gitUsername; public CredentialInfo setCredentialId(Long credentialId) { @@ -74,4 +82,42 @@ public String toString() { .add("gitUsername", gitUsername) .toString(); } + + CredentialInfoPb toPb() { + CredentialInfoPb pb = new CredentialInfoPb(); + pb.setCredentialId(credentialId); + pb.setGitProvider(gitProvider); + pb.setGitUsername(gitUsername); + + return pb; + } + + static CredentialInfo fromPb(CredentialInfoPb pb) { + CredentialInfo model = new CredentialInfo(); + model.setCredentialId(pb.getCredentialId()); + model.setGitProvider(pb.getGitProvider()); + model.setGitUsername(pb.getGitUsername()); + + return model; + } + + public static class CredentialInfoSerializer extends JsonSerializer { + @Override + public void serialize(CredentialInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + CredentialInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class CredentialInfoDeserializer extends JsonDeserializer { + @Override + public CredentialInfo deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + CredentialInfoPb pb = mapper.readValue(p, CredentialInfoPb.class); + return CredentialInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfoPb.java new file mode 100755 index 000000000..0af528b5f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfoPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class CredentialInfoPb { + @JsonProperty("credential_id") + private Long credentialId; + + @JsonProperty("git_provider") + private String gitProvider; + + @JsonProperty("git_username") + private String gitUsername; + + public CredentialInfoPb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + public CredentialInfoPb setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public CredentialInfoPb setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; + return this; + } + + public String getGitUsername() { + return gitUsername; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialInfoPb that = (CredentialInfoPb) o; + return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername); + } + + @Override + 
public int hashCode() { + return Objects.hash(credentialId, gitProvider, gitUsername); + } + + @Override + public String toString() { + return new ToStringer(CredentialInfoPb.class) + .add("credentialId", credentialId) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java index 8608d5880..1591c28f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Delete.DeleteSerializer.class) +@JsonDeserialize(using = Delete.DeleteDeserializer.class) public class Delete { /** The absolute path of the notebook or directory. */ - @JsonProperty("path") private String path; /** @@ -18,7 +28,6 @@ public class Delete { * Please note this deleting directory is not atomic. If it fails in the middle, some of objects * under this directory may be deleted and cannot be undone. 
*/ - @JsonProperty("recursive") private Boolean recursive; public Delete setPath(String path) { @@ -56,4 +65,39 @@ public int hashCode() { public String toString() { return new ToStringer(Delete.class).add("path", path).add("recursive", recursive).toString(); } + + DeletePb toPb() { + DeletePb pb = new DeletePb(); + pb.setPath(path); + pb.setRecursive(recursive); + + return pb; + } + + static Delete fromPb(DeletePb pb) { + Delete model = new Delete(); + model.setPath(pb.getPath()); + model.setRecursive(pb.getRecursive()); + + return model; + } + + public static class DeleteSerializer extends JsonSerializer { + @Override + public void serialize(Delete value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeletePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteDeserializer extends JsonDeserializer { + @Override + public Delete deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeletePb pb = mapper.readValue(p, DeletePb.class); + return Delete.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java index c47ba9219..38d1986e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteAcl.DeleteAclSerializer.class) +@JsonDeserialize(using = DeleteAcl.DeleteAclDeserializer.class) public class DeleteAcl { /** The principal to remove an existing ACL from. */ - @JsonProperty("principal") private String principal; /** The name of the scope to remove permissions from. 
*/ - @JsonProperty("scope") private String scope; public DeleteAcl setPrincipal(String principal) { @@ -55,4 +64,39 @@ public String toString() { .add("scope", scope) .toString(); } + + DeleteAclPb toPb() { + DeleteAclPb pb = new DeleteAclPb(); + pb.setPrincipal(principal); + pb.setScope(scope); + + return pb; + } + + static DeleteAcl fromPb(DeleteAclPb pb) { + DeleteAcl model = new DeleteAcl(); + model.setPrincipal(pb.getPrincipal()); + model.setScope(pb.getScope()); + + return model; + } + + public static class DeleteAclSerializer extends JsonSerializer { + @Override + public void serialize(DeleteAcl value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAclPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAclDeserializer extends JsonDeserializer { + @Override + public DeleteAcl deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAclPb pb = mapper.readValue(p, DeleteAclPb.class); + return DeleteAcl.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclPb.java new file mode 100755 index 000000000..f97bedf36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteAclPb { + @JsonProperty("principal") + private String principal; + + @JsonProperty("scope") + private String scope; + + public DeleteAclPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public DeleteAclPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAclPb that = (DeleteAclPb) o; + return Objects.equals(principal, that.principal) && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(principal, scope); + } + + @Override + public String toString() { + return new ToStringer(DeleteAclPb.class) + .add("principal", principal) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java index e02226127..b133949ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteAclResponse.DeleteAclResponseSerializer.class) +@JsonDeserialize(using = DeleteAclResponse.DeleteAclResponseDeserializer.class) public class DeleteAclResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteAclResponse.class).toString(); } + + DeleteAclResponsePb toPb() { + DeleteAclResponsePb pb = new DeleteAclResponsePb(); + + return pb; + } + + static DeleteAclResponse fromPb(DeleteAclResponsePb pb) { + DeleteAclResponse model = new DeleteAclResponse(); + + return model; + } + + public static class DeleteAclResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteAclResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteAclResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteAclResponseDeserializer extends JsonDeserializer { + @Override + public DeleteAclResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteAclResponsePb pb = mapper.readValue(p, DeleteAclResponsePb.class); + return DeleteAclResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponsePb.java new file mode 100755 index 000000000..78825f1dc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteAclResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteAclResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java index 103c730f8..211cf011e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a credential */ @Generated +@JsonSerialize(using = DeleteCredentialsRequest.DeleteCredentialsRequestSerializer.class) +@JsonDeserialize(using = DeleteCredentialsRequest.DeleteCredentialsRequestDeserializer.class) public class DeleteCredentialsRequest { /** The ID for the corresponding credential to access. */ - @JsonIgnore private Long credentialId; + private Long credentialId; public DeleteCredentialsRequest setCredentialId(Long credentialId) { this.credentialId = credentialId; @@ -41,4 +52,41 @@ public String toString() { .add("credentialId", credentialId) .toString(); } + + DeleteCredentialsRequestPb toPb() { + DeleteCredentialsRequestPb pb = new DeleteCredentialsRequestPb(); + pb.setCredentialId(credentialId); + + return pb; + } + + static DeleteCredentialsRequest fromPb(DeleteCredentialsRequestPb pb) { + DeleteCredentialsRequest model = new DeleteCredentialsRequest(); + model.setCredentialId(pb.getCredentialId()); + + return model; + } + + public static class DeleteCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public DeleteCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCredentialsRequestPb pb = mapper.readValue(p, DeleteCredentialsRequestPb.class); + return DeleteCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequestPb.java new file mode 100755 index 000000000..0c56ffce7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a credential */ +@Generated +class DeleteCredentialsRequestPb { + @JsonIgnore private Long credentialId; + + public DeleteCredentialsRequestPb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCredentialsRequestPb that = (DeleteCredentialsRequestPb) o; + return Objects.equals(credentialId, that.credentialId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialsRequestPb.class) + .add("credentialId", credentialId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java index 3b1fb2ec7..b2acdb2fc 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteCredentialsResponse.DeleteCredentialsResponseSerializer.class) +@JsonDeserialize(using = DeleteCredentialsResponse.DeleteCredentialsResponseDeserializer.class) public class DeleteCredentialsResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteCredentialsResponse.class).toString(); } + + DeleteCredentialsResponsePb toPb() { + DeleteCredentialsResponsePb pb = new DeleteCredentialsResponsePb(); + + return pb; + } + + static DeleteCredentialsResponse fromPb(DeleteCredentialsResponsePb pb) { + DeleteCredentialsResponse model = new DeleteCredentialsResponse(); + + return model; + } + + public static class DeleteCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + DeleteCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteCredentialsResponseDeserializer + extends JsonDeserializer 
{ + @Override + public DeleteCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteCredentialsResponsePb pb = mapper.readValue(p, DeleteCredentialsResponsePb.class); + return DeleteCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponsePb.java new file mode 100755 index 000000000..504401fde --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteCredentialsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeletePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeletePb.java new file mode 100755 index 000000000..9d08d3a97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeletePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeletePb { + @JsonProperty("path") + private String path; + + @JsonProperty("recursive") + private Boolean recursive; + + public DeletePb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public DeletePb setRecursive(Boolean recursive) { + this.recursive = recursive; + return this; + } + + public Boolean getRecursive() { + return recursive; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeletePb that = (DeletePb) o; + return Objects.equals(path, that.path) && Objects.equals(recursive, that.recursive); + } + + @Override + public int hashCode() { + return Objects.hash(path, recursive); + } + + @Override + public String toString() { + return new ToStringer(DeletePb.class).add("path", path).add("recursive", recursive).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java index ceeaf005e..39aa48a27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Delete a repo */ @Generated +@JsonSerialize(using = DeleteRepoRequest.DeleteRepoRequestSerializer.class) +@JsonDeserialize(using = DeleteRepoRequest.DeleteRepoRequestDeserializer.class) public class DeleteRepoRequest { /** The ID for the corresponding repo to delete. */ - @JsonIgnore private Long repoId; + private Long repoId; public DeleteRepoRequest setRepoId(Long repoId) { this.repoId = repoId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRepoRequest.class).add("repoId", repoId).toString(); } + + DeleteRepoRequestPb toPb() { + DeleteRepoRequestPb pb = new DeleteRepoRequestPb(); + pb.setRepoId(repoId); + + return pb; + } + + static DeleteRepoRequest fromPb(DeleteRepoRequestPb pb) { + DeleteRepoRequest model = new DeleteRepoRequest(); + model.setRepoId(pb.getRepoId()); + + return model; + } + + public static class DeleteRepoRequestSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRepoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRepoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRepoRequestDeserializer extends JsonDeserializer { + @Override + public DeleteRepoRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRepoRequestPb pb = mapper.readValue(p, DeleteRepoRequestPb.class); + return DeleteRepoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequestPb.java new file mode 100755 index 000000000..14ae6e1e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a repo */ +@Generated +class DeleteRepoRequestPb { + @JsonIgnore private Long repoId; + + public DeleteRepoRequestPb setRepoId(Long repoId) { + this.repoId = repoId; + return this; + } + + public Long getRepoId() { + return repoId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRepoRequestPb that = (DeleteRepoRequestPb) o; + return Objects.equals(repoId, that.repoId); + } + + @Override + public int hashCode() { + return Objects.hash(repoId); + } + + @Override + public String toString() { + return new ToStringer(DeleteRepoRequestPb.class).add("repoId", repoId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java index 2fafce6f5..0fc723e31 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteRepoResponse.DeleteRepoResponseSerializer.class) +@JsonDeserialize(using = DeleteRepoResponse.DeleteRepoResponseDeserializer.class) public class DeleteRepoResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteRepoResponse.class).toString(); } + + DeleteRepoResponsePb toPb() { + DeleteRepoResponsePb pb = new DeleteRepoResponsePb(); + + return pb; + } + + static DeleteRepoResponse fromPb(DeleteRepoResponsePb pb) { + DeleteRepoResponse model = new DeleteRepoResponse(); + + return model; + } + + public static class DeleteRepoResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteRepoResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteRepoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteRepoResponseDeserializer extends JsonDeserializer { + @Override + public DeleteRepoResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteRepoResponsePb pb = mapper.readValue(p, DeleteRepoResponsePb.class); + return DeleteRepoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponsePb.java new file mode 100755 index 000000000..1b8b52bfc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteRepoResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteRepoResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java index f0b7586b9..e94f9e252 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteResponse.DeleteResponseSerializer.class) +@JsonDeserialize(using = DeleteResponse.DeleteResponseDeserializer.class) public class DeleteResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteResponse.class).toString(); } + + DeleteResponsePb toPb() { + DeleteResponsePb pb = new DeleteResponsePb(); + + return pb; + } + + static DeleteResponse fromPb(DeleteResponsePb pb) { + DeleteResponse model = new DeleteResponse(); + + return model; + } + + public static class DeleteResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteResponseDeserializer extends JsonDeserializer { + @Override + public DeleteResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteResponsePb pb = mapper.readValue(p, DeleteResponsePb.class); + return DeleteResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponsePb.java new file mode 100755 index 000000000..173f6a305 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java index e1ff41ce8..af63788b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java @@ -4,13 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteScope.DeleteScopeSerializer.class) +@JsonDeserialize(using = DeleteScope.DeleteScopeDeserializer.class) public class DeleteScope { /** Name of the scope to delete. */ - @JsonProperty("scope") private String scope; public DeleteScope setScope(String scope) { @@ -39,4 +49,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteScope.class).add("scope", scope).toString(); } + + DeleteScopePb toPb() { + DeleteScopePb pb = new DeleteScopePb(); + pb.setScope(scope); + + return pb; + } + + static DeleteScope fromPb(DeleteScopePb pb) { + DeleteScope model = new DeleteScope(); + model.setScope(pb.getScope()); + + return model; + } + + public static class DeleteScopeSerializer extends JsonSerializer { + @Override + public void serialize(DeleteScope value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteScopePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteScopeDeserializer extends JsonDeserializer { + @Override + public DeleteScope deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteScopePb pb = mapper.readValue(p, DeleteScopePb.class); + return DeleteScope.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopePb.java new file mode 100755 index 000000000..8e53653db --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopePb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteScopePb { + @JsonProperty("scope") + private String scope; + + public DeleteScopePb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteScopePb that = (DeleteScopePb) o; + return Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(scope); + } + + @Override + public String toString() { + return new ToStringer(DeleteScopePb.class).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java index 4aa4c998b..8218b68d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java @@ -4,9 +4,21 @@ import 
com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteScopeResponse.DeleteScopeResponseSerializer.class) +@JsonDeserialize(using = DeleteScopeResponse.DeleteScopeResponseDeserializer.class) public class DeleteScopeResponse { @Override @@ -25,4 +37,37 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteScopeResponse.class).toString(); } + + DeleteScopeResponsePb toPb() { + DeleteScopeResponsePb pb = new DeleteScopeResponsePb(); + + return pb; + } + + static DeleteScopeResponse fromPb(DeleteScopeResponsePb pb) { + DeleteScopeResponse model = new DeleteScopeResponse(); + + return model; + } + + public static class DeleteScopeResponseSerializer extends JsonSerializer { + @Override + public void serialize(DeleteScopeResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteScopeResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteScopeResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteScopeResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteScopeResponsePb pb = mapper.readValue(p, DeleteScopeResponsePb.class); + return DeleteScopeResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponsePb.java new file mode 100755 index 000000000..c08363f8f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteScopeResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteScopeResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java index effc58997..88f3d8560 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteSecret.DeleteSecretSerializer.class) +@JsonDeserialize(using = DeleteSecret.DeleteSecretDeserializer.class) public class DeleteSecret { /** Name of the secret to delete. */ - @JsonProperty("key") private String key; /** The name of the scope that contains the secret to delete. */ - @JsonProperty("scope") private String scope; public DeleteSecret setKey(String key) { @@ -52,4 +61,39 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteSecret.class).add("key", key).add("scope", scope).toString(); } + + DeleteSecretPb toPb() { + DeleteSecretPb pb = new DeleteSecretPb(); + pb.setKey(key); + pb.setScope(scope); + + return pb; + } + + static DeleteSecret fromPb(DeleteSecretPb pb) { + DeleteSecret model = new DeleteSecret(); + model.setKey(pb.getKey()); + model.setScope(pb.getScope()); + + return model; + } + + public static class DeleteSecretSerializer extends JsonSerializer { + @Override + public void serialize(DeleteSecret value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSecretPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSecretDeserializer extends JsonDeserializer { + @Override + public DeleteSecret deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSecretPb pb = mapper.readValue(p, DeleteSecretPb.class); + return DeleteSecret.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretPb.java new file mode 100755 index 000000000..490799e9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretPb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class DeleteSecretPb { + @JsonProperty("key") + private String key; + + @JsonProperty("scope") + private String scope; + + public DeleteSecretPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public DeleteSecretPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSecretPb that = (DeleteSecretPb) o; + return Objects.equals(key, that.key) && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(key, scope); + } + + @Override + public String toString() { + return new ToStringer(DeleteSecretPb.class).add("key", key).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java index 1ebe0ea0f..6019ac55e 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = DeleteSecretResponse.DeleteSecretResponseSerializer.class) +@JsonDeserialize(using = DeleteSecretResponse.DeleteSecretResponseDeserializer.class) public class DeleteSecretResponse { @Override @@ -25,4 +37,38 @@ public int hashCode() { public String toString() { return new ToStringer(DeleteSecretResponse.class).toString(); } + + DeleteSecretResponsePb toPb() { + DeleteSecretResponsePb pb = new DeleteSecretResponsePb(); + + return pb; + } + + static DeleteSecretResponse fromPb(DeleteSecretResponsePb pb) { + DeleteSecretResponse model = new DeleteSecretResponse(); + + return model; + } + + public static class DeleteSecretResponseSerializer extends JsonSerializer { + @Override + public void serialize( + DeleteSecretResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + DeleteSecretResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class DeleteSecretResponseDeserializer + extends JsonDeserializer { + @Override + public DeleteSecretResponse deserialize(JsonParser p, DeserializationContext 
ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + DeleteSecretResponsePb pb = mapper.readValue(p, DeleteSecretResponsePb.class); + return DeleteSecretResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponsePb.java new file mode 100755 index 000000000..c24eb7b03 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class DeleteSecretResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteSecretResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java index c01ad34b4..bf3679b4e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java @@ -3,13 +3,23 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Export a workspace object */ @Generated +@JsonSerialize(using = ExportRequest.ExportRequestSerializer.class) +@JsonDeserialize(using = ExportRequest.ExportRequestDeserializer.class) public class ExportRequest { /** * This specifies the format of the exported file. By default, this is `SOURCE`. @@ -24,16 +34,12 @@ public class ExportRequest { * is exported depending on the objects type. Directory exports will include notebooks and * workspace files. */ - @JsonIgnore - @QueryParam("format") private ExportFormat format; /** * The absolute path of the object or directory. Exporting a directory is only supported for the * `DBC`, `SOURCE`, and `AUTO` format. 
*/ - @JsonIgnore - @QueryParam("path") private String path; public ExportRequest setFormat(ExportFormat format) { @@ -71,4 +77,39 @@ public int hashCode() { public String toString() { return new ToStringer(ExportRequest.class).add("format", format).add("path", path).toString(); } + + ExportRequestPb toPb() { + ExportRequestPb pb = new ExportRequestPb(); + pb.setFormat(format); + pb.setPath(path); + + return pb; + } + + static ExportRequest fromPb(ExportRequestPb pb) { + ExportRequest model = new ExportRequest(); + model.setFormat(pb.getFormat()); + model.setPath(pb.getPath()); + + return model; + } + + public static class ExportRequestSerializer extends JsonSerializer { + @Override + public void serialize(ExportRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExportRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportRequestDeserializer extends JsonDeserializer { + @Override + public ExportRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportRequestPb pb = mapper.readValue(p, ExportRequestPb.class); + return ExportRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequestPb.java new file mode 100755 index 000000000..d3b77a0bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Export a workspace object */ +@Generated +class ExportRequestPb { + @JsonIgnore + @QueryParam("format") + private ExportFormat format; + + @JsonIgnore + @QueryParam("path") + private String path; + + public ExportRequestPb setFormat(ExportFormat format) { + this.format = format; + return this; + } + + public ExportFormat getFormat() { + return format; + } + + public ExportRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportRequestPb that = (ExportRequestPb) o; + return Objects.equals(format, that.format) && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(format, path); + } + + @Override + public String toString() { + return new ToStringer(ExportRequestPb.class).add("format", format).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java index 61360554b..f3bf60546 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** @@ -12,16 +21,16 @@ * returned by this endpoint. */ @Generated +@JsonSerialize(using = ExportResponse.ExportResponseSerializer.class) +@JsonDeserialize(using = ExportResponse.ExportResponseDeserializer.class) public class ExportResponse { /** * The base64-encoded content. If the limit (10MB) is exceeded, exception with error code * **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. */ - @JsonProperty("content") private String content; /** The file type of the exported file. */ - @JsonProperty("file_type") private String fileType; public ExportResponse setContent(String content) { @@ -62,4 +71,40 @@ public String toString() { .add("fileType", fileType) .toString(); } + + ExportResponsePb toPb() { + ExportResponsePb pb = new ExportResponsePb(); + pb.setContent(content); + pb.setFileType(fileType); + + return pb; + } + + static ExportResponse fromPb(ExportResponsePb pb) { + ExportResponse model = new ExportResponse(); + model.setContent(pb.getContent()); + model.setFileType(pb.getFileType()); + + return model; + } + + public static class ExportResponseSerializer extends JsonSerializer { + @Override + public void serialize(ExportResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ExportResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ExportResponseDeserializer extends JsonDeserializer { + @Override + public ExportResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us 
in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ExportResponsePb pb = mapper.readValue(p, ExportResponsePb.class); + return ExportResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponsePb.java new file mode 100755 index 000000000..533cb1806 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponsePb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The request field `direct_download` determines whether a JSON response or binary contents are + * returned by this endpoint. 
+ */ +@Generated +class ExportResponsePb { + @JsonProperty("content") + private String content; + + @JsonProperty("file_type") + private String fileType; + + public ExportResponsePb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public ExportResponsePb setFileType(String fileType) { + this.fileType = fileType; + return this; + } + + public String getFileType() { + return fileType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportResponsePb that = (ExportResponsePb) o; + return Objects.equals(content, that.content) && Objects.equals(fileType, that.fileType); + } + + @Override + public int hashCode() { + return Objects.hash(content, fileType); + } + + @Override + public String toString() { + return new ToStringer(ExportResponsePb.class) + .add("content", content) + .add("fileType", fileType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java index 47c2bd0ce..e48f37b57 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get secret ACL details */ @Generated +@JsonSerialize(using = GetAclRequest.GetAclRequestSerializer.class) +@JsonDeserialize(using = GetAclRequest.GetAclRequestDeserializer.class) public class GetAclRequest { /** The principal to fetch ACL information for. */ - @JsonIgnore - @QueryParam("principal") private String principal; /** The name of the scope to fetch ACL information from. */ - @JsonIgnore - @QueryParam("scope") private String scope; public GetAclRequest setPrincipal(String principal) { @@ -59,4 +65,39 @@ public String toString() { .add("scope", scope) .toString(); } + + GetAclRequestPb toPb() { + GetAclRequestPb pb = new GetAclRequestPb(); + pb.setPrincipal(principal); + pb.setScope(scope); + + return pb; + } + + static GetAclRequest fromPb(GetAclRequestPb pb) { + GetAclRequest model = new GetAclRequest(); + model.setPrincipal(pb.getPrincipal()); + model.setScope(pb.getScope()); + + return model; + } + + public static class GetAclRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetAclRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetAclRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetAclRequestDeserializer extends JsonDeserializer { + @Override + public GetAclRequest deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetAclRequestPb pb = mapper.readValue(p, GetAclRequestPb.class); + return GetAclRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequestPb.java new file mode 100755 index 000000000..ee596fd60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequestPb.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get secret ACL details */ +@Generated +class GetAclRequestPb { + @JsonIgnore + @QueryParam("principal") + private String principal; + + @JsonIgnore + @QueryParam("scope") + private String scope; + + public GetAclRequestPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public GetAclRequestPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAclRequestPb that = (GetAclRequestPb) o; + return Objects.equals(principal, that.principal) && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(principal, scope); + } + + @Override + public String toString() { + return new ToStringer(GetAclRequestPb.class) + .add("principal", principal) + .add("scope", scope) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java index 2dea34f1e..cd4c91868 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a credential entry */ @Generated +@JsonSerialize(using = GetCredentialsRequest.GetCredentialsRequestSerializer.class) +@JsonDeserialize(using = GetCredentialsRequest.GetCredentialsRequestDeserializer.class) public class GetCredentialsRequest { /** The ID for the corresponding credential to access. 
*/ - @JsonIgnore private Long credentialId; + private Long credentialId; public GetCredentialsRequest setCredentialId(Long credentialId) { this.credentialId = credentialId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetCredentialsRequest.class).add("credentialId", credentialId).toString(); } + + GetCredentialsRequestPb toPb() { + GetCredentialsRequestPb pb = new GetCredentialsRequestPb(); + pb.setCredentialId(credentialId); + + return pb; + } + + static GetCredentialsRequest fromPb(GetCredentialsRequestPb pb) { + GetCredentialsRequest model = new GetCredentialsRequest(); + model.setCredentialId(pb.getCredentialId()); + + return model; + } + + public static class GetCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public GetCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCredentialsRequestPb pb = mapper.readValue(p, GetCredentialsRequestPb.class); + return GetCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequestPb.java new file mode 100755 index 000000000..e4cf5a23a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequestPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a credential entry */ +@Generated +class GetCredentialsRequestPb { + @JsonIgnore private Long credentialId; + + public GetCredentialsRequestPb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialsRequestPb that = (GetCredentialsRequestPb) o; + return Objects.equals(credentialId, that.credentialId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsRequestPb.class) + .add("credentialId", credentialId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java index ef4da2906..431640d58 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetCredentialsResponse.GetCredentialsResponseSerializer.class) +@JsonDeserialize(using = GetCredentialsResponse.GetCredentialsResponseDeserializer.class) public class GetCredentialsResponse { /** ID of the credential object in the workspace. */ - @JsonProperty("credential_id") private Long credentialId; /** The Git provider associated with the credential. */ - @JsonProperty("git_provider") private String gitProvider; /** * The username or email provided with your Git provider account and associated with the * credential. */ - @JsonProperty("git_username") private String gitUsername; public GetCredentialsResponse setCredentialId(Long credentialId) { @@ -74,4 +82,45 @@ public String toString() { .add("gitUsername", gitUsername) .toString(); } + + GetCredentialsResponsePb toPb() { + GetCredentialsResponsePb pb = new GetCredentialsResponsePb(); + pb.setCredentialId(credentialId); + pb.setGitProvider(gitProvider); + pb.setGitUsername(gitUsername); + + return pb; + } + + static GetCredentialsResponse fromPb(GetCredentialsResponsePb pb) { + GetCredentialsResponse model = new GetCredentialsResponse(); + model.setCredentialId(pb.getCredentialId()); + model.setGitProvider(pb.getGitProvider()); + model.setGitUsername(pb.getGitUsername()); + + return model; + } + + public static class GetCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public GetCredentialsResponse 
deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetCredentialsResponsePb pb = mapper.readValue(p, GetCredentialsResponsePb.class); + return GetCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponsePb.java new file mode 100755 index 000000000..094cd81ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponsePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetCredentialsResponsePb { + @JsonProperty("credential_id") + private Long credentialId; + + @JsonProperty("git_provider") + private String gitProvider; + + @JsonProperty("git_username") + private String gitUsername; + + public GetCredentialsResponsePb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + public GetCredentialsResponsePb setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public GetCredentialsResponsePb setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; + return this; + } + + public String getGitUsername() { + return gitUsername; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + GetCredentialsResponsePb that = (GetCredentialsResponsePb) o; + return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId, gitProvider, gitUsername); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsResponsePb.class) + .add("credentialId", credentialId) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequest.java index 6fc04024e..c39d36e6e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequest.java @@ -4,14 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get repo permission levels */ @Generated +@JsonSerialize( + using = GetRepoPermissionLevelsRequest.GetRepoPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = 
GetRepoPermissionLevelsRequest.GetRepoPermissionLevelsRequestDeserializer.class) public class GetRepoPermissionLevelsRequest { /** The repo for which to get or manage permissions. */ - @JsonIgnore private String repoId; + private String repoId; public GetRepoPermissionLevelsRequest setRepoId(String repoId) { this.repoId = repoId; @@ -39,4 +52,42 @@ public int hashCode() { public String toString() { return new ToStringer(GetRepoPermissionLevelsRequest.class).add("repoId", repoId).toString(); } + + GetRepoPermissionLevelsRequestPb toPb() { + GetRepoPermissionLevelsRequestPb pb = new GetRepoPermissionLevelsRequestPb(); + pb.setRepoId(repoId); + + return pb; + } + + static GetRepoPermissionLevelsRequest fromPb(GetRepoPermissionLevelsRequestPb pb) { + GetRepoPermissionLevelsRequest model = new GetRepoPermissionLevelsRequest(); + model.setRepoId(pb.getRepoId()); + + return model; + } + + public static class GetRepoPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRepoPermissionLevelsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRepoPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRepoPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetRepoPermissionLevelsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRepoPermissionLevelsRequestPb pb = + mapper.readValue(p, GetRepoPermissionLevelsRequestPb.class); + return GetRepoPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequestPb.java new file mode 100755 index 000000000..22657f07f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get repo permission levels */ +@Generated +class GetRepoPermissionLevelsRequestPb { + @JsonIgnore private String repoId; + + public GetRepoPermissionLevelsRequestPb setRepoId(String repoId) { + this.repoId = repoId; + return this; + } + + public String getRepoId() { + return repoId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoPermissionLevelsRequestPb that = (GetRepoPermissionLevelsRequestPb) o; + return Objects.equals(repoId, that.repoId); + } + + @Override + public int hashCode() { + return Objects.hash(repoId); + } + + @Override + public String toString() { + return new ToStringer(GetRepoPermissionLevelsRequestPb.class).add("repoId", repoId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java index 77fd943c6..1a3aacd35 
100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java @@ -4,14 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = GetRepoPermissionLevelsResponse.GetRepoPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = GetRepoPermissionLevelsResponse.GetRepoPermissionLevelsResponseDeserializer.class) public class GetRepoPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetRepoPermissionLevelsResponse setPermissionLevels( @@ -43,4 +55,42 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetRepoPermissionLevelsResponsePb toPb() { + GetRepoPermissionLevelsResponsePb pb = new GetRepoPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetRepoPermissionLevelsResponse fromPb(GetRepoPermissionLevelsResponsePb pb) { + GetRepoPermissionLevelsResponse model = new GetRepoPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public 
static class GetRepoPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRepoPermissionLevelsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRepoPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRepoPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetRepoPermissionLevelsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRepoPermissionLevelsResponsePb pb = + mapper.readValue(p, GetRepoPermissionLevelsResponsePb.class); + return GetRepoPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponsePb.java new file mode 100755 index 000000000..ab4a5919b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponsePb.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetRepoPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetRepoPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoPermissionLevelsResponsePb that = (GetRepoPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetRepoPermissionLevelsResponsePb.class) + .add("permissionLevels", permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java index b37e72f6e..f7dbd2156 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get repo permissions */ @Generated +@JsonSerialize(using = GetRepoPermissionsRequest.GetRepoPermissionsRequestSerializer.class) +@JsonDeserialize(using = GetRepoPermissionsRequest.GetRepoPermissionsRequestDeserializer.class) public class GetRepoPermissionsRequest { /** The repo for which to get or manage permissions. */ - @JsonIgnore private String repoId; + private String repoId; public GetRepoPermissionsRequest setRepoId(String repoId) { this.repoId = repoId; @@ -39,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(GetRepoPermissionsRequest.class).add("repoId", repoId).toString(); } + + GetRepoPermissionsRequestPb toPb() { + GetRepoPermissionsRequestPb pb = new GetRepoPermissionsRequestPb(); + pb.setRepoId(repoId); + + return pb; + } + + static GetRepoPermissionsRequest fromPb(GetRepoPermissionsRequestPb pb) { + GetRepoPermissionsRequest model = new GetRepoPermissionsRequest(); + model.setRepoId(pb.getRepoId()); + + return model; + } + + public static class GetRepoPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetRepoPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRepoPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRepoPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetRepoPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is 
an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRepoPermissionsRequestPb pb = mapper.readValue(p, GetRepoPermissionsRequestPb.class); + return GetRepoPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequestPb.java new file mode 100755 index 000000000..302d7d493 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get repo permissions */ +@Generated +class GetRepoPermissionsRequestPb { + @JsonIgnore private String repoId; + + public GetRepoPermissionsRequestPb setRepoId(String repoId) { + this.repoId = repoId; + return this; + } + + public String getRepoId() { + return repoId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoPermissionsRequestPb that = (GetRepoPermissionsRequestPb) o; + return Objects.equals(repoId, that.repoId); + } + + @Override + public int hashCode() { + return Objects.hash(repoId); + } + + @Override + public String toString() { + return new ToStringer(GetRepoPermissionsRequestPb.class).add("repoId", repoId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java index a241caa4d..ad0f1bc0f 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java @@ -4,14 +4,25 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a repo */ @Generated +@JsonSerialize(using = GetRepoRequest.GetRepoRequestSerializer.class) +@JsonDeserialize(using = GetRepoRequest.GetRepoRequestDeserializer.class) public class GetRepoRequest { /** ID of the Git folder (repo) object in the workspace. 
*/ - @JsonIgnore private Long repoId; + private Long repoId; public GetRepoRequest setRepoId(Long repoId) { this.repoId = repoId; @@ -39,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetRepoRequest.class).add("repoId", repoId).toString(); } + + GetRepoRequestPb toPb() { + GetRepoRequestPb pb = new GetRepoRequestPb(); + pb.setRepoId(repoId); + + return pb; + } + + static GetRepoRequest fromPb(GetRepoRequestPb pb) { + GetRepoRequest model = new GetRepoRequest(); + model.setRepoId(pb.getRepoId()); + + return model; + } + + public static class GetRepoRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetRepoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRepoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRepoRequestDeserializer extends JsonDeserializer { + @Override + public GetRepoRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRepoRequestPb pb = mapper.readValue(p, GetRepoRequestPb.class); + return GetRepoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequestPb.java new file mode 100755 index 000000000..582ae2b70 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequestPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a repo */ +@Generated +class GetRepoRequestPb { + @JsonIgnore private Long repoId; + + public GetRepoRequestPb setRepoId(Long repoId) { + this.repoId = repoId; + return this; + } + + public Long getRepoId() { + return repoId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoRequestPb that = (GetRepoRequestPb) o; + return Objects.equals(repoId, that.repoId); + } + + @Override + public int hashCode() { + return Objects.hash(repoId); + } + + @Override + public String toString() { + return new ToStringer(GetRepoRequestPb.class).add("repoId", repoId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java index a34c0cc98..e4a59fe8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java @@ -4,37 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetRepoResponse.GetRepoResponseSerializer.class) +@JsonDeserialize(using = GetRepoResponse.GetRepoResponseDeserializer.class) public class GetRepoResponse { /** Branch that the local version of the repo is checked out to. */ - @JsonProperty("branch") private String branch; /** SHA-1 hash representing the commit ID of the current HEAD of the repo. */ - @JsonProperty("head_commit_id") private String headCommitId; /** ID of the Git folder (repo) object in the workspace. */ - @JsonProperty("id") private Long id; /** Path of the Git folder (repo) in the workspace. */ - @JsonProperty("path") private String path; /** Git provider of the linked Git repository. */ - @JsonProperty("provider") private String provider; /** Sparse checkout settings for the Git folder (repo). */ - @JsonProperty("sparse_checkout") private SparseCheckout sparseCheckout; /** URL of the linked Git repository. 
*/ - @JsonProperty("url") private String url; public GetRepoResponse setBranch(String branch) { @@ -131,4 +135,50 @@ public String toString() { .add("url", url) .toString(); } + + GetRepoResponsePb toPb() { + GetRepoResponsePb pb = new GetRepoResponsePb(); + pb.setBranch(branch); + pb.setHeadCommitId(headCommitId); + pb.setId(id); + pb.setPath(path); + pb.setProvider(provider); + pb.setSparseCheckout(sparseCheckout); + pb.setUrl(url); + + return pb; + } + + static GetRepoResponse fromPb(GetRepoResponsePb pb) { + GetRepoResponse model = new GetRepoResponse(); + model.setBranch(pb.getBranch()); + model.setHeadCommitId(pb.getHeadCommitId()); + model.setId(pb.getId()); + model.setPath(pb.getPath()); + model.setProvider(pb.getProvider()); + model.setSparseCheckout(pb.getSparseCheckout()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class GetRepoResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetRepoResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetRepoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetRepoResponseDeserializer extends JsonDeserializer { + @Override + public GetRepoResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetRepoResponsePb pb = mapper.readValue(p, GetRepoResponsePb.class); + return GetRepoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponsePb.java new file mode 100755 index 000000000..937448b7e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponsePb.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetRepoResponsePb { + @JsonProperty("branch") + private String branch; + + @JsonProperty("head_commit_id") + private String headCommitId; + + @JsonProperty("id") + private Long id; + + @JsonProperty("path") + private String path; + + @JsonProperty("provider") + private String provider; + + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + @JsonProperty("url") + private String url; + + public GetRepoResponsePb setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public GetRepoResponsePb setHeadCommitId(String headCommitId) { + this.headCommitId = headCommitId; + return this; + } + + public String getHeadCommitId() { + return headCommitId; + } + + public GetRepoResponsePb setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public GetRepoResponsePb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public GetRepoResponsePb setProvider(String provider) { + this.provider = provider; + return this; 
+ } + + public String getProvider() { + return provider; + } + + public GetRepoResponsePb setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public GetRepoResponsePb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoResponsePb that = (GetRepoResponsePb) o; + return Objects.equals(branch, that.branch) + && Objects.equals(headCommitId, that.headCommitId) + && Objects.equals(id, that.id) + && Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(GetRepoResponsePb.class) + .add("branch", branch) + .add("headCommitId", headCommitId) + .add("id", id) + .add("path", path) + .add("provider", provider) + .add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java index db04b0bf4..9ddd9f2be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get a secret */ @Generated +@JsonSerialize(using = GetSecretRequest.GetSecretRequestSerializer.class) +@JsonDeserialize(using = GetSecretRequest.GetSecretRequestDeserializer.class) public class GetSecretRequest { /** The key to fetch secret for. */ - @JsonIgnore - @QueryParam("key") private String key; /** The name of the scope to fetch secret information from. */ - @JsonIgnore - @QueryParam("scope") private String scope; public GetSecretRequest setKey(String key) { @@ -56,4 +62,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetSecretRequest.class).add("key", key).add("scope", scope).toString(); } + + GetSecretRequestPb toPb() { + GetSecretRequestPb pb = new GetSecretRequestPb(); + pb.setKey(key); + pb.setScope(scope); + + return pb; + } + + static GetSecretRequest fromPb(GetSecretRequestPb pb) { + GetSecretRequest model = new GetSecretRequest(); + model.setKey(pb.getKey()); + model.setScope(pb.getScope()); + + return model; + } + + public static class GetSecretRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetSecretRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSecretRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSecretRequestDeserializer extends JsonDeserializer { + @Override + public 
GetSecretRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSecretRequestPb pb = mapper.readValue(p, GetSecretRequestPb.class); + return GetSecretRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequestPb.java new file mode 100755 index 000000000..2a345dcb4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequestPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a secret */ +@Generated +class GetSecretRequestPb { + @JsonIgnore + @QueryParam("key") + private String key; + + @JsonIgnore + @QueryParam("scope") + private String scope; + + public GetSecretRequestPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public GetSecretRequestPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretRequestPb that = (GetSecretRequestPb) o; + return Objects.equals(key, that.key) && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(key, scope); + } + + @Override + public String toString() { + return new 
ToStringer(GetSecretRequestPb.class).add("key", key).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java index e763f9ea6..e67d1c09a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = GetSecretResponse.GetSecretResponseSerializer.class) +@JsonDeserialize(using = GetSecretResponse.GetSecretResponseDeserializer.class) public class GetSecretResponse { /** A unique name to identify the secret. */ - @JsonProperty("key") private String key; /** The value of the secret in its byte representation. 
*/ - @JsonProperty("value") private String value; public GetSecretResponse setKey(String key) { @@ -52,4 +61,40 @@ public int hashCode() { public String toString() { return new ToStringer(GetSecretResponse.class).add("key", key).add("value", value).toString(); } + + GetSecretResponsePb toPb() { + GetSecretResponsePb pb = new GetSecretResponsePb(); + pb.setKey(key); + pb.setValue(value); + + return pb; + } + + static GetSecretResponse fromPb(GetSecretResponsePb pb) { + GetSecretResponse model = new GetSecretResponse(); + model.setKey(pb.getKey()); + model.setValue(pb.getValue()); + + return model; + } + + public static class GetSecretResponseSerializer extends JsonSerializer { + @Override + public void serialize(GetSecretResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetSecretResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetSecretResponseDeserializer extends JsonDeserializer { + @Override + public GetSecretResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetSecretResponsePb pb = mapper.readValue(p, GetSecretResponsePb.class); + return GetSecretResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponsePb.java new file mode 100755 index 000000000..427a1db9b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponsePb.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class GetSecretResponsePb { + @JsonProperty("key") + private String key; + + @JsonProperty("value") + private String value; + + public GetSecretResponsePb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public GetSecretResponsePb setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretResponsePb that = (GetSecretResponsePb) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(GetSecretResponsePb.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java index 34b1d0bbf..8b0efc9a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get status */ @Generated +@JsonSerialize(using = GetStatusRequest.GetStatusRequestSerializer.class) +@JsonDeserialize(using = GetStatusRequest.GetStatusRequestDeserializer.class) public class GetStatusRequest { /** The absolute path of the notebook or directory. */ - @JsonIgnore - @QueryParam("path") private String path; public GetStatusRequest setPath(String path) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(GetStatusRequest.class).add("path", path).toString(); } + + GetStatusRequestPb toPb() { + GetStatusRequestPb pb = new GetStatusRequestPb(); + pb.setPath(path); + + return pb; + } + + static GetStatusRequest fromPb(GetStatusRequestPb pb) { + GetStatusRequest model = new GetStatusRequest(); + model.setPath(pb.getPath()); + + return model; + } + + public static class GetStatusRequestSerializer extends JsonSerializer { + @Override + public void serialize(GetStatusRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetStatusRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetStatusRequestDeserializer extends JsonDeserializer { + @Override + public GetStatusRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetStatusRequestPb pb = mapper.readValue(p, GetStatusRequestPb.class); + return GetStatusRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequestPb.java new file mode 100755 index 000000000..c1cc2ac24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get status */ +@Generated +class GetStatusRequestPb { + @JsonIgnore + @QueryParam("path") + private String path; + + public GetStatusRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStatusRequestPb that = (GetStatusRequestPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(GetStatusRequestPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java index c7ea38824..e8ab0f544 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java @@ -4,17 +4,34 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get workspace object permission levels */ @Generated +@JsonSerialize( + using = + GetWorkspaceObjectPermissionLevelsRequest + .GetWorkspaceObjectPermissionLevelsRequestSerializer.class) +@JsonDeserialize( + using = + GetWorkspaceObjectPermissionLevelsRequest + .GetWorkspaceObjectPermissionLevelsRequestDeserializer.class) public class GetWorkspaceObjectPermissionLevelsRequest { /** The workspace object for which to get or manage permissions. */ - @JsonIgnore private String workspaceObjectId; + private String workspaceObjectId; /** The workspace object type for which to get or manage permissions. 
*/ - @JsonIgnore private String workspaceObjectType; + private String workspaceObjectType; public GetWorkspaceObjectPermissionLevelsRequest setWorkspaceObjectId(String workspaceObjectId) { this.workspaceObjectId = workspaceObjectId; @@ -56,4 +73,49 @@ public String toString() { .add("workspaceObjectType", workspaceObjectType) .toString(); } + + GetWorkspaceObjectPermissionLevelsRequestPb toPb() { + GetWorkspaceObjectPermissionLevelsRequestPb pb = + new GetWorkspaceObjectPermissionLevelsRequestPb(); + pb.setWorkspaceObjectId(workspaceObjectId); + pb.setWorkspaceObjectType(workspaceObjectType); + + return pb; + } + + static GetWorkspaceObjectPermissionLevelsRequest fromPb( + GetWorkspaceObjectPermissionLevelsRequestPb pb) { + GetWorkspaceObjectPermissionLevelsRequest model = + new GetWorkspaceObjectPermissionLevelsRequest(); + model.setWorkspaceObjectId(pb.getWorkspaceObjectId()); + model.setWorkspaceObjectType(pb.getWorkspaceObjectType()); + + return model; + } + + public static class GetWorkspaceObjectPermissionLevelsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceObjectPermissionLevelsRequest value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetWorkspaceObjectPermissionLevelsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceObjectPermissionLevelsRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceObjectPermissionLevelsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceObjectPermissionLevelsRequestPb pb = + mapper.readValue(p, GetWorkspaceObjectPermissionLevelsRequestPb.class); + return GetWorkspaceObjectPermissionLevelsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequestPb.java new file mode 100755 index 000000000..b2947d4e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get workspace object permission levels */ +@Generated +class GetWorkspaceObjectPermissionLevelsRequestPb { + @JsonIgnore private String workspaceObjectId; + + @JsonIgnore private String workspaceObjectType; + + public GetWorkspaceObjectPermissionLevelsRequestPb setWorkspaceObjectId( + String workspaceObjectId) { + this.workspaceObjectId = workspaceObjectId; + return this; + } + + public String getWorkspaceObjectId() { + return workspaceObjectId; + } + + public GetWorkspaceObjectPermissionLevelsRequestPb setWorkspaceObjectType( + String workspaceObjectType) { + this.workspaceObjectType = workspaceObjectType; + return this; + } + + public String getWorkspaceObjectType() { + return workspaceObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceObjectPermissionLevelsRequestPb that = + (GetWorkspaceObjectPermissionLevelsRequestPb) o; + return 
Objects.equals(workspaceObjectId, that.workspaceObjectId) + && Objects.equals(workspaceObjectType, that.workspaceObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceObjectId, workspaceObjectType); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceObjectPermissionLevelsRequestPb.class) + .add("workspaceObjectId", workspaceObjectId) + .add("workspaceObjectType", workspaceObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponse.java index 3fc8d1351..32421af6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponse.java @@ -4,14 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + GetWorkspaceObjectPermissionLevelsResponse + .GetWorkspaceObjectPermissionLevelsResponseSerializer.class) +@JsonDeserialize( + using = + GetWorkspaceObjectPermissionLevelsResponse + 
.GetWorkspaceObjectPermissionLevelsResponseDeserializer.class) public class GetWorkspaceObjectPermissionLevelsResponse { /** Specific permission levels */ - @JsonProperty("permission_levels") private Collection permissionLevels; public GetWorkspaceObjectPermissionLevelsResponse setPermissionLevels( @@ -44,4 +60,47 @@ public String toString() { .add("permissionLevels", permissionLevels) .toString(); } + + GetWorkspaceObjectPermissionLevelsResponsePb toPb() { + GetWorkspaceObjectPermissionLevelsResponsePb pb = + new GetWorkspaceObjectPermissionLevelsResponsePb(); + pb.setPermissionLevels(permissionLevels); + + return pb; + } + + static GetWorkspaceObjectPermissionLevelsResponse fromPb( + GetWorkspaceObjectPermissionLevelsResponsePb pb) { + GetWorkspaceObjectPermissionLevelsResponse model = + new GetWorkspaceObjectPermissionLevelsResponse(); + model.setPermissionLevels(pb.getPermissionLevels()); + + return model; + } + + public static class GetWorkspaceObjectPermissionLevelsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceObjectPermissionLevelsResponse value, + JsonGenerator gen, + SerializerProvider provider) + throws IOException { + GetWorkspaceObjectPermissionLevelsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceObjectPermissionLevelsResponseDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceObjectPermissionLevelsResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceObjectPermissionLevelsResponsePb pb = + mapper.readValue(p, GetWorkspaceObjectPermissionLevelsResponsePb.class); + return GetWorkspaceObjectPermissionLevelsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponsePb.java new file mode 100755 index 000000000..c5a29becc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsResponsePb.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class GetWorkspaceObjectPermissionLevelsResponsePb { + @JsonProperty("permission_levels") + private Collection permissionLevels; + + public GetWorkspaceObjectPermissionLevelsResponsePb setPermissionLevels( + Collection permissionLevels) { + this.permissionLevels = permissionLevels; + return this; + } + + public Collection getPermissionLevels() { + return permissionLevels; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceObjectPermissionLevelsResponsePb that = + (GetWorkspaceObjectPermissionLevelsResponsePb) o; + return Objects.equals(permissionLevels, that.permissionLevels); + } + + @Override + public int hashCode() { + return Objects.hash(permissionLevels); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceObjectPermissionLevelsResponsePb.class) + .add("permissionLevels", 
permissionLevels) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java index baad343a5..9377667df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java @@ -4,17 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get workspace object permissions */ @Generated +@JsonSerialize( + using = + GetWorkspaceObjectPermissionsRequest.GetWorkspaceObjectPermissionsRequestSerializer.class) +@JsonDeserialize( + using = + GetWorkspaceObjectPermissionsRequest.GetWorkspaceObjectPermissionsRequestDeserializer.class) public class GetWorkspaceObjectPermissionsRequest { /** The workspace object for which to get or manage permissions. */ - @JsonIgnore private String workspaceObjectId; + private String workspaceObjectId; /** The workspace object type for which to get or manage permissions. 
*/ - @JsonIgnore private String workspaceObjectType; + private String workspaceObjectType; public GetWorkspaceObjectPermissionsRequest setWorkspaceObjectId(String workspaceObjectId) { this.workspaceObjectId = workspaceObjectId; @@ -55,4 +70,44 @@ public String toString() { .add("workspaceObjectType", workspaceObjectType) .toString(); } + + GetWorkspaceObjectPermissionsRequestPb toPb() { + GetWorkspaceObjectPermissionsRequestPb pb = new GetWorkspaceObjectPermissionsRequestPb(); + pb.setWorkspaceObjectId(workspaceObjectId); + pb.setWorkspaceObjectType(workspaceObjectType); + + return pb; + } + + static GetWorkspaceObjectPermissionsRequest fromPb(GetWorkspaceObjectPermissionsRequestPb pb) { + GetWorkspaceObjectPermissionsRequest model = new GetWorkspaceObjectPermissionsRequest(); + model.setWorkspaceObjectId(pb.getWorkspaceObjectId()); + model.setWorkspaceObjectType(pb.getWorkspaceObjectType()); + + return model; + } + + public static class GetWorkspaceObjectPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + GetWorkspaceObjectPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + GetWorkspaceObjectPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class GetWorkspaceObjectPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public GetWorkspaceObjectPermissionsRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + GetWorkspaceObjectPermissionsRequestPb pb = + mapper.readValue(p, GetWorkspaceObjectPermissionsRequestPb.class); + return GetWorkspaceObjectPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequestPb.java new file mode 100755 index 000000000..03298f7c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequestPb.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get workspace object permissions */ +@Generated +class GetWorkspaceObjectPermissionsRequestPb { + @JsonIgnore private String workspaceObjectId; + + @JsonIgnore private String workspaceObjectType; + + public GetWorkspaceObjectPermissionsRequestPb setWorkspaceObjectId(String workspaceObjectId) { + this.workspaceObjectId = workspaceObjectId; + return this; + } + + public String getWorkspaceObjectId() { + return workspaceObjectId; + } + + public GetWorkspaceObjectPermissionsRequestPb setWorkspaceObjectType(String workspaceObjectType) { + this.workspaceObjectType = workspaceObjectType; + return this; + } + + public String getWorkspaceObjectType() { + return workspaceObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceObjectPermissionsRequestPb that = (GetWorkspaceObjectPermissionsRequestPb) o; + return Objects.equals(workspaceObjectId, that.workspaceObjectId) + && 
Objects.equals(workspaceObjectType, that.workspaceObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceObjectId, workspaceObjectType); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceObjectPermissionsRequestPb.class) + .add("workspaceObjectId", workspaceObjectId) + .add("workspaceObjectType", workspaceObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java index 64260be1e..d6bc251c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java @@ -21,7 +21,7 @@ public CreateCredentialsResponse create(CreateCredentialsRequest request) { String path = "/api/2.0/git-credentials"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateCredentialsResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteCredentialsResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetCredentialsResponse get(GetCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, 
request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetCredentialsResponse.class); } catch (IOException e) { @@ -73,7 +73,7 @@ public void update(UpdateCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateCredentialsResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java index ee39ba51d..341ade0c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java @@ -4,10 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Import.ImportSerializer.class) +@JsonDeserialize(using = Import.ImportDeserializer.class) public class Import { /** * The base64-encoded content. This has a limit of 10 MB. @@ -15,7 +26,6 @@ public class Import { *

If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is * thrown. This parameter might be absent, and instead a posted file is used. */ - @JsonProperty("content") private String content; /** @@ -31,25 +41,21 @@ public class Import { * format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown * format. */ - @JsonProperty("format") private ImportFormat format; /** The language of the object. This value is set only if the object type is `NOTEBOOK`. */ - @JsonProperty("language") private Language language; /** * The flag that specifies whether to overwrite existing object. It is `false` by default. For * `DBC` format, `overwrite` is not supported since it may contain a directory. */ - @JsonProperty("overwrite") private Boolean overwrite; /** * The absolute path of the object or directory. Importing a directory is only supported for the * `DBC` and `SOURCE` formats. */ - @JsonProperty("path") private String path; public Import setContent(String content) { @@ -124,4 +130,45 @@ public String toString() { .add("path", path) .toString(); } + + ImportPb toPb() { + ImportPb pb = new ImportPb(); + pb.setContent(content); + pb.setFormat(format); + pb.setLanguage(language); + pb.setOverwrite(overwrite); + pb.setPath(path); + + return pb; + } + + static Import fromPb(ImportPb pb) { + Import model = new Import(); + model.setContent(pb.getContent()); + model.setFormat(pb.getFormat()); + model.setLanguage(pb.getLanguage()); + model.setOverwrite(pb.getOverwrite()); + model.setPath(pb.getPath()); + + return model; + } + + public static class ImportSerializer extends JsonSerializer { + @Override + public void serialize(Import value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ImportPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ImportDeserializer extends JsonDeserializer { + @Override + public Import deserialize(JsonParser p, 
DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ImportPb pb = mapper.readValue(p, ImportPb.class); + return Import.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportPb.java new file mode 100755 index 000000000..411b6c994 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportPb.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class ImportPb { + @JsonProperty("content") + private String content; + + @JsonProperty("format") + private ImportFormat format; + + @JsonProperty("language") + private Language language; + + @JsonProperty("overwrite") + private Boolean overwrite; + + @JsonProperty("path") + private String path; + + public ImportPb setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public ImportPb setFormat(ImportFormat format) { + this.format = format; + return this; + } + + public ImportFormat getFormat() { + return format; + } + + public ImportPb setLanguage(Language language) { + this.language = language; + return this; + } + + public Language getLanguage() { + return language; + } + + public ImportPb setOverwrite(Boolean overwrite) { + this.overwrite = overwrite; + return this; + } + + public Boolean getOverwrite() { + return overwrite; + } + + public ImportPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; 
+ } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ImportPb that = (ImportPb) o; + return Objects.equals(content, that.content) + && Objects.equals(format, that.format) + && Objects.equals(language, that.language) + && Objects.equals(overwrite, that.overwrite) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(content, format, language, overwrite, path); + } + + @Override + public String toString() { + return new ToStringer(ImportPb.class) + .add("content", content) + .add("format", format) + .add("language", language) + .add("overwrite", overwrite) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java index aac0f4f57..38d994d2b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = ImportResponse.ImportResponseSerializer.class) +@JsonDeserialize(using = 
ImportResponse.ImportResponseDeserializer.class) public class ImportResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(ImportResponse.class).toString(); } + + ImportResponsePb toPb() { + ImportResponsePb pb = new ImportResponsePb(); + + return pb; + } + + static ImportResponse fromPb(ImportResponsePb pb) { + ImportResponse model = new ImportResponse(); + + return model; + } + + public static class ImportResponseSerializer extends JsonSerializer { + @Override + public void serialize(ImportResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ImportResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ImportResponseDeserializer extends JsonDeserializer { + @Override + public ImportResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ImportResponsePb pb = mapper.readValue(p, ImportResponsePb.class); + return ImportResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponsePb.java new file mode 100755 index 000000000..3a526cc01 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class ImportResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ImportResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java index a3dde701a..388c698a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Lists ACLs */ @Generated +@JsonSerialize(using = ListAclsRequest.ListAclsRequestSerializer.class) +@JsonDeserialize(using = ListAclsRequest.ListAclsRequestDeserializer.class) public class 
ListAclsRequest { /** The name of the scope to fetch ACL information from. */ - @JsonIgnore - @QueryParam("scope") private String scope; public ListAclsRequest setScope(String scope) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListAclsRequest.class).add("scope", scope).toString(); } + + ListAclsRequestPb toPb() { + ListAclsRequestPb pb = new ListAclsRequestPb(); + pb.setScope(scope); + + return pb; + } + + static ListAclsRequest fromPb(ListAclsRequestPb pb) { + ListAclsRequest model = new ListAclsRequest(); + model.setScope(pb.getScope()); + + return model; + } + + public static class ListAclsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListAclsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAclsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAclsRequestDeserializer extends JsonDeserializer { + @Override + public ListAclsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAclsRequestPb pb = mapper.readValue(p, ListAclsRequestPb.class); + return ListAclsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequestPb.java new file mode 100755 index 000000000..0629f5124 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Lists ACLs */ +@Generated +class ListAclsRequestPb { + @JsonIgnore + @QueryParam("scope") + private String scope; + + public ListAclsRequestPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAclsRequestPb that = (ListAclsRequestPb) o; + return Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(scope); + } + + @Override + public String toString() { + return new ToStringer(ListAclsRequestPb.class).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java index 1f1e8d389..cd8855c36 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListAclsResponse.ListAclsResponseSerializer.class) +@JsonDeserialize(using = ListAclsResponse.ListAclsResponseDeserializer.class) public class ListAclsResponse { /** The associated ACLs rule applied to principals in the given scope. */ - @JsonProperty("items") private Collection items; public ListAclsResponse setItems(Collection items) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListAclsResponse.class).add("items", items).toString(); } + + ListAclsResponsePb toPb() { + ListAclsResponsePb pb = new ListAclsResponsePb(); + pb.setItems(items); + + return pb; + } + + static ListAclsResponse fromPb(ListAclsResponsePb pb) { + ListAclsResponse model = new ListAclsResponse(); + model.setItems(pb.getItems()); + + return model; + } + + public static class ListAclsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListAclsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListAclsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListAclsResponseDeserializer extends JsonDeserializer { + @Override + public ListAclsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListAclsResponsePb pb = mapper.readValue(p, ListAclsResponsePb.class); + return ListAclsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponsePb.java new file mode 100755 index 000000000..a03c8a3e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListAclsResponsePb { + @JsonProperty("items") + private Collection items; + + public ListAclsResponsePb setItems(Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAclsResponsePb that = (ListAclsResponsePb) o; + return Objects.equals(items, that.items); + } + + @Override + public int hashCode() { + return Objects.hash(items); + } + + @Override + public String toString() { + return new ToStringer(ListAclsResponsePb.class).add("items", items).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java index 6f3c22c24..61103b2bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListCredentialsResponse.ListCredentialsResponseSerializer.class) +@JsonDeserialize(using = ListCredentialsResponse.ListCredentialsResponseDeserializer.class) public class ListCredentialsResponse { /** List of credentials. 
*/ - @JsonProperty("credentials") private Collection credentials; public ListCredentialsResponse setCredentials(Collection credentials) { @@ -40,4 +50,41 @@ public int hashCode() { public String toString() { return new ToStringer(ListCredentialsResponse.class).add("credentials", credentials).toString(); } + + ListCredentialsResponsePb toPb() { + ListCredentialsResponsePb pb = new ListCredentialsResponsePb(); + pb.setCredentials(credentials); + + return pb; + } + + static ListCredentialsResponse fromPb(ListCredentialsResponsePb pb) { + ListCredentialsResponse model = new ListCredentialsResponse(); + model.setCredentials(pb.getCredentials()); + + return model; + } + + public static class ListCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + ListCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public ListCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListCredentialsResponsePb pb = mapper.readValue(p, ListCredentialsResponsePb.class); + return ListCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponsePb.java new file mode 100755 index 000000000..8909d98a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponsePb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListCredentialsResponsePb { + @JsonProperty("credentials") + private Collection credentials; + + public ListCredentialsResponsePb setCredentials(Collection credentials) { + this.credentials = credentials; + return this; + } + + public Collection getCredentials() { + return credentials; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsResponsePb that = (ListCredentialsResponsePb) o; + return Objects.equals(credentials, that.credentials); + } + + @Override + public int hashCode() { + return Objects.hash(credentials); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsResponsePb.class) + .add("credentials", credentials) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java index e8806105f..3a778e13f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java @@ -3,20 +3,28 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Get repos */ @Generated +@JsonSerialize(using = ListReposRequest.ListReposRequestSerializer.class) +@JsonDeserialize(using = ListReposRequest.ListReposRequestDeserializer.class) public class ListReposRequest { /** * Token used to get the next page of results. If not specified, returns the first page of results * as well as a next page token if there are more results. */ - @JsonIgnore - @QueryParam("next_page_token") private String nextPageToken; /** @@ -24,8 +32,6 @@ public class ListReposRequest { * provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from * `/Workspace/Repos` will be served. */ - @JsonIgnore - @QueryParam("path_prefix") private String pathPrefix; public ListReposRequest setNextPageToken(String nextPageToken) { @@ -67,4 +73,40 @@ public String toString() { .add("pathPrefix", pathPrefix) .toString(); } + + ListReposRequestPb toPb() { + ListReposRequestPb pb = new ListReposRequestPb(); + pb.setNextPageToken(nextPageToken); + pb.setPathPrefix(pathPrefix); + + return pb; + } + + static ListReposRequest fromPb(ListReposRequestPb pb) { + ListReposRequest model = new ListReposRequest(); + model.setNextPageToken(pb.getNextPageToken()); + model.setPathPrefix(pb.getPathPrefix()); + + return model; + } + + public static class ListReposRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListReposRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListReposRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListReposRequestDeserializer extends JsonDeserializer { + @Override + 
public ListReposRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListReposRequestPb pb = mapper.readValue(p, ListReposRequestPb.class); + return ListReposRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequestPb.java new file mode 100755 index 000000000..f41ac53f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequestPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get repos */ +@Generated +class ListReposRequestPb { + @JsonIgnore + @QueryParam("next_page_token") + private String nextPageToken; + + @JsonIgnore + @QueryParam("path_prefix") + private String pathPrefix; + + public ListReposRequestPb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListReposRequestPb setPathPrefix(String pathPrefix) { + this.pathPrefix = pathPrefix; + return this; + } + + public String getPathPrefix() { + return pathPrefix; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListReposRequestPb that = (ListReposRequestPb) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(pathPrefix, that.pathPrefix); + } + + @Override + 
public int hashCode() { + return Objects.hash(nextPageToken, pathPrefix); + } + + @Override + public String toString() { + return new ToStringer(ListReposRequestPb.class) + .add("nextPageToken", nextPageToken) + .add("pathPrefix", pathPrefix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java index 70fd94ae8..9b1ef3da2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListReposResponse.ListReposResponseSerializer.class) +@JsonDeserialize(using = ListReposResponse.ListReposResponseDeserializer.class) public class ListReposResponse { /** * Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the * next page of results. */ - @JsonProperty("next_page_token") private String nextPageToken; /** List of Git folders (repos). 
*/ - @JsonProperty("repos") private Collection repos; public ListReposResponse setNextPageToken(String nextPageToken) { @@ -59,4 +68,40 @@ public String toString() { .add("repos", repos) .toString(); } + + ListReposResponsePb toPb() { + ListReposResponsePb pb = new ListReposResponsePb(); + pb.setNextPageToken(nextPageToken); + pb.setRepos(repos); + + return pb; + } + + static ListReposResponse fromPb(ListReposResponsePb pb) { + ListReposResponse model = new ListReposResponse(); + model.setNextPageToken(pb.getNextPageToken()); + model.setRepos(pb.getRepos()); + + return model; + } + + public static class ListReposResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListReposResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListReposResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListReposResponseDeserializer extends JsonDeserializer { + @Override + public ListReposResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListReposResponsePb pb = mapper.readValue(p, ListReposResponsePb.class); + return ListReposResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponsePb.java new file mode 100755 index 000000000..c36138e42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponsePb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListReposResponsePb { + @JsonProperty("next_page_token") + private String nextPageToken; + + @JsonProperty("repos") + private Collection repos; + + public ListReposResponsePb setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListReposResponsePb setRepos(Collection repos) { + this.repos = repos; + return this; + } + + public Collection getRepos() { + return repos; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListReposResponsePb that = (ListReposResponsePb) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(repos, that.repos); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, repos); + } + + @Override + public String toString() { + return new ToStringer(ListReposResponsePb.class) + .add("nextPageToken", nextPageToken) + .add("repos", repos) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponse.java index a9b051141..2b513d1a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListResponse.ListResponseSerializer.class) +@JsonDeserialize(using = ListResponse.ListResponseDeserializer.class) public class ListResponse { /** List of objects. */ - @JsonProperty("objects") private Collection objects; public ListResponse setObjects(Collection objects) { @@ -40,4 +50,37 @@ public int hashCode() { public String toString() { return new ToStringer(ListResponse.class).add("objects", objects).toString(); } + + ListResponsePb toPb() { + ListResponsePb pb = new ListResponsePb(); + pb.setObjects(objects); + + return pb; + } + + static ListResponse fromPb(ListResponsePb pb) { + ListResponse model = new ListResponse(); + model.setObjects(pb.getObjects()); + + return model; + } + + public static class ListResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListResponseDeserializer extends JsonDeserializer { + @Override + public ListResponse deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListResponsePb pb = mapper.readValue(p, ListResponsePb.class); + return ListResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponsePb.java new file mode 100755 index 000000000..1b6992e41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListResponsePb { + @JsonProperty("objects") + private Collection objects; + + public ListResponsePb setObjects(Collection objects) { + this.objects = objects; + return this; + } + + public Collection getObjects() { + return objects; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListResponsePb that = (ListResponsePb) o; + return Objects.equals(objects, that.objects); + } + + @Override + public int hashCode() { + return Objects.hash(objects); + } + + @Override + public String toString() { + return new ToStringer(ListResponsePb.class).add("objects", objects).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponse.java index fb8a0f333..ba72f7c29 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListScopesResponse.ListScopesResponseSerializer.class) +@JsonDeserialize(using = ListScopesResponse.ListScopesResponseDeserializer.class) public class ListScopesResponse { /** The available secret scopes. 
*/ - @JsonProperty("scopes") private Collection scopes; public ListScopesResponse setScopes(Collection scopes) { @@ -40,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListScopesResponse.class).add("scopes", scopes).toString(); } + + ListScopesResponsePb toPb() { + ListScopesResponsePb pb = new ListScopesResponsePb(); + pb.setScopes(scopes); + + return pb; + } + + static ListScopesResponse fromPb(ListScopesResponsePb pb) { + ListScopesResponse model = new ListScopesResponse(); + model.setScopes(pb.getScopes()); + + return model; + } + + public static class ListScopesResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListScopesResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListScopesResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListScopesResponseDeserializer extends JsonDeserializer { + @Override + public ListScopesResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListScopesResponsePb pb = mapper.readValue(p, ListScopesResponsePb.class); + return ListScopesResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponsePb.java new file mode 100755 index 000000000..7ba2e3c83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListScopesResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListScopesResponsePb { + @JsonProperty("scopes") + private Collection scopes; + + public ListScopesResponsePb setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListScopesResponsePb that = (ListScopesResponsePb) o; + return Objects.equals(scopes, that.scopes); + } + + @Override + public int hashCode() { + return Objects.hash(scopes); + } + + @Override + public String toString() { + return new ToStringer(ListScopesResponsePb.class).add("scopes", scopes).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequest.java index 719a177d1..a91074f83 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequest.java @@ -3,17 +3,25 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List secret keys */ @Generated +@JsonSerialize(using = ListSecretsRequest.ListSecretsRequestSerializer.class) +@JsonDeserialize(using = ListSecretsRequest.ListSecretsRequestDeserializer.class) public class ListSecretsRequest { /** The name of the scope to list secrets within. */ - @JsonIgnore - @QueryParam("scope") private String scope; public ListSecretsRequest setScope(String scope) { @@ -42,4 +50,38 @@ public int hashCode() { public String toString() { return new ToStringer(ListSecretsRequest.class).add("scope", scope).toString(); } + + ListSecretsRequestPb toPb() { + ListSecretsRequestPb pb = new ListSecretsRequestPb(); + pb.setScope(scope); + + return pb; + } + + static ListSecretsRequest fromPb(ListSecretsRequestPb pb) { + ListSecretsRequest model = new ListSecretsRequest(); + model.setScope(pb.getScope()); + + return model; + } + + public static class ListSecretsRequestSerializer extends JsonSerializer { + @Override + public void serialize(ListSecretsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSecretsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSecretsRequestDeserializer extends JsonDeserializer { + @Override + public ListSecretsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSecretsRequestPb pb = mapper.readValue(p, ListSecretsRequestPb.class); + return ListSecretsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequestPb.java new file mode 100755 index 000000000..dd70c1a1e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsRequestPb.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List secret keys */ +@Generated +class ListSecretsRequestPb { + @JsonIgnore + @QueryParam("scope") + private String scope; + + public ListSecretsRequestPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSecretsRequestPb that = (ListSecretsRequestPb) o; + return Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(scope); + } + + @Override + public String toString() { + return new ToStringer(ListSecretsRequestPb.class).add("scope", scope).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java index ac0e79596..c2949cb7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java @@ -4,14 +4,24 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = ListSecretsResponse.ListSecretsResponseSerializer.class) +@JsonDeserialize(using = ListSecretsResponse.ListSecretsResponseDeserializer.class) public class ListSecretsResponse { /** Metadata information of all secrets contained within the given scope. 
*/ - @JsonProperty("secrets") private Collection secrets; public ListSecretsResponse setSecrets(Collection secrets) { @@ -40,4 +50,39 @@ public int hashCode() { public String toString() { return new ToStringer(ListSecretsResponse.class).add("secrets", secrets).toString(); } + + ListSecretsResponsePb toPb() { + ListSecretsResponsePb pb = new ListSecretsResponsePb(); + pb.setSecrets(secrets); + + return pb; + } + + static ListSecretsResponse fromPb(ListSecretsResponsePb pb) { + ListSecretsResponse model = new ListSecretsResponse(); + model.setSecrets(pb.getSecrets()); + + return model; + } + + public static class ListSecretsResponseSerializer extends JsonSerializer { + @Override + public void serialize(ListSecretsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListSecretsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListSecretsResponseDeserializer + extends JsonDeserializer { + @Override + public ListSecretsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListSecretsResponsePb pb = mapper.readValue(p, ListSecretsResponsePb.class); + return ListSecretsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponsePb.java new file mode 100755 index 000000000..857f97be4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponsePb.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class ListSecretsResponsePb { + @JsonProperty("secrets") + private Collection secrets; + + public ListSecretsResponsePb setSecrets(Collection secrets) { + this.secrets = secrets; + return this; + } + + public Collection getSecrets() { + return secrets; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSecretsResponsePb that = (ListSecretsResponsePb) o; + return Objects.equals(secrets, that.secrets); + } + + @Override + public int hashCode() { + return Objects.hash(secrets); + } + + @Override + public String toString() { + return new ToStringer(ListSecretsResponsePb.class).add("secrets", secrets).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java index b6d053f2c..c6301d3e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java @@ -3,22 +3,28 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** List contents */ @Generated +@JsonSerialize(using = ListWorkspaceRequest.ListWorkspaceRequestSerializer.class) +@JsonDeserialize(using = ListWorkspaceRequest.ListWorkspaceRequestDeserializer.class) public class ListWorkspaceRequest { /** UTC timestamp in milliseconds */ - @JsonIgnore - @QueryParam("notebooks_modified_after") private Long notebooksModifiedAfter; /** The absolute path of the notebook or directory. */ - @JsonIgnore - @QueryParam("path") private String path; public ListWorkspaceRequest setNotebooksModifiedAfter(Long notebooksModifiedAfter) { @@ -60,4 +66,42 @@ public String toString() { .add("path", path) .toString(); } + + ListWorkspaceRequestPb toPb() { + ListWorkspaceRequestPb pb = new ListWorkspaceRequestPb(); + pb.setNotebooksModifiedAfter(notebooksModifiedAfter); + pb.setPath(path); + + return pb; + } + + static ListWorkspaceRequest fromPb(ListWorkspaceRequestPb pb) { + ListWorkspaceRequest model = new ListWorkspaceRequest(); + model.setNotebooksModifiedAfter(pb.getNotebooksModifiedAfter()); + model.setPath(pb.getPath()); + + return model; + } + + public static class ListWorkspaceRequestSerializer extends JsonSerializer { + @Override + public void serialize( + ListWorkspaceRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ListWorkspaceRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ListWorkspaceRequestDeserializer + extends JsonDeserializer { + @Override + public ListWorkspaceRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ListWorkspaceRequestPb pb = mapper.readValue(p, ListWorkspaceRequestPb.class); + return ListWorkspaceRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequestPb.java new file mode 100755 index 000000000..d44d2e3b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequestPb.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List contents */ +@Generated +class ListWorkspaceRequestPb { + @JsonIgnore + @QueryParam("notebooks_modified_after") + private Long notebooksModifiedAfter; + + @JsonIgnore + @QueryParam("path") + private String path; + + public ListWorkspaceRequestPb setNotebooksModifiedAfter(Long notebooksModifiedAfter) { + this.notebooksModifiedAfter = notebooksModifiedAfter; + return this; + } + + public Long getNotebooksModifiedAfter() { + return notebooksModifiedAfter; + } + + public ListWorkspaceRequestPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceRequestPb that = (ListWorkspaceRequestPb) o; + return Objects.equals(notebooksModifiedAfter, that.notebooksModifiedAfter) + && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(notebooksModifiedAfter, path); + } + + @Override + public String 
toString() { + return new ToStringer(ListWorkspaceRequestPb.class) + .add("notebooksModifiedAfter", notebooksModifiedAfter) + .add("path", path) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java index 348695a0e..89489ddee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java @@ -4,16 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = Mkdirs.MkdirsSerializer.class) +@JsonDeserialize(using = Mkdirs.MkdirsDeserializer.class) public class Mkdirs { /** * The absolute path of the directory. If the parent directories do not exist, it will also create * them. If the directory already exists, this command will do nothing and succeed. 
*/ - @JsonProperty("path") private String path; public Mkdirs setPath(String path) { @@ -42,4 +52,37 @@ public int hashCode() { public String toString() { return new ToStringer(Mkdirs.class).add("path", path).toString(); } + + MkdirsPb toPb() { + MkdirsPb pb = new MkdirsPb(); + pb.setPath(path); + + return pb; + } + + static Mkdirs fromPb(MkdirsPb pb) { + Mkdirs model = new Mkdirs(); + model.setPath(pb.getPath()); + + return model; + } + + public static class MkdirsSerializer extends JsonSerializer { + @Override + public void serialize(Mkdirs value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MkdirsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MkdirsDeserializer extends JsonDeserializer { + @Override + public Mkdirs deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MkdirsPb pb = mapper.readValue(p, MkdirsPb.class); + return Mkdirs.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsPb.java new file mode 100755 index 000000000..cc1122b4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsPb.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class MkdirsPb { + @JsonProperty("path") + private String path; + + public MkdirsPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MkdirsPb that = (MkdirsPb) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path); + } + + @Override + public String toString() { + return new ToStringer(MkdirsPb.class).add("path", path).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java index 4bfe6ad7b..b93c67596 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = 
MkdirsResponse.MkdirsResponseSerializer.class) +@JsonDeserialize(using = MkdirsResponse.MkdirsResponseDeserializer.class) public class MkdirsResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(MkdirsResponse.class).toString(); } + + MkdirsResponsePb toPb() { + MkdirsResponsePb pb = new MkdirsResponsePb(); + + return pb; + } + + static MkdirsResponse fromPb(MkdirsResponsePb pb) { + MkdirsResponse model = new MkdirsResponse(); + + return model; + } + + public static class MkdirsResponseSerializer extends JsonSerializer { + @Override + public void serialize(MkdirsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + MkdirsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class MkdirsResponseDeserializer extends JsonDeserializer { + @Override + public MkdirsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + MkdirsResponsePb pb = mapper.readValue(p, MkdirsResponsePb.class); + return MkdirsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponsePb.java new file mode 100755 index 000000000..51b86b385 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class MkdirsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(MkdirsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java index 77643ed12..0a82aab90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java @@ -4,28 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** * The information of the object in workspace. It will be returned by ``list`` and ``get-status``. */ @Generated +@JsonSerialize(using = ObjectInfo.ObjectInfoSerializer.class) +@JsonDeserialize(using = ObjectInfo.ObjectInfoDeserializer.class) public class ObjectInfo { /** Only applicable to files. 
The creation UTC timestamp. */ - @JsonProperty("created_at") private Long createdAt; /** The language of the object. This value is set only if the object type is ``NOTEBOOK``. */ - @JsonProperty("language") private Language language; /** Only applicable to files, the last modified UTC timestamp. */ - @JsonProperty("modified_at") private Long modifiedAt; /** Unique identifier for the object. */ - @JsonProperty("object_id") private Long objectId; /** @@ -35,19 +42,15 @@ public class ObjectInfo { * `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: * Lakeview dashboard */ - @JsonProperty("object_type") private ObjectType objectType; /** The absolute path of the object. */ - @JsonProperty("path") private String path; /** A unique identifier for the object that is consistent across all Databricks APIs. */ - @JsonProperty("resource_id") private String resourceId; /** Only applicable to files. The file size in bytes can be returned. */ - @JsonProperty("size") private Long size; public ObjectInfo setCreatedAt(Long createdAt) { @@ -156,4 +159,51 @@ public String toString() { .add("size", size) .toString(); } + + ObjectInfoPb toPb() { + ObjectInfoPb pb = new ObjectInfoPb(); + pb.setCreatedAt(createdAt); + pb.setLanguage(language); + pb.setModifiedAt(modifiedAt); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + pb.setPath(path); + pb.setResourceId(resourceId); + pb.setSize(size); + + return pb; + } + + static ObjectInfo fromPb(ObjectInfoPb pb) { + ObjectInfo model = new ObjectInfo(); + model.setCreatedAt(pb.getCreatedAt()); + model.setLanguage(pb.getLanguage()); + model.setModifiedAt(pb.getModifiedAt()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + model.setPath(pb.getPath()); + model.setResourceId(pb.getResourceId()); + model.setSize(pb.getSize()); + + return model; + } + + public static class ObjectInfoSerializer extends JsonSerializer { + @Override + public void 
serialize(ObjectInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + ObjectInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class ObjectInfoDeserializer extends JsonDeserializer { + @Override + public ObjectInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + ObjectInfoPb pb = mapper.readValue(p, ObjectInfoPb.class); + return ObjectInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfoPb.java new file mode 100755 index 000000000..056a868cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfoPb.java @@ -0,0 +1,145 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The information of the object in workspace. It will be returned by ``list`` and ``get-status``. 
+ */ +@Generated +class ObjectInfoPb { + @JsonProperty("created_at") + private Long createdAt; + + @JsonProperty("language") + private Language language; + + @JsonProperty("modified_at") + private Long modifiedAt; + + @JsonProperty("object_id") + private Long objectId; + + @JsonProperty("object_type") + private ObjectType objectType; + + @JsonProperty("path") + private String path; + + @JsonProperty("resource_id") + private String resourceId; + + @JsonProperty("size") + private Long size; + + public ObjectInfoPb setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ObjectInfoPb setLanguage(Language language) { + this.language = language; + return this; + } + + public Language getLanguage() { + return language; + } + + public ObjectInfoPb setModifiedAt(Long modifiedAt) { + this.modifiedAt = modifiedAt; + return this; + } + + public Long getModifiedAt() { + return modifiedAt; + } + + public ObjectInfoPb setObjectId(Long objectId) { + this.objectId = objectId; + return this; + } + + public Long getObjectId() { + return objectId; + } + + public ObjectInfoPb setObjectType(ObjectType objectType) { + this.objectType = objectType; + return this; + } + + public ObjectType getObjectType() { + return objectType; + } + + public ObjectInfoPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public ObjectInfoPb setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public ObjectInfoPb setSize(Long size) { + this.size = size; + return this; + } + + public Long getSize() { + return size; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ObjectInfoPb that = (ObjectInfoPb) o; + return Objects.equals(createdAt, that.createdAt) + && 
Objects.equals(language, that.language) + && Objects.equals(modifiedAt, that.modifiedAt) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(path, that.path) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(size, that.size); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, language, modifiedAt, objectId, objectType, path, resourceId, size); + } + + @Override + public String toString() { + return new ToStringer(ObjectInfoPb.class) + .add("createdAt", createdAt) + .add("language", language) + .add("modifiedAt", modifiedAt) + .add("objectId", objectId) + .add("objectType", objectType) + .add("path", path) + .add("resourceId", resourceId) + .add("size", size) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java index 7655ef791..927f7575f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PutAcl.PutAclSerializer.class) 
+@JsonDeserialize(using = PutAcl.PutAclDeserializer.class) public class PutAcl { /** The permission level applied to the principal. */ - @JsonProperty("permission") private AclPermission permission; /** The principal in which the permission is applied. */ - @JsonProperty("principal") private String principal; /** The name of the scope to apply permissions to. */ - @JsonProperty("scope") private String scope; public PutAcl setPermission(AclPermission permission) { @@ -71,4 +79,41 @@ public String toString() { .add("scope", scope) .toString(); } + + PutAclPb toPb() { + PutAclPb pb = new PutAclPb(); + pb.setPermission(permission); + pb.setPrincipal(principal); + pb.setScope(scope); + + return pb; + } + + static PutAcl fromPb(PutAclPb pb) { + PutAcl model = new PutAcl(); + model.setPermission(pb.getPermission()); + model.setPrincipal(pb.getPrincipal()); + model.setScope(pb.getScope()); + + return model; + } + + public static class PutAclSerializer extends JsonSerializer { + @Override + public void serialize(PutAcl value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutAclPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutAclDeserializer extends JsonDeserializer { + @Override + public PutAcl deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutAclPb pb = mapper.readValue(p, PutAclPb.class); + return PutAcl.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclPb.java new file mode 100755 index 000000000..7d16f1c8f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclPb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PutAclPb { + @JsonProperty("permission") + private AclPermission permission; + + @JsonProperty("principal") + private String principal; + + @JsonProperty("scope") + private String scope; + + public PutAclPb setPermission(AclPermission permission) { + this.permission = permission; + return this; + } + + public AclPermission getPermission() { + return permission; + } + + public PutAclPb setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PutAclPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutAclPb that = (PutAclPb) o; + return Objects.equals(permission, that.permission) + && Objects.equals(principal, that.principal) + && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(permission, principal, scope); + } + + @Override + public String toString() { + return new ToStringer(PutAclPb.class) + 
.add("permission", permission) + .add("principal", principal) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java index 197b3373f..0754a59d5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PutAclResponse.PutAclResponseSerializer.class) +@JsonDeserialize(using = PutAclResponse.PutAclResponseDeserializer.class) public class PutAclResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(PutAclResponse.class).toString(); } + + PutAclResponsePb toPb() { + PutAclResponsePb pb = new PutAclResponsePb(); + + return pb; + } + + static PutAclResponse fromPb(PutAclResponsePb pb) { + PutAclResponse model = new PutAclResponse(); + + return model; + } + + public static class PutAclResponseSerializer extends JsonSerializer { + @Override + public void serialize(PutAclResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutAclResponsePb pb = value.toPb(); 
+ provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutAclResponseDeserializer extends JsonDeserializer { + @Override + public PutAclResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutAclResponsePb pb = mapper.readValue(p, PutAclResponsePb.class); + return PutAclResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponsePb.java new file mode 100755 index 000000000..5433d3b44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PutAclResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PutAclResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java index 0832a2ad1..5169fb406 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import 
com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PutSecret.PutSecretSerializer.class) +@JsonDeserialize(using = PutSecret.PutSecretDeserializer.class) public class PutSecret { /** If specified, value will be stored as bytes. */ - @JsonProperty("bytes_value") private String bytesValue; /** A unique name to identify the secret. */ - @JsonProperty("key") private String key; /** The name of the scope to which the secret will be associated with. */ - @JsonProperty("scope") private String scope; /** If specified, note that the value will be stored in UTF-8 (MB4) form. 
*/ - @JsonProperty("string_value") private String stringValue; public PutSecret setBytesValue(String bytesValue) { @@ -86,4 +93,43 @@ public String toString() { .add("stringValue", stringValue) .toString(); } + + PutSecretPb toPb() { + PutSecretPb pb = new PutSecretPb(); + pb.setBytesValue(bytesValue); + pb.setKey(key); + pb.setScope(scope); + pb.setStringValue(stringValue); + + return pb; + } + + static PutSecret fromPb(PutSecretPb pb) { + PutSecret model = new PutSecret(); + model.setBytesValue(pb.getBytesValue()); + model.setKey(pb.getKey()); + model.setScope(pb.getScope()); + model.setStringValue(pb.getStringValue()); + + return model; + } + + public static class PutSecretSerializer extends JsonSerializer { + @Override + public void serialize(PutSecret value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutSecretPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutSecretDeserializer extends JsonDeserializer { + @Override + public PutSecret deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutSecretPb pb = mapper.readValue(p, PutSecretPb.class); + return PutSecret.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretPb.java new file mode 100755 index 000000000..0ccd6fd60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class PutSecretPb { + @JsonProperty("bytes_value") + private String bytesValue; + + @JsonProperty("key") + private String key; + + @JsonProperty("scope") + private String scope; + + @JsonProperty("string_value") + private String stringValue; + + public PutSecretPb setBytesValue(String bytesValue) { + this.bytesValue = bytesValue; + return this; + } + + public String getBytesValue() { + return bytesValue; + } + + public PutSecretPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public PutSecretPb setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + public PutSecretPb setStringValue(String stringValue) { + this.stringValue = stringValue; + return this; + } + + public String getStringValue() { + return stringValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutSecretPb that = (PutSecretPb) o; + return Objects.equals(bytesValue, that.bytesValue) + && Objects.equals(key, that.key) + && Objects.equals(scope, that.scope) + && Objects.equals(stringValue, that.stringValue); + } + + @Override + public int hashCode() { + return Objects.hash(bytesValue, key, scope, stringValue); + } + + @Override + public String toString() { + return new ToStringer(PutSecretPb.class) + .add("bytesValue", bytesValue) + .add("key", key) + .add("scope", scope) + .add("stringValue", stringValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java index 
83efb2e8e..c2fb91ebf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = PutSecretResponse.PutSecretResponseSerializer.class) +@JsonDeserialize(using = PutSecretResponse.PutSecretResponseDeserializer.class) public class PutSecretResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(PutSecretResponse.class).toString(); } + + PutSecretResponsePb toPb() { + PutSecretResponsePb pb = new PutSecretResponsePb(); + + return pb; + } + + static PutSecretResponse fromPb(PutSecretResponsePb pb) { + PutSecretResponse model = new PutSecretResponse(); + + return model; + } + + public static class PutSecretResponseSerializer extends JsonSerializer { + @Override + public void serialize(PutSecretResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + PutSecretResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class PutSecretResponseDeserializer extends JsonDeserializer { + @Override + public PutSecretResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // 
The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + PutSecretResponsePb pb = mapper.readValue(p, PutSecretResponsePb.class); + return PutSecretResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponsePb.java new file mode 100755 index 000000000..e638b9640 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class PutSecretResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(PutSecretResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java index 1e507c3d6..3d88415dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java @@ -4,25 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RepoAccessControlRequest.RepoAccessControlRequestSerializer.class) +@JsonDeserialize(using = RepoAccessControlRequest.RepoAccessControlRequestDeserializer.class) public class RepoAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public RepoAccessControlRequest setGroupName(String groupName) { @@ -86,4 +93,47 @@ public String toString() { .add("userName", userName) .toString(); } + + RepoAccessControlRequestPb toPb() { + RepoAccessControlRequestPb pb = new RepoAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static RepoAccessControlRequest fromPb(RepoAccessControlRequestPb pb) { + RepoAccessControlRequest model = new RepoAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class RepoAccessControlRequestSerializer + extends 
JsonSerializer { + @Override + public void serialize( + RepoAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public RepoAccessControlRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoAccessControlRequestPb pb = mapper.readValue(p, RepoAccessControlRequestPb.class); + return RepoAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequestPb.java new file mode 100755 index 000000000..8f695fcac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RepoAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private RepoPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public RepoAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public RepoAccessControlRequestPb setPermissionLevel(RepoPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RepoPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public RepoAccessControlRequestPb setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RepoAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoAccessControlRequestPb that = (RepoAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String 
toString() { + return new ToStringer(RepoAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java index dfa24748c..9b55c685d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java @@ -4,30 +4,36 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepoAccessControlResponse.RepoAccessControlResponseSerializer.class) +@JsonDeserialize(using = RepoAccessControlResponse.RepoAccessControlResponseDeserializer.class) public class RepoAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. 
*/ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public RepoAccessControlResponse setAllPermissions(Collection allPermissions) { @@ -102,4 +108,49 @@ public String toString() { .add("userName", userName) .toString(); } + + RepoAccessControlResponsePb toPb() { + RepoAccessControlResponsePb pb = new RepoAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static RepoAccessControlResponse fromPb(RepoAccessControlResponsePb pb) { + RepoAccessControlResponse model = new RepoAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class RepoAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + RepoAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public RepoAccessControlResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoAccessControlResponsePb pb = mapper.readValue(p, RepoAccessControlResponsePb.class); + return RepoAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponsePb.java new file mode 100755 index 000000000..83dbf6c24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponsePb.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepoAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public RepoAccessControlResponsePb setAllPermissions(Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public RepoAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public RepoAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public RepoAccessControlResponsePb setServicePrincipalName(String 
servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RepoAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoAccessControlResponsePb that = (RepoAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(RepoAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java index 403fd581c..e08e12021 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java @@ -4,38 +4,42 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; /** Git folder (repo) information. */ @Generated +@JsonSerialize(using = RepoInfo.RepoInfoSerializer.class) +@JsonDeserialize(using = RepoInfo.RepoInfoDeserializer.class) public class RepoInfo { /** Name of the current git branch of the git folder (repo). */ - @JsonProperty("branch") private String branch; /** Current git commit id of the git folder (repo). */ - @JsonProperty("head_commit_id") private String headCommitId; /** Id of the git folder (repo) in the Workspace. */ - @JsonProperty("id") private Long id; /** Root path of the git folder (repo) in the Workspace. */ - @JsonProperty("path") private String path; /** Git provider of the remote git repository, e.g. `gitHub`. */ - @JsonProperty("provider") private String provider; /** Sparse checkout config for the git folder (repo). */ - @JsonProperty("sparse_checkout") private SparseCheckout sparseCheckout; /** URL of the remote git repository. 
*/ - @JsonProperty("url") private String url; public RepoInfo setBranch(String branch) { @@ -132,4 +136,49 @@ public String toString() { .add("url", url) .toString(); } + + RepoInfoPb toPb() { + RepoInfoPb pb = new RepoInfoPb(); + pb.setBranch(branch); + pb.setHeadCommitId(headCommitId); + pb.setId(id); + pb.setPath(path); + pb.setProvider(provider); + pb.setSparseCheckout(sparseCheckout); + pb.setUrl(url); + + return pb; + } + + static RepoInfo fromPb(RepoInfoPb pb) { + RepoInfo model = new RepoInfo(); + model.setBranch(pb.getBranch()); + model.setHeadCommitId(pb.getHeadCommitId()); + model.setId(pb.getId()); + model.setPath(pb.getPath()); + model.setProvider(pb.getProvider()); + model.setSparseCheckout(pb.getSparseCheckout()); + model.setUrl(pb.getUrl()); + + return model; + } + + public static class RepoInfoSerializer extends JsonSerializer { + @Override + public void serialize(RepoInfo value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoInfoPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoInfoDeserializer extends JsonDeserializer { + @Override + public RepoInfo deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoInfoPb pb = mapper.readValue(p, RepoInfoPb.class); + return RepoInfo.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfoPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfoPb.java new file mode 100755 index 000000000..8ab8e2ebe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfoPb.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Git folder (repo) information. */ +@Generated +class RepoInfoPb { + @JsonProperty("branch") + private String branch; + + @JsonProperty("head_commit_id") + private String headCommitId; + + @JsonProperty("id") + private Long id; + + @JsonProperty("path") + private String path; + + @JsonProperty("provider") + private String provider; + + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + @JsonProperty("url") + private String url; + + public RepoInfoPb setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public RepoInfoPb setHeadCommitId(String headCommitId) { + this.headCommitId = headCommitId; + return this; + } + + public String getHeadCommitId() { + return headCommitId; + } + + public RepoInfoPb setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public RepoInfoPb setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public RepoInfoPb setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public RepoInfoPb setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public RepoInfoPb setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoInfoPb that = (RepoInfoPb) o; + return Objects.equals(branch, that.branch) + && 
Objects.equals(headCommitId, that.headCommitId) + && Objects.equals(id, that.id) + && Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(RepoInfoPb.class) + .add("branch", branch) + .add("headCommitId", headCommitId) + .add("id", id) + .add("path", path) + .add("provider", provider) + .add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java index 2ec71c3bf..ab5d7a65b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepoPermission.RepoPermissionSerializer.class) +@JsonDeserialize(using = 
RepoPermission.RepoPermissionDeserializer.class) public class RepoPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; public RepoPermission setInherited(Boolean inherited) { @@ -72,4 +80,42 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + RepoPermissionPb toPb() { + RepoPermissionPb pb = new RepoPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static RepoPermission fromPb(RepoPermissionPb pb) { + RepoPermission model = new RepoPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class RepoPermissionSerializer extends JsonSerializer { + @Override + public void serialize(RepoPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoPermissionDeserializer extends JsonDeserializer { + @Override + public RepoPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoPermissionPb pb = mapper.readValue(p, RepoPermissionPb.class); + return RepoPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionPb.java new file mode 100755 index 000000000..03e685676 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepoPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private RepoPermissionLevel permissionLevel; + + public RepoPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public RepoPermissionPb setInheritedFromObject(Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public RepoPermissionPb setPermissionLevel(RepoPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RepoPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoPermissionPb that = (RepoPermissionPb) o; + return 
Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(RepoPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissions.java index e5a61c459..0d05cb517 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepoPermissions.RepoPermissionsSerializer.class) +@JsonDeserialize(using = RepoPermissions.RepoPermissionsDeserializer.class) public class RepoPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** 
*/ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public RepoPermissions setAccessControlList( @@ -73,4 +81,42 @@ public String toString() { .add("objectType", objectType) .toString(); } + + RepoPermissionsPb toPb() { + RepoPermissionsPb pb = new RepoPermissionsPb(); + pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static RepoPermissions fromPb(RepoPermissionsPb pb) { + RepoPermissions model = new RepoPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class RepoPermissionsSerializer extends JsonSerializer { + @Override + public void serialize(RepoPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoPermissionsDeserializer extends JsonDeserializer { + @Override + public RepoPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoPermissionsPb pb = mapper.readValue(p, RepoPermissionsPb.class); + return RepoPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java index fce7b9207..73e0e91fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = RepoPermissionsDescription.RepoPermissionsDescriptionSerializer.class) +@JsonDeserialize(using = RepoPermissionsDescription.RepoPermissionsDescriptionDeserializer.class) public class RepoPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; public RepoPermissionsDescription setDescription(String description) { @@ -56,4 +65,43 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + 
RepoPermissionsDescriptionPb toPb() { + RepoPermissionsDescriptionPb pb = new RepoPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static RepoPermissionsDescription fromPb(RepoPermissionsDescriptionPb pb) { + RepoPermissionsDescription model = new RepoPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class RepoPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + RepoPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public RepoPermissionsDescription deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoPermissionsDescriptionPb pb = mapper.readValue(p, RepoPermissionsDescriptionPb.class); + return RepoPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescriptionPb.java new file mode 100755 index 000000000..c200ccbcb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescriptionPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class RepoPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private RepoPermissionLevel permissionLevel; + + public RepoPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public RepoPermissionsDescriptionPb setPermissionLevel(RepoPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public RepoPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoPermissionsDescriptionPb that = (RepoPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(RepoPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsPb.java new file mode 100755 index 000000000..bd3836913 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepoPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public RepoPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public RepoPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public RepoPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoPermissionsPb that = (RepoPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(RepoPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequest.java index 7b200418d..d1fd69c93 100755 
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequest.java @@ -4,19 +4,28 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = RepoPermissionsRequest.RepoPermissionsRequestSerializer.class) +@JsonDeserialize(using = RepoPermissionsRequest.RepoPermissionsRequestDeserializer.class) public class RepoPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The repo for which to get or manage permissions. 
*/ - @JsonIgnore private String repoId; + private String repoId; public RepoPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -58,4 +67,43 @@ public String toString() { .add("repoId", repoId) .toString(); } + + RepoPermissionsRequestPb toPb() { + RepoPermissionsRequestPb pb = new RepoPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setRepoId(repoId); + + return pb; + } + + static RepoPermissionsRequest fromPb(RepoPermissionsRequestPb pb) { + RepoPermissionsRequest model = new RepoPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setRepoId(pb.getRepoId()); + + return model; + } + + public static class RepoPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + RepoPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + RepoPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class RepoPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public RepoPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + RepoPermissionsRequestPb pb = mapper.readValue(p, RepoPermissionsRequestPb.class); + return RepoPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequestPb.java new file mode 100755 index 000000000..760eeb330 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsRequestPb.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class RepoPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String repoId; + + public RepoPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public RepoPermissionsRequestPb setRepoId(String repoId) { + this.repoId = repoId; + return this; + } + + public String getRepoId() { + return repoId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoPermissionsRequestPb that = (RepoPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(repoId, that.repoId); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, repoId); + } + + @Override + public String toString() { + return new ToStringer(RepoPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + .add("repoId", repoId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java index aa7b08443..add1b1bbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java @@ -21,7 +21,7 @@ public CreateRepoResponse create(CreateRepoRequest request) { String path = "/api/2.0/repos"; try { 
Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateRepoResponse.class); @@ -35,7 +35,7 @@ public void delete(DeleteRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); try { Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteRepoResponse.class); } catch (IOException e) { @@ -48,7 +48,7 @@ public GetRepoResponse get(GetRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetRepoResponse.class); } catch (IOException e) { @@ -63,7 +63,7 @@ public GetRepoPermissionLevelsResponse getPermissionLevels( String.format("/api/2.0/permissions/repos/%s/permissionLevels", request.getRepoId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetRepoPermissionLevelsResponse.class); } catch (IOException e) { @@ -76,7 +76,7 @@ public RepoPermissions getPermissions(GetRepoPermissionsRequest request) { String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, RepoPermissions.class); } catch (IOException e) { @@ -89,7 +89,7 @@ public ListReposResponse 
list(ListReposRequest request) { String path = "/api/2.0/repos"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListReposResponse.class); } catch (IOException e) { @@ -102,7 +102,7 @@ public RepoPermissions setPermissions(RepoPermissionsRequest request) { String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RepoPermissions.class); @@ -116,7 +116,7 @@ public void update(UpdateRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateRepoResponse.class); @@ -130,7 +130,7 @@ public RepoPermissions updatePermissions(RepoPermissionsRequest request) { String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, RepoPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java index 2883feb1c..f2513d610 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java @@ -4,17 +4,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SecretMetadata.SecretMetadataSerializer.class) +@JsonDeserialize(using = SecretMetadata.SecretMetadataDeserializer.class) public class SecretMetadata { /** A unique name to identify the secret. */ - @JsonProperty("key") private String key; /** The last updated timestamp (in milliseconds) for the secret. 
*/ - @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; public SecretMetadata setKey(String key) { @@ -56,4 +65,40 @@ public String toString() { .add("lastUpdatedTimestamp", lastUpdatedTimestamp) .toString(); } + + SecretMetadataPb toPb() { + SecretMetadataPb pb = new SecretMetadataPb(); + pb.setKey(key); + pb.setLastUpdatedTimestamp(lastUpdatedTimestamp); + + return pb; + } + + static SecretMetadata fromPb(SecretMetadataPb pb) { + SecretMetadata model = new SecretMetadata(); + model.setKey(pb.getKey()); + model.setLastUpdatedTimestamp(pb.getLastUpdatedTimestamp()); + + return model; + } + + public static class SecretMetadataSerializer extends JsonSerializer { + @Override + public void serialize(SecretMetadata value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SecretMetadataPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SecretMetadataDeserializer extends JsonDeserializer { + @Override + public SecretMetadata deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SecretMetadataPb pb = mapper.readValue(p, SecretMetadataPb.class); + return SecretMetadata.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadataPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadataPb.java new file mode 100755 index 000000000..d66595405 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadataPb.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SecretMetadataPb { + @JsonProperty("key") + private String key; + + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + public SecretMetadataPb setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public SecretMetadataPb setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SecretMetadataPb that = (SecretMetadataPb) o; + return Objects.equals(key, that.key) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp); + } + + @Override + public int hashCode() { + return Objects.hash(key, lastUpdatedTimestamp); + } + + @Override + public String toString() { + return new ToStringer(SecretMetadataPb.class) + .add("key", key) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java index b86511c9d..e90c0c809 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java @@ -4,21 +4,29 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = SecretScope.SecretScopeSerializer.class) +@JsonDeserialize(using = SecretScope.SecretScopeDeserializer.class) public class SecretScope { /** The type of secret scope backend. */ - @JsonProperty("backend_type") private ScopeBackendType backendType; /** The metadata for the secret scope if the type is `AZURE_KEYVAULT` */ - @JsonProperty("keyvault_metadata") private AzureKeyVaultSecretScopeMetadata keyvaultMetadata; /** A unique name to identify the secret scope. 
*/ - @JsonProperty("name") private String name; public SecretScope setBackendType(ScopeBackendType backendType) { @@ -71,4 +79,41 @@ public String toString() { .add("name", name) .toString(); } + + SecretScopePb toPb() { + SecretScopePb pb = new SecretScopePb(); + pb.setBackendType(backendType); + pb.setKeyvaultMetadata(keyvaultMetadata); + pb.setName(name); + + return pb; + } + + static SecretScope fromPb(SecretScopePb pb) { + SecretScope model = new SecretScope(); + model.setBackendType(pb.getBackendType()); + model.setKeyvaultMetadata(pb.getKeyvaultMetadata()); + model.setName(pb.getName()); + + return model; + } + + public static class SecretScopeSerializer extends JsonSerializer { + @Override + public void serialize(SecretScope value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SecretScopePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SecretScopeDeserializer extends JsonDeserializer { + @Override + public SecretScope deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SecretScopePb pb = mapper.readValue(p, SecretScopePb.class); + return SecretScope.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScopePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScopePb.java new file mode 100755 index 000000000..b28b4aa3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScopePb.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class SecretScopePb { + @JsonProperty("backend_type") + private ScopeBackendType backendType; + + @JsonProperty("keyvault_metadata") + private AzureKeyVaultSecretScopeMetadata keyvaultMetadata; + + @JsonProperty("name") + private String name; + + public SecretScopePb setBackendType(ScopeBackendType backendType) { + this.backendType = backendType; + return this; + } + + public ScopeBackendType getBackendType() { + return backendType; + } + + public SecretScopePb setKeyvaultMetadata(AzureKeyVaultSecretScopeMetadata keyvaultMetadata) { + this.keyvaultMetadata = keyvaultMetadata; + return this; + } + + public AzureKeyVaultSecretScopeMetadata getKeyvaultMetadata() { + return keyvaultMetadata; + } + + public SecretScopePb setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SecretScopePb that = (SecretScopePb) o; + return Objects.equals(backendType, that.backendType) + && Objects.equals(keyvaultMetadata, that.keyvaultMetadata) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(backendType, keyvaultMetadata, name); + } + + @Override + public String toString() { + return new ToStringer(SecretScopePb.class) + .add("backendType", backendType) + .add("keyvaultMetadata", keyvaultMetadata) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java index 78a15a19c..d4b3384a0 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java @@ -21,7 +21,7 @@ public void createScope(CreateScope request) { String path = "/api/2.0/secrets/scopes/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CreateScopeResponse.class); @@ -35,7 +35,7 @@ public void deleteAcl(DeleteAcl request) { String path = "/api/2.0/secrets/acls/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteAclResponse.class); @@ -49,7 +49,7 @@ public void deleteScope(DeleteScope request) { String path = "/api/2.0/secrets/scopes/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteScopeResponse.class); @@ -63,7 +63,7 @@ public void deleteSecret(DeleteSecret request) { String path = "/api/2.0/secrets/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteSecretResponse.class); @@ -77,7 +77,7 @@ public AclItem getAcl(GetAclRequest request) { String path = "/api/2.0/secrets/acls/get"; try { Request req = new Request("GET", path); - 
ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, AclItem.class); } catch (IOException e) { @@ -90,7 +90,7 @@ public GetSecretResponse getSecret(GetSecretRequest request) { String path = "/api/2.0/secrets/get"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetSecretResponse.class); } catch (IOException e) { @@ -103,7 +103,7 @@ public ListAclsResponse listAcls(ListAclsRequest request) { String path = "/api/2.0/secrets/acls/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListAclsResponse.class); } catch (IOException e) { @@ -128,7 +128,7 @@ public ListSecretsResponse listSecrets(ListSecretsRequest request) { String path = "/api/2.0/secrets/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListSecretsResponse.class); } catch (IOException e) { @@ -141,7 +141,7 @@ public void putAcl(PutAcl request) { String path = "/api/2.0/secrets/acls/put"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PutAclResponse.class); @@ -155,7 +155,7 @@ public void putSecret(PutSecret request) { String path = "/api/2.0/secrets/put"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); 
req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PutSecretResponse.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java index eb1201e43..4eb812d69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Sparse checkout configuration, it contains options like cone patterns. */ @Generated +@JsonSerialize(using = SparseCheckout.SparseCheckoutSerializer.class) +@JsonDeserialize(using = SparseCheckout.SparseCheckoutDeserializer.class) public class SparseCheckout { /** * List of sparse checkout cone patterns, see [cone mode handling] for details. @@ -17,7 +28,6 @@ public class SparseCheckout { *

[cone mode handling]: * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling */ - @JsonProperty("patterns") private Collection patterns; public SparseCheckout setPatterns(Collection patterns) { @@ -46,4 +56,38 @@ public int hashCode() { public String toString() { return new ToStringer(SparseCheckout.class).add("patterns", patterns).toString(); } + + SparseCheckoutPb toPb() { + SparseCheckoutPb pb = new SparseCheckoutPb(); + pb.setPatterns(patterns); + + return pb; + } + + static SparseCheckout fromPb(SparseCheckoutPb pb) { + SparseCheckout model = new SparseCheckout(); + model.setPatterns(pb.getPatterns()); + + return model; + } + + public static class SparseCheckoutSerializer extends JsonSerializer { + @Override + public void serialize(SparseCheckout value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparseCheckoutPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparseCheckoutDeserializer extends JsonDeserializer { + @Override + public SparseCheckout deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparseCheckoutPb pb = mapper.readValue(p, SparseCheckoutPb.class); + return SparseCheckout.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutPb.java new file mode 100755 index 000000000..8fb14dea8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutPb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Sparse checkout configuration, it contains options like cone patterns. */ +@Generated +class SparseCheckoutPb { + @JsonProperty("patterns") + private Collection patterns; + + public SparseCheckoutPb setPatterns(Collection patterns) { + this.patterns = patterns; + return this; + } + + public Collection getPatterns() { + return patterns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparseCheckoutPb that = (SparseCheckoutPb) o; + return Objects.equals(patterns, that.patterns); + } + + @Override + public int hashCode() { + return Objects.hash(patterns); + } + + @Override + public String toString() { + return new ToStringer(SparseCheckoutPb.class).add("patterns", patterns).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java index 644efa487..d59c1af02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java @@ -4,12 +4,23 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; /** Sparse checkout configuration, it contains options like cone patterns. */ @Generated +@JsonSerialize(using = SparseCheckoutUpdate.SparseCheckoutUpdateSerializer.class) +@JsonDeserialize(using = SparseCheckoutUpdate.SparseCheckoutUpdateDeserializer.class) public class SparseCheckoutUpdate { /** * List of sparse checkout cone patterns, see [cone mode handling] for details. @@ -17,7 +28,6 @@ public class SparseCheckoutUpdate { *

[cone mode handling]: * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling */ - @JsonProperty("patterns") private Collection patterns; public SparseCheckoutUpdate setPatterns(Collection patterns) { @@ -46,4 +56,40 @@ public int hashCode() { public String toString() { return new ToStringer(SparseCheckoutUpdate.class).add("patterns", patterns).toString(); } + + SparseCheckoutUpdatePb toPb() { + SparseCheckoutUpdatePb pb = new SparseCheckoutUpdatePb(); + pb.setPatterns(patterns); + + return pb; + } + + static SparseCheckoutUpdate fromPb(SparseCheckoutUpdatePb pb) { + SparseCheckoutUpdate model = new SparseCheckoutUpdate(); + model.setPatterns(pb.getPatterns()); + + return model; + } + + public static class SparseCheckoutUpdateSerializer extends JsonSerializer { + @Override + public void serialize( + SparseCheckoutUpdate value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + SparseCheckoutUpdatePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class SparseCheckoutUpdateDeserializer + extends JsonDeserializer { + @Override + public SparseCheckoutUpdate deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + SparseCheckoutUpdatePb pb = mapper.readValue(p, SparseCheckoutUpdatePb.class); + return SparseCheckoutUpdate.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdatePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdatePb.java new file mode 100755 index 000000000..e4cb37600 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdatePb.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Sparse checkout configuration, it contains options like cone patterns. */ +@Generated +class SparseCheckoutUpdatePb { + @JsonProperty("patterns") + private Collection patterns; + + public SparseCheckoutUpdatePb setPatterns(Collection patterns) { + this.patterns = patterns; + return this; + } + + public Collection getPatterns() { + return patterns; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparseCheckoutUpdatePb that = (SparseCheckoutUpdatePb) o; + return Objects.equals(patterns, that.patterns); + } + + @Override + public int hashCode() { + return Objects.hash(patterns); + } + + @Override + public String toString() { + return new ToStringer(SparseCheckoutUpdatePb.class).add("patterns", patterns).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java index 620795bf8..14795c921 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java @@ -4,21 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import 
com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCredentialsRequest.UpdateCredentialsRequestSerializer.class) +@JsonDeserialize(using = UpdateCredentialsRequest.UpdateCredentialsRequestDeserializer.class) public class UpdateCredentialsRequest { /** The ID for the corresponding credential to access. */ - @JsonIgnore private Long credentialId; + private Long credentialId; /** * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, * `gitLabEnterpriseEdition` and `awsCodeCommit`. */ - @JsonProperty("git_provider") private String gitProvider; /** @@ -29,7 +38,6 @@ public class UpdateCredentialsRequest { * please see your provider's Personal Access Token authentication documentation to see what is * supported. */ - @JsonProperty("git_username") private String gitUsername; /** @@ -38,7 +46,6 @@ public class UpdateCredentialsRequest { * *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html */ - @JsonProperty("personal_access_token") private String personalAccessToken; public UpdateCredentialsRequest setCredentialId(Long credentialId) { @@ -102,4 +109,47 @@ public String toString() { .add("personalAccessToken", personalAccessToken) .toString(); } + + UpdateCredentialsRequestPb toPb() { + UpdateCredentialsRequestPb pb = new UpdateCredentialsRequestPb(); + pb.setCredentialId(credentialId); + pb.setGitProvider(gitProvider); + pb.setGitUsername(gitUsername); + pb.setPersonalAccessToken(personalAccessToken); + + return pb; + } + + static UpdateCredentialsRequest fromPb(UpdateCredentialsRequestPb pb) { + UpdateCredentialsRequest model = new UpdateCredentialsRequest(); + model.setCredentialId(pb.getCredentialId()); + model.setGitProvider(pb.getGitProvider()); + model.setGitUsername(pb.getGitUsername()); + model.setPersonalAccessToken(pb.getPersonalAccessToken()); + + return model; + } + + public static class UpdateCredentialsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCredentialsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCredentialsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCredentialsRequestDeserializer + extends JsonDeserializer { + @Override + public UpdateCredentialsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCredentialsRequestPb pb = mapper.readValue(p, UpdateCredentialsRequestPb.class); + return UpdateCredentialsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequestPb.java new file mode 100755 index 000000000..0a9b96675 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateCredentialsRequestPb { + @JsonIgnore private Long credentialId; + + @JsonProperty("git_provider") + private String gitProvider; + + @JsonProperty("git_username") + private String gitUsername; + + @JsonProperty("personal_access_token") + private String personalAccessToken; + + public UpdateCredentialsRequestPb setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + public UpdateCredentialsRequestPb setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public UpdateCredentialsRequestPb setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; + return this; + } + + public String getGitUsername() { + return gitUsername; + } + + public UpdateCredentialsRequestPb setPersonalAccessToken(String personalAccessToken) { + this.personalAccessToken = personalAccessToken; + return this; + 
} + + public String getPersonalAccessToken() { + return personalAccessToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCredentialsRequestPb that = (UpdateCredentialsRequestPb) o; + return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername) + && Objects.equals(personalAccessToken, that.personalAccessToken); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId, gitProvider, gitUsername, personalAccessToken); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialsRequestPb.class) + .add("credentialId", credentialId) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .add("personalAccessToken", personalAccessToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java index 20e001bd3..7db23ca17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateCredentialsResponse.UpdateCredentialsResponseSerializer.class) +@JsonDeserialize(using = UpdateCredentialsResponse.UpdateCredentialsResponseDeserializer.class) public class UpdateCredentialsResponse { @Override @@ -25,4 +37,39 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateCredentialsResponse.class).toString(); } + + UpdateCredentialsResponsePb toPb() { + UpdateCredentialsResponsePb pb = new UpdateCredentialsResponsePb(); + + return pb; + } + + static UpdateCredentialsResponse fromPb(UpdateCredentialsResponsePb pb) { + UpdateCredentialsResponse model = new UpdateCredentialsResponse(); + + return model; + } + + public static class UpdateCredentialsResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + UpdateCredentialsResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateCredentialsResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateCredentialsResponseDeserializer + extends JsonDeserializer { + @Override + public UpdateCredentialsResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateCredentialsResponsePb pb = mapper.readValue(p, UpdateCredentialsResponsePb.class); + return UpdateCredentialsResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponsePb.java new file mode 100755 index 000000000..22e847844 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateCredentialsResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialsResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java index 107125ef8..d52ce9d2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java @@ -4,24 +4,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRepoRequest.UpdateRepoRequestSerializer.class) +@JsonDeserialize(using = UpdateRepoRequest.UpdateRepoRequestDeserializer.class) public class UpdateRepoRequest { /** Branch that the local version of the repo is checked out to. */ - @JsonProperty("branch") private String branch; /** ID of the Git folder (repo) object in the workspace. */ - @JsonIgnore private Long repoId; + private Long repoId; /** * If specified, update the sparse checkout settings. The update will fail if sparse checkout is * not enabled for the repo. */ - @JsonProperty("sparse_checkout") private SparseCheckoutUpdate sparseCheckout; /** @@ -29,7 +37,6 @@ public class UpdateRepoRequest { * repo in a detached HEAD state. Before committing new changes, you must update the repo to a * branch instead of the detached HEAD. 
*/ - @JsonProperty("tag") private String tag; public UpdateRepoRequest setBranch(String branch) { @@ -93,4 +100,44 @@ public String toString() { .add("tag", tag) .toString(); } + + UpdateRepoRequestPb toPb() { + UpdateRepoRequestPb pb = new UpdateRepoRequestPb(); + pb.setBranch(branch); + pb.setRepoId(repoId); + pb.setSparseCheckout(sparseCheckout); + pb.setTag(tag); + + return pb; + } + + static UpdateRepoRequest fromPb(UpdateRepoRequestPb pb) { + UpdateRepoRequest model = new UpdateRepoRequest(); + model.setBranch(pb.getBranch()); + model.setRepoId(pb.getRepoId()); + model.setSparseCheckout(pb.getSparseCheckout()); + model.setTag(pb.getTag()); + + return model; + } + + public static class UpdateRepoRequestSerializer extends JsonSerializer { + @Override + public void serialize(UpdateRepoRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRepoRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class UpdateRepoRequestDeserializer extends JsonDeserializer { + @Override + public UpdateRepoRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRepoRequestPb pb = mapper.readValue(p, UpdateRepoRequestPb.class); + return UpdateRepoRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequestPb.java new file mode 100755 index 000000000..5ba19a10e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequestPb.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class UpdateRepoRequestPb { + @JsonProperty("branch") + private String branch; + + @JsonIgnore private Long repoId; + + @JsonProperty("sparse_checkout") + private SparseCheckoutUpdate sparseCheckout; + + @JsonProperty("tag") + private String tag; + + public UpdateRepoRequestPb setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public UpdateRepoRequestPb setRepoId(Long repoId) { + this.repoId = repoId; + return this; + } + + public Long getRepoId() { + return repoId; + } + + public UpdateRepoRequestPb setSparseCheckout(SparseCheckoutUpdate sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckoutUpdate getSparseCheckout() { + return sparseCheckout; + } + + public UpdateRepoRequestPb setTag(String tag) { + this.tag = tag; + return this; + } + + public String getTag() { + return tag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRepoRequestPb that = (UpdateRepoRequestPb) o; + return Objects.equals(branch, that.branch) + && Objects.equals(repoId, that.repoId) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(tag, that.tag); + } + + @Override + public int hashCode() { + return Objects.hash(branch, repoId, sparseCheckout, tag); + } + + @Override + public String toString() { + return new ToStringer(UpdateRepoRequestPb.class) + .add("branch", branch) + .add("repoId", repoId) + .add("sparseCheckout", sparseCheckout) + .add("tag", tag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java index c7d596164..9e329dd80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java @@ -4,9 +4,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize(using = UpdateRepoResponse.UpdateRepoResponseSerializer.class) +@JsonDeserialize(using = UpdateRepoResponse.UpdateRepoResponseDeserializer.class) public class UpdateRepoResponse { @Override @@ -25,4 +37,36 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateRepoResponse.class).toString(); } + + UpdateRepoResponsePb toPb() { + UpdateRepoResponsePb pb = new UpdateRepoResponsePb(); + + return pb; + } + + static UpdateRepoResponse fromPb(UpdateRepoResponsePb pb) { + UpdateRepoResponse model = new UpdateRepoResponse(); + + return model; + } + + public static class UpdateRepoResponseSerializer extends JsonSerializer { + @Override + public void serialize(UpdateRepoResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + UpdateRepoResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, 
gen); + } + } + + public static class UpdateRepoResponseDeserializer extends JsonDeserializer { + @Override + public UpdateRepoResponse deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + UpdateRepoResponsePb pb = mapper.readValue(p, UpdateRepoResponsePb.class); + return UpdateRepoResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponsePb.java new file mode 100755 index 000000000..16d0817ee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponsePb.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +class UpdateRepoResponsePb { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateRepoResponsePb.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java index 477ceb249..9ae314d1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java @@ -21,7 +21,7 @@ public void delete(Delete request) { String path = 
"/api/2.0/workspace/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteResponse.class); @@ -35,7 +35,7 @@ public ExportResponse export(ExportRequest request) { String path = "/api/2.0/workspace/export"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ExportResponse.class); } catch (IOException e) { @@ -52,7 +52,7 @@ public GetWorkspaceObjectPermissionLevelsResponse getPermissionLevels( request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, GetWorkspaceObjectPermissionLevelsResponse.class); } catch (IOException e) { @@ -68,7 +68,7 @@ public WorkspaceObjectPermissions getPermissions(GetWorkspaceObjectPermissionsRe request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, WorkspaceObjectPermissions.class); } catch (IOException e) { @@ -81,7 +81,7 @@ public ObjectInfo getStatus(GetStatusRequest request) { String path = "/api/2.0/workspace/get-status"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ObjectInfo.class); } catch (IOException e) { @@ -94,7 +94,7 @@ public void importContent(Import request) { String path = 
"/api/2.0/workspace/import"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ImportResponse.class); @@ -108,7 +108,7 @@ public ListResponse list(ListWorkspaceRequest request) { String path = "/api/2.0/workspace/list"; try { Request req = new Request("GET", path); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListResponse.class); } catch (IOException e) { @@ -121,7 +121,7 @@ public void mkdirs(Mkdirs request) { String path = "/api/2.0/workspace/mkdirs"; try { Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, MkdirsResponse.class); @@ -138,7 +138,7 @@ public WorkspaceObjectPermissions setPermissions(WorkspaceObjectPermissionsReque request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); try { Request req = new Request("PUT", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, WorkspaceObjectPermissions.class); @@ -155,7 +155,7 @@ public WorkspaceObjectPermissions updatePermissions(WorkspaceObjectPermissionsRe request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + ApiClient.setQuery(req, request.toPb()); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", 
"application/json"); return apiClient.execute(req, WorkspaceObjectPermissions.class); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java index edb8f3b3e..2d93926e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java @@ -4,25 +4,35 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = WorkspaceObjectAccessControlRequest.WorkspaceObjectAccessControlRequestSerializer.class) +@JsonDeserialize( + using = + WorkspaceObjectAccessControlRequest.WorkspaceObjectAccessControlRequestDeserializer.class) public class WorkspaceObjectAccessControlRequest { /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Permission level */ - @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; /** application ID of a service principal */ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - 
@JsonProperty("user_name") private String userName; public WorkspaceObjectAccessControlRequest setGroupName(String groupName) { @@ -87,4 +97,48 @@ public String toString() { .add("userName", userName) .toString(); } + + WorkspaceObjectAccessControlRequestPb toPb() { + WorkspaceObjectAccessControlRequestPb pb = new WorkspaceObjectAccessControlRequestPb(); + pb.setGroupName(groupName); + pb.setPermissionLevel(permissionLevel); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static WorkspaceObjectAccessControlRequest fromPb(WorkspaceObjectAccessControlRequestPb pb) { + WorkspaceObjectAccessControlRequest model = new WorkspaceObjectAccessControlRequest(); + model.setGroupName(pb.getGroupName()); + model.setPermissionLevel(pb.getPermissionLevel()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class WorkspaceObjectAccessControlRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectAccessControlRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectAccessControlRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceObjectAccessControlRequestDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectAccessControlRequest deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectAccessControlRequestPb pb = + mapper.readValue(p, WorkspaceObjectAccessControlRequestPb.class); + return WorkspaceObjectAccessControlRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequestPb.java new file mode 100755 index 000000000..7bd993c78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequestPb.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WorkspaceObjectAccessControlRequestPb { + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("permission_level") + private WorkspaceObjectPermissionLevel permissionLevel; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public WorkspaceObjectAccessControlRequestPb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + + public WorkspaceObjectAccessControlRequestPb setPermissionLevel( + WorkspaceObjectPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WorkspaceObjectPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + public WorkspaceObjectAccessControlRequestPb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String 
getServicePrincipalName() { + return servicePrincipalName; + } + + public WorkspaceObjectAccessControlRequestPb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectAccessControlRequestPb that = (WorkspaceObjectAccessControlRequestPb) o; + return Objects.equals(groupName, that.groupName) + && Objects.equals(permissionLevel, that.permissionLevel) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, permissionLevel, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectAccessControlRequestPb.class) + .add("groupName", groupName) + .add("permissionLevel", permissionLevel) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponse.java index d2e4c1d93..67aafac8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponse.java @@ -4,30 +4,40 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import 
com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize( + using = + WorkspaceObjectAccessControlResponse.WorkspaceObjectAccessControlResponseSerializer.class) +@JsonDeserialize( + using = + WorkspaceObjectAccessControlResponse.WorkspaceObjectAccessControlResponseDeserializer.class) public class WorkspaceObjectAccessControlResponse { /** All permissions. */ - @JsonProperty("all_permissions") private Collection allPermissions; /** Display name of the user or service principal. */ - @JsonProperty("display_name") private String displayName; /** name of the group */ - @JsonProperty("group_name") private String groupName; /** Name of the service principal. 
*/ - @JsonProperty("service_principal_name") private String servicePrincipalName; /** name of the user */ - @JsonProperty("user_name") private String userName; public WorkspaceObjectAccessControlResponse setAllPermissions( @@ -103,4 +113,50 @@ public String toString() { .add("userName", userName) .toString(); } + + WorkspaceObjectAccessControlResponsePb toPb() { + WorkspaceObjectAccessControlResponsePb pb = new WorkspaceObjectAccessControlResponsePb(); + pb.setAllPermissions(allPermissions); + pb.setDisplayName(displayName); + pb.setGroupName(groupName); + pb.setServicePrincipalName(servicePrincipalName); + pb.setUserName(userName); + + return pb; + } + + static WorkspaceObjectAccessControlResponse fromPb(WorkspaceObjectAccessControlResponsePb pb) { + WorkspaceObjectAccessControlResponse model = new WorkspaceObjectAccessControlResponse(); + model.setAllPermissions(pb.getAllPermissions()); + model.setDisplayName(pb.getDisplayName()); + model.setGroupName(pb.getGroupName()); + model.setServicePrincipalName(pb.getServicePrincipalName()); + model.setUserName(pb.getUserName()); + + return model; + } + + public static class WorkspaceObjectAccessControlResponseSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectAccessControlResponse value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectAccessControlResponsePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceObjectAccessControlResponseDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectAccessControlResponse deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectAccessControlResponsePb pb = + mapper.readValue(p, WorkspaceObjectAccessControlResponsePb.class); + return WorkspaceObjectAccessControlResponse.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponsePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponsePb.java new file mode 100755 index 000000000..3b66e0c23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlResponsePb.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WorkspaceObjectAccessControlResponsePb { + @JsonProperty("all_permissions") + private Collection allPermissions; + + @JsonProperty("display_name") + private String displayName; + + @JsonProperty("group_name") + private String groupName; + + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + @JsonProperty("user_name") + private String userName; + + public WorkspaceObjectAccessControlResponsePb setAllPermissions( + Collection allPermissions) { + this.allPermissions = allPermissions; + return this; + } + + public Collection getAllPermissions() { + return allPermissions; + } + + public WorkspaceObjectAccessControlResponsePb setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public WorkspaceObjectAccessControlResponsePb setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public 
String getGroupName() { + return groupName; + } + + public WorkspaceObjectAccessControlResponsePb setServicePrincipalName( + String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public WorkspaceObjectAccessControlResponsePb setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectAccessControlResponsePb that = (WorkspaceObjectAccessControlResponsePb) o; + return Objects.equals(allPermissions, that.allPermissions) + && Objects.equals(displayName, that.displayName) + && Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(allPermissions, displayName, groupName, servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectAccessControlResponsePb.class) + .add("allPermissions", allPermissions) + .add("displayName", displayName) + .add("groupName", groupName) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java index 748bd2854..b34d2faf1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; 
import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WorkspaceObjectPermission.WorkspaceObjectPermissionSerializer.class) +@JsonDeserialize(using = WorkspaceObjectPermission.WorkspaceObjectPermissionDeserializer.class) public class WorkspaceObjectPermission { /** */ - @JsonProperty("inherited") private Boolean inherited; /** */ - @JsonProperty("inherited_from_object") private Collection inheritedFromObject; /** Permission level */ - @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; public WorkspaceObjectPermission setInherited(Boolean inherited) { @@ -73,4 +81,45 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + WorkspaceObjectPermissionPb toPb() { + WorkspaceObjectPermissionPb pb = new WorkspaceObjectPermissionPb(); + pb.setInherited(inherited); + pb.setInheritedFromObject(inheritedFromObject); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static WorkspaceObjectPermission fromPb(WorkspaceObjectPermissionPb pb) { + WorkspaceObjectPermission model = new WorkspaceObjectPermission(); + model.setInherited(pb.getInherited()); + model.setInheritedFromObject(pb.getInheritedFromObject()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class 
WorkspaceObjectPermissionSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectPermission value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectPermissionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceObjectPermissionDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectPermission deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectPermissionPb pb = mapper.readValue(p, WorkspaceObjectPermissionPb.class); + return WorkspaceObjectPermission.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionPb.java new file mode 100755 index 000000000..fb8da0c86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionPb.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WorkspaceObjectPermissionPb { + @JsonProperty("inherited") + private Boolean inherited; + + @JsonProperty("inherited_from_object") + private Collection inheritedFromObject; + + @JsonProperty("permission_level") + private WorkspaceObjectPermissionLevel permissionLevel; + + public WorkspaceObjectPermissionPb setInherited(Boolean inherited) { + this.inherited = inherited; + return this; + } + + public Boolean getInherited() { + return inherited; + } + + public WorkspaceObjectPermissionPb setInheritedFromObject( + Collection inheritedFromObject) { + this.inheritedFromObject = inheritedFromObject; + return this; + } + + public Collection getInheritedFromObject() { + return inheritedFromObject; + } + + public WorkspaceObjectPermissionPb setPermissionLevel( + WorkspaceObjectPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WorkspaceObjectPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectPermissionPb that = (WorkspaceObjectPermissionPb) o; + return Objects.equals(inherited, that.inherited) + && Objects.equals(inheritedFromObject, that.inheritedFromObject) + && Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(inherited, inheritedFromObject, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectPermissionPb.class) + .add("inherited", inherited) + .add("inheritedFromObject", inheritedFromObject) + .add("permissionLevel", permissionLevel) + .toString(); + 
} +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissions.java index b3f4f5bae..c91c63cef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissions.java @@ -4,22 +4,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import java.util.Objects; @Generated +@JsonSerialize(using = WorkspaceObjectPermissions.WorkspaceObjectPermissionsSerializer.class) +@JsonDeserialize(using = WorkspaceObjectPermissions.WorkspaceObjectPermissionsDeserializer.class) public class WorkspaceObjectPermissions { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** */ - @JsonProperty("object_id") private String objectId; /** */ - @JsonProperty("object_type") private String objectType; public WorkspaceObjectPermissions setAccessControlList( @@ -73,4 +81,45 @@ public String toString() { .add("objectType", objectType) .toString(); } + + WorkspaceObjectPermissionsPb toPb() { + WorkspaceObjectPermissionsPb pb = new WorkspaceObjectPermissionsPb(); + 
pb.setAccessControlList(accessControlList); + pb.setObjectId(objectId); + pb.setObjectType(objectType); + + return pb; + } + + static WorkspaceObjectPermissions fromPb(WorkspaceObjectPermissionsPb pb) { + WorkspaceObjectPermissions model = new WorkspaceObjectPermissions(); + model.setAccessControlList(pb.getAccessControlList()); + model.setObjectId(pb.getObjectId()); + model.setObjectType(pb.getObjectType()); + + return model; + } + + public static class WorkspaceObjectPermissionsSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectPermissions value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectPermissionsPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceObjectPermissionsDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectPermissions deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectPermissionsPb pb = mapper.readValue(p, WorkspaceObjectPermissionsPb.class); + return WorkspaceObjectPermissions.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java index 31b42d41f..16d1d9250 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java @@ -4,17 +4,31 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Objects; @Generated +@JsonSerialize( + using = + WorkspaceObjectPermissionsDescription.WorkspaceObjectPermissionsDescriptionSerializer.class) +@JsonDeserialize( + using = + WorkspaceObjectPermissionsDescription.WorkspaceObjectPermissionsDescriptionDeserializer + .class) public class WorkspaceObjectPermissionsDescription { /** */ - @JsonProperty("description") private String description; /** Permission level */ - @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; public WorkspaceObjectPermissionsDescription 
setDescription(String description) { @@ -57,4 +71,44 @@ public String toString() { .add("permissionLevel", permissionLevel) .toString(); } + + WorkspaceObjectPermissionsDescriptionPb toPb() { + WorkspaceObjectPermissionsDescriptionPb pb = new WorkspaceObjectPermissionsDescriptionPb(); + pb.setDescription(description); + pb.setPermissionLevel(permissionLevel); + + return pb; + } + + static WorkspaceObjectPermissionsDescription fromPb(WorkspaceObjectPermissionsDescriptionPb pb) { + WorkspaceObjectPermissionsDescription model = new WorkspaceObjectPermissionsDescription(); + model.setDescription(pb.getDescription()); + model.setPermissionLevel(pb.getPermissionLevel()); + + return model; + } + + public static class WorkspaceObjectPermissionsDescriptionSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectPermissionsDescription value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectPermissionsDescriptionPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class WorkspaceObjectPermissionsDescriptionDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectPermissionsDescription deserialize( + JsonParser p, DeserializationContext ctxt) throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. 
+ ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectPermissionsDescriptionPb pb = + mapper.readValue(p, WorkspaceObjectPermissionsDescriptionPb.class); + return WorkspaceObjectPermissionsDescription.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescriptionPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescriptionPb.java new file mode 100755 index 000000000..8e9218aaf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescriptionPb.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +class WorkspaceObjectPermissionsDescriptionPb { + @JsonProperty("description") + private String description; + + @JsonProperty("permission_level") + private WorkspaceObjectPermissionLevel permissionLevel; + + public WorkspaceObjectPermissionsDescriptionPb setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public WorkspaceObjectPermissionsDescriptionPb setPermissionLevel( + WorkspaceObjectPermissionLevel permissionLevel) { + this.permissionLevel = permissionLevel; + return this; + } + + public WorkspaceObjectPermissionLevel getPermissionLevel() { + return permissionLevel; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectPermissionsDescriptionPb that = (WorkspaceObjectPermissionsDescriptionPb) o; + return Objects.equals(description, that.description) + && 
Objects.equals(permissionLevel, that.permissionLevel); + } + + @Override + public int hashCode() { + return Objects.hash(description, permissionLevel); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectPermissionsDescriptionPb.class) + .add("description", description) + .add("permissionLevel", permissionLevel) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsPb.java new file mode 100755 index 000000000..48cea2e2f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsPb.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WorkspaceObjectPermissionsPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonProperty("object_id") + private String objectId; + + @JsonProperty("object_type") + private String objectType; + + public WorkspaceObjectPermissionsPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public WorkspaceObjectPermissionsPb setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public WorkspaceObjectPermissionsPb setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectPermissionsPb that = (WorkspaceObjectPermissionsPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectPermissionsPb.class) + .add("accessControlList", accessControlList) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java index 1ad8a644d..35c393e64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java @@ -4,22 +4,33 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; import java.util.Collection; import 
java.util.Objects; @Generated +@JsonSerialize( + using = WorkspaceObjectPermissionsRequest.WorkspaceObjectPermissionsRequestSerializer.class) +@JsonDeserialize( + using = WorkspaceObjectPermissionsRequest.WorkspaceObjectPermissionsRequestDeserializer.class) public class WorkspaceObjectPermissionsRequest { /** */ - @JsonProperty("access_control_list") private Collection accessControlList; /** The workspace object for which to get or manage permissions. */ - @JsonIgnore private String workspaceObjectId; + private String workspaceObjectId; /** The workspace object type for which to get or manage permissions. */ - @JsonIgnore private String workspaceObjectType; + private String workspaceObjectType; public WorkspaceObjectPermissionsRequest setAccessControlList( Collection accessControlList) { @@ -72,4 +83,46 @@ public String toString() { .add("workspaceObjectType", workspaceObjectType) .toString(); } + + WorkspaceObjectPermissionsRequestPb toPb() { + WorkspaceObjectPermissionsRequestPb pb = new WorkspaceObjectPermissionsRequestPb(); + pb.setAccessControlList(accessControlList); + pb.setWorkspaceObjectId(workspaceObjectId); + pb.setWorkspaceObjectType(workspaceObjectType); + + return pb; + } + + static WorkspaceObjectPermissionsRequest fromPb(WorkspaceObjectPermissionsRequestPb pb) { + WorkspaceObjectPermissionsRequest model = new WorkspaceObjectPermissionsRequest(); + model.setAccessControlList(pb.getAccessControlList()); + model.setWorkspaceObjectId(pb.getWorkspaceObjectId()); + model.setWorkspaceObjectType(pb.getWorkspaceObjectType()); + + return model; + } + + public static class WorkspaceObjectPermissionsRequestSerializer + extends JsonSerializer { + @Override + public void serialize( + WorkspaceObjectPermissionsRequest value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + WorkspaceObjectPermissionsRequestPb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class 
WorkspaceObjectPermissionsRequestDeserializer + extends JsonDeserializer { + @Override + public WorkspaceObjectPermissionsRequest deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + // The Codec is set by us in the SerDeUtils.java, and it is an ObjectMapper. + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + WorkspaceObjectPermissionsRequestPb pb = + mapper.readValue(p, WorkspaceObjectPermissionsRequestPb.class); + return WorkspaceObjectPermissionsRequest.fromPb(pb); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequestPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequestPb.java new file mode 100755 index 000000000..29bffb965 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequestPb.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +class WorkspaceObjectPermissionsRequestPb { + @JsonProperty("access_control_list") + private Collection accessControlList; + + @JsonIgnore private String workspaceObjectId; + + @JsonIgnore private String workspaceObjectType; + + public WorkspaceObjectPermissionsRequestPb setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public WorkspaceObjectPermissionsRequestPb setWorkspaceObjectId(String workspaceObjectId) { + this.workspaceObjectId = workspaceObjectId; + return this; + } + + public String getWorkspaceObjectId() { + return workspaceObjectId; + } + + public WorkspaceObjectPermissionsRequestPb setWorkspaceObjectType(String workspaceObjectType) { + this.workspaceObjectType = workspaceObjectType; + return this; + } + + public String getWorkspaceObjectType() { + return workspaceObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceObjectPermissionsRequestPb that = (WorkspaceObjectPermissionsRequestPb) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(workspaceObjectId, that.workspaceObjectId) + && Objects.equals(workspaceObjectType, that.workspaceObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, workspaceObjectId, workspaceObjectType); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceObjectPermissionsRequestPb.class) + .add("accessControlList", accessControlList) + 
.add("workspaceObjectId", workspaceObjectId) + .add("workspaceObjectType", workspaceObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnum.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnum.java new file mode 100755 index 000000000..34c5e516d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnum.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.after; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum TestEnum { + TEST_ENUM_FIVE, + TEST_ENUM_FOUR, + TEST_ENUM_ONE, + TEST_ENUM_THREE, + TEST_ENUM_TWO, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnumPb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnumPb.java new file mode 100755 index 000000000..919523a42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestEnumPb.java @@ -0,0 +1,3 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.after; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessage.java new file mode 100755 index 000000000..22ffbd04f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessage.java @@ -0,0 +1,360 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.testmarshall.after; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.*; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +@JsonSerialize(using = TestMarshallMessage.TestMarshallMessageSerializer.class) +@JsonDeserialize(using = TestMarshallMessage.TestMarshallMessageDeserializer.class) +public class TestMarshallMessage { + /** */ + private Map mapValue; + + /** */ + private TestEnum testEnum; + + /** */ + private TestNestedMessage testNestedMessage; + + /** */ + private Boolean testOptionalBool; + + /** */ + private Long testOptionalInt; + + /** */ + private Long testOptionalInt64; + + /** */ + private String testOptionalString; + + /** */ + private Collection testRepeatedBool; + + /** */ + private Collection testRepeatedEnum; + + /** */ + private Collection testRepeatedInt; + + /** */ + private Collection testRepeatedNestedMessage; + + /** */ + private Collection testRepeatedString; + + /** */ + private Boolean testRequiredBool; + + /** */ + private TestEnum testRequiredEnum; + + /** */ + private Long testRequiredInt; + + /** */ + private Long testRequiredInt64; + + /** */ + private String testRequiredString; + + public TestMarshallMessage setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestMarshallMessage setTestEnum(TestEnum testEnum) { + this.testEnum = testEnum; + return this; + } + + public TestEnum getTestEnum() { + return testEnum; + } + + public TestMarshallMessage setTestNestedMessage(TestNestedMessage testNestedMessage) { + this.testNestedMessage = 
testNestedMessage; + return this; + } + + public TestNestedMessage getTestNestedMessage() { + return testNestedMessage; + } + + public TestMarshallMessage setTestOptionalBool(Boolean testOptionalBool) { + this.testOptionalBool = testOptionalBool; + return this; + } + + public Boolean getTestOptionalBool() { + return testOptionalBool; + } + + public TestMarshallMessage setTestOptionalInt(Long testOptionalInt) { + this.testOptionalInt = testOptionalInt; + return this; + } + + public Long getTestOptionalInt() { + return testOptionalInt; + } + + public TestMarshallMessage setTestOptionalInt64(Long testOptionalInt64) { + this.testOptionalInt64 = testOptionalInt64; + return this; + } + + public Long getTestOptionalInt64() { + return testOptionalInt64; + } + + public TestMarshallMessage setTestOptionalString(String testOptionalString) { + this.testOptionalString = testOptionalString; + return this; + } + + public String getTestOptionalString() { + return testOptionalString; + } + + public TestMarshallMessage setTestRepeatedBool(Collection testRepeatedBool) { + this.testRepeatedBool = testRepeatedBool; + return this; + } + + public Collection getTestRepeatedBool() { + return testRepeatedBool; + } + + public TestMarshallMessage setTestRepeatedEnum(Collection testRepeatedEnum) { + this.testRepeatedEnum = testRepeatedEnum; + return this; + } + + public Collection getTestRepeatedEnum() { + return testRepeatedEnum; + } + + public TestMarshallMessage setTestRepeatedInt(Collection testRepeatedInt) { + this.testRepeatedInt = testRepeatedInt; + return this; + } + + public Collection getTestRepeatedInt() { + return testRepeatedInt; + } + + public TestMarshallMessage setTestRepeatedNestedMessage( + Collection testRepeatedNestedMessage) { + this.testRepeatedNestedMessage = testRepeatedNestedMessage; + return this; + } + + public Collection getTestRepeatedNestedMessage() { + return testRepeatedNestedMessage; + } + + public TestMarshallMessage setTestRepeatedString(Collection 
testRepeatedString) { + this.testRepeatedString = testRepeatedString; + return this; + } + + public Collection getTestRepeatedString() { + return testRepeatedString; + } + + public TestMarshallMessage setTestRequiredBool(Boolean testRequiredBool) { + this.testRequiredBool = testRequiredBool; + return this; + } + + public Boolean getTestRequiredBool() { + return testRequiredBool; + } + + public TestMarshallMessage setTestRequiredEnum(TestEnum testRequiredEnum) { + this.testRequiredEnum = testRequiredEnum; + return this; + } + + public TestEnum getTestRequiredEnum() { + return testRequiredEnum; + } + + public TestMarshallMessage setTestRequiredInt(Long testRequiredInt) { + this.testRequiredInt = testRequiredInt; + return this; + } + + public Long getTestRequiredInt() { + return testRequiredInt; + } + + public TestMarshallMessage setTestRequiredInt64(Long testRequiredInt64) { + this.testRequiredInt64 = testRequiredInt64; + return this; + } + + public Long getTestRequiredInt64() { + return testRequiredInt64; + } + + public TestMarshallMessage setTestRequiredString(String testRequiredString) { + this.testRequiredString = testRequiredString; + return this; + } + + public String getTestRequiredString() { + return testRequiredString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestMarshallMessage that = (TestMarshallMessage) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(testEnum, that.testEnum) + && Objects.equals(testNestedMessage, that.testNestedMessage) + && Objects.equals(testOptionalBool, that.testOptionalBool) + && Objects.equals(testOptionalInt, that.testOptionalInt) + && Objects.equals(testOptionalInt64, that.testOptionalInt64) + && Objects.equals(testOptionalString, that.testOptionalString) + && Objects.equals(testRepeatedBool, that.testRepeatedBool) + && Objects.equals(testRepeatedEnum, that.testRepeatedEnum) + && 
Objects.equals(testRepeatedInt, that.testRepeatedInt) + && Objects.equals(testRepeatedNestedMessage, that.testRepeatedNestedMessage) + && Objects.equals(testRepeatedString, that.testRepeatedString) + && Objects.equals(testRequiredBool, that.testRequiredBool) + && Objects.equals(testRequiredEnum, that.testRequiredEnum) + && Objects.equals(testRequiredInt, that.testRequiredInt) + && Objects.equals(testRequiredInt64, that.testRequiredInt64) + && Objects.equals(testRequiredString, that.testRequiredString); + } + + @Override + public int hashCode() { + return Objects.hash( + mapValue, + testEnum, + testNestedMessage, + testOptionalBool, + testOptionalInt, + testOptionalInt64, + testOptionalString, + testRepeatedBool, + testRepeatedEnum, + testRepeatedInt, + testRepeatedNestedMessage, + testRepeatedString, + testRequiredBool, + testRequiredEnum, + testRequiredInt, + testRequiredInt64, + testRequiredString); + } + + @Override + public String toString() { + return new ToStringer(TestMarshallMessage.class) + .add("mapValue", mapValue) + .add("testEnum", testEnum) + .add("testNestedMessage", testNestedMessage) + .add("testOptionalBool", testOptionalBool) + .add("testOptionalInt", testOptionalInt) + .add("testOptionalInt64", testOptionalInt64) + .add("testOptionalString", testOptionalString) + .add("testRepeatedBool", testRepeatedBool) + .add("testRepeatedEnum", testRepeatedEnum) + .add("testRepeatedInt", testRepeatedInt) + .add("testRepeatedNestedMessage", testRepeatedNestedMessage) + .add("testRepeatedString", testRepeatedString) + .add("testRequiredBool", testRequiredBool) + .add("testRequiredEnum", testRequiredEnum) + .add("testRequiredInt", testRequiredInt) + .add("testRequiredInt64", testRequiredInt64) + .add("testRequiredString", testRequiredString) + .toString(); + } + + TestMarshallMessagePb toPb() { + TestMarshallMessagePb pb = new TestMarshallMessagePb(); + pb.setMap(mapValue); + pb.setTestEnum(testEnum); + pb.setTestNestedMessage(testNestedMessage); + 
pb.setTestOptionalBool(testOptionalBool); + pb.setTestOptionalInt(testOptionalInt); + pb.setTestOptionalInt64(testOptionalInt64); + pb.setTestOptionalString(testOptionalString); + pb.setTestRepeatedBool(testRepeatedBool); + pb.setTestRepeatedEnum(testRepeatedEnum); + pb.setTestRepeatedInt(testRepeatedInt); + pb.setTestRepeatedNestedMessage(testRepeatedNestedMessage); + pb.setTestRepeatedString(testRepeatedString); + pb.setTestRequiredBool(testRequiredBool); + pb.setTestRequiredEnum(testRequiredEnum); + pb.setTestRequiredInt(testRequiredInt); + pb.setTestRequiredInt64(testRequiredInt64); + pb.setTestRequiredString(testRequiredString); + + return pb; + } + + static TestMarshallMessage fromPb(TestMarshallMessagePb pb) { + TestMarshallMessage model = new TestMarshallMessage(); + model.setMap(pb.getMap()); + model.setTestEnum(pb.getTestEnum()); + model.setTestNestedMessage(pb.getTestNestedMessage()); + model.setTestOptionalBool(pb.getTestOptionalBool()); + model.setTestOptionalInt(pb.getTestOptionalInt()); + model.setTestOptionalInt64(pb.getTestOptionalInt64()); + model.setTestOptionalString(pb.getTestOptionalString()); + model.setTestRepeatedBool(pb.getTestRepeatedBool()); + model.setTestRepeatedEnum(pb.getTestRepeatedEnum()); + model.setTestRepeatedInt(pb.getTestRepeatedInt()); + model.setTestRepeatedNestedMessage(pb.getTestRepeatedNestedMessage()); + model.setTestRepeatedString(pb.getTestRepeatedString()); + model.setTestRequiredBool(pb.getTestRequiredBool()); + model.setTestRequiredEnum(pb.getTestRequiredEnum()); + model.setTestRequiredInt(pb.getTestRequiredInt()); + model.setTestRequiredInt64(pb.getTestRequiredInt64()); + model.setTestRequiredString(pb.getTestRequiredString()); + + return model; + } + + public static class TestMarshallMessageSerializer extends JsonSerializer { + @Override + public void serialize(TestMarshallMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TestMarshallMessagePb pb = value.toPb(); + 
provider.defaultSerializeValue(pb, gen); + } + } + + public static class TestMarshallMessageDeserializer + extends JsonDeserializer { + @Override + public TestMarshallMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TestMarshallMessagePb pb = mapper.readValue(p, TestMarshallMessagePb.class); + return TestMarshallMessage.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessagePb.java new file mode 100755 index 000000000..f19b1a767 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestMarshallMessagePb.java @@ -0,0 +1,287 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.after; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class TestMarshallMessagePb { + @JsonProperty("map") + private Map mapValue; + + @JsonProperty("test_enum") + private TestEnum testEnum; + + @JsonProperty("test_nested_message") + private TestNestedMessage testNestedMessage; + + @JsonProperty("test_optional_bool") + private Boolean testOptionalBool; + + @JsonProperty("test_optional_int") + private Long testOptionalInt; + + @JsonProperty("test_optional_int64") + private Long testOptionalInt64; + + @JsonProperty("test_optional_string") + private String testOptionalString; + + @JsonProperty("test_repeated_bool") + private Collection testRepeatedBool; + + @JsonProperty("test_repeated_enum") + private Collection testRepeatedEnum; + + @JsonProperty("test_repeated_int") + private Collection testRepeatedInt; + + 
@JsonProperty("test_repeated_nested_message") + private Collection testRepeatedNestedMessage; + + @JsonProperty("test_repeated_string") + private Collection testRepeatedString; + + @JsonProperty("test_required_bool") + private Boolean testRequiredBool; + + @JsonProperty("test_required_enum") + private TestEnum testRequiredEnum; + + @JsonProperty("test_required_int") + private Long testRequiredInt; + + @JsonProperty("test_required_int64") + private Long testRequiredInt64; + + @JsonProperty("test_required_string") + private String testRequiredString; + + public TestMarshallMessagePb setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestMarshallMessagePb setTestEnum(TestEnum testEnum) { + this.testEnum = testEnum; + return this; + } + + public TestEnum getTestEnum() { + return testEnum; + } + + public TestMarshallMessagePb setTestNestedMessage(TestNestedMessage testNestedMessage) { + this.testNestedMessage = testNestedMessage; + return this; + } + + public TestNestedMessage getTestNestedMessage() { + return testNestedMessage; + } + + public TestMarshallMessagePb setTestOptionalBool(Boolean testOptionalBool) { + this.testOptionalBool = testOptionalBool; + return this; + } + + public Boolean getTestOptionalBool() { + return testOptionalBool; + } + + public TestMarshallMessagePb setTestOptionalInt(Long testOptionalInt) { + this.testOptionalInt = testOptionalInt; + return this; + } + + public Long getTestOptionalInt() { + return testOptionalInt; + } + + public TestMarshallMessagePb setTestOptionalInt64(Long testOptionalInt64) { + this.testOptionalInt64 = testOptionalInt64; + return this; + } + + public Long getTestOptionalInt64() { + return testOptionalInt64; + } + + public TestMarshallMessagePb setTestOptionalString(String testOptionalString) { + this.testOptionalString = testOptionalString; + return this; + } + + public String getTestOptionalString() { + return testOptionalString; + } + + 
public TestMarshallMessagePb setTestRepeatedBool(Collection testRepeatedBool) { + this.testRepeatedBool = testRepeatedBool; + return this; + } + + public Collection getTestRepeatedBool() { + return testRepeatedBool; + } + + public TestMarshallMessagePb setTestRepeatedEnum(Collection testRepeatedEnum) { + this.testRepeatedEnum = testRepeatedEnum; + return this; + } + + public Collection getTestRepeatedEnum() { + return testRepeatedEnum; + } + + public TestMarshallMessagePb setTestRepeatedInt(Collection testRepeatedInt) { + this.testRepeatedInt = testRepeatedInt; + return this; + } + + public Collection getTestRepeatedInt() { + return testRepeatedInt; + } + + public TestMarshallMessagePb setTestRepeatedNestedMessage( + Collection testRepeatedNestedMessage) { + this.testRepeatedNestedMessage = testRepeatedNestedMessage; + return this; + } + + public Collection getTestRepeatedNestedMessage() { + return testRepeatedNestedMessage; + } + + public TestMarshallMessagePb setTestRepeatedString(Collection testRepeatedString) { + this.testRepeatedString = testRepeatedString; + return this; + } + + public Collection getTestRepeatedString() { + return testRepeatedString; + } + + public TestMarshallMessagePb setTestRequiredBool(Boolean testRequiredBool) { + this.testRequiredBool = testRequiredBool; + return this; + } + + public Boolean getTestRequiredBool() { + return testRequiredBool; + } + + public TestMarshallMessagePb setTestRequiredEnum(TestEnum testRequiredEnum) { + this.testRequiredEnum = testRequiredEnum; + return this; + } + + public TestEnum getTestRequiredEnum() { + return testRequiredEnum; + } + + public TestMarshallMessagePb setTestRequiredInt(Long testRequiredInt) { + this.testRequiredInt = testRequiredInt; + return this; + } + + public Long getTestRequiredInt() { + return testRequiredInt; + } + + public TestMarshallMessagePb setTestRequiredInt64(Long testRequiredInt64) { + this.testRequiredInt64 = testRequiredInt64; + return this; + } + + public Long 
getTestRequiredInt64() { + return testRequiredInt64; + } + + public TestMarshallMessagePb setTestRequiredString(String testRequiredString) { + this.testRequiredString = testRequiredString; + return this; + } + + public String getTestRequiredString() { + return testRequiredString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestMarshallMessagePb that = (TestMarshallMessagePb) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(testEnum, that.testEnum) + && Objects.equals(testNestedMessage, that.testNestedMessage) + && Objects.equals(testOptionalBool, that.testOptionalBool) + && Objects.equals(testOptionalInt, that.testOptionalInt) + && Objects.equals(testOptionalInt64, that.testOptionalInt64) + && Objects.equals(testOptionalString, that.testOptionalString) + && Objects.equals(testRepeatedBool, that.testRepeatedBool) + && Objects.equals(testRepeatedEnum, that.testRepeatedEnum) + && Objects.equals(testRepeatedInt, that.testRepeatedInt) + && Objects.equals(testRepeatedNestedMessage, that.testRepeatedNestedMessage) + && Objects.equals(testRepeatedString, that.testRepeatedString) + && Objects.equals(testRequiredBool, that.testRequiredBool) + && Objects.equals(testRequiredEnum, that.testRequiredEnum) + && Objects.equals(testRequiredInt, that.testRequiredInt) + && Objects.equals(testRequiredInt64, that.testRequiredInt64) + && Objects.equals(testRequiredString, that.testRequiredString); + } + + @Override + public int hashCode() { + return Objects.hash( + mapValue, + testEnum, + testNestedMessage, + testOptionalBool, + testOptionalInt, + testOptionalInt64, + testOptionalString, + testRepeatedBool, + testRepeatedEnum, + testRepeatedInt, + testRepeatedNestedMessage, + testRepeatedString, + testRequiredBool, + testRequiredEnum, + testRequiredInt, + testRequiredInt64, + testRequiredString); + } + + @Override + public String toString() { + return new 
ToStringer(TestMarshallMessagePb.class) + .add("mapValue", mapValue) + .add("testEnum", testEnum) + .add("testNestedMessage", testNestedMessage) + .add("testOptionalBool", testOptionalBool) + .add("testOptionalInt", testOptionalInt) + .add("testOptionalInt64", testOptionalInt64) + .add("testOptionalString", testOptionalString) + .add("testRepeatedBool", testRepeatedBool) + .add("testRepeatedEnum", testRepeatedEnum) + .add("testRepeatedInt", testRepeatedInt) + .add("testRepeatedNestedMessage", testRepeatedNestedMessage) + .add("testRepeatedString", testRepeatedString) + .add("testRequiredBool", testRequiredBool) + .add("testRequiredEnum", testRequiredEnum) + .add("testRequiredInt", testRequiredInt) + .add("testRequiredInt64", testRequiredInt64) + .add("testRequiredString", testRequiredString) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessage.java new file mode 100755 index 000000000..7590f88d0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessage.java @@ -0,0 +1,222 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.testmarshall.after; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.*; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +@JsonSerialize(using = TestNestedMessage.TestNestedMessageSerializer.class) +@JsonDeserialize(using = TestNestedMessage.TestNestedMessageDeserializer.class) +public class TestNestedMessage { + /** */ + private Map mapValue; + + /** */ + private Boolean nestedBool; + + /** */ + private TestEnum nestedEnum; + + /** */ + private Long nestedInt; + + /** */ + private Collection nestedRepeatedEnum; + + /** */ + private Collection nestedRepeatedString; + + /** */ + private Boolean nestedRequiredBool; + + /** */ + private Long nestedRequiredInt; + + /** */ + private String nestedString; + + public TestNestedMessage setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestNestedMessage setNestedBool(Boolean nestedBool) { + this.nestedBool = nestedBool; + return this; + } + + public Boolean getNestedBool() { + return nestedBool; + } + + public TestNestedMessage setNestedEnum(TestEnum nestedEnum) { + this.nestedEnum = nestedEnum; + return this; + } + + public TestEnum getNestedEnum() { + return nestedEnum; + } + + public TestNestedMessage setNestedInt(Long nestedInt) { + this.nestedInt = nestedInt; + return this; + } + + public Long getNestedInt() { + return nestedInt; + } + + public TestNestedMessage setNestedRepeatedEnum(Collection nestedRepeatedEnum) { + this.nestedRepeatedEnum = nestedRepeatedEnum; + return this; + } + + public Collection getNestedRepeatedEnum() { + 
return nestedRepeatedEnum; + } + + public TestNestedMessage setNestedRepeatedString(Collection nestedRepeatedString) { + this.nestedRepeatedString = nestedRepeatedString; + return this; + } + + public Collection getNestedRepeatedString() { + return nestedRepeatedString; + } + + public TestNestedMessage setNestedRequiredBool(Boolean nestedRequiredBool) { + this.nestedRequiredBool = nestedRequiredBool; + return this; + } + + public Boolean getNestedRequiredBool() { + return nestedRequiredBool; + } + + public TestNestedMessage setNestedRequiredInt(Long nestedRequiredInt) { + this.nestedRequiredInt = nestedRequiredInt; + return this; + } + + public Long getNestedRequiredInt() { + return nestedRequiredInt; + } + + public TestNestedMessage setNestedString(String nestedString) { + this.nestedString = nestedString; + return this; + } + + public String getNestedString() { + return nestedString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestNestedMessage that = (TestNestedMessage) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(nestedBool, that.nestedBool) + && Objects.equals(nestedEnum, that.nestedEnum) + && Objects.equals(nestedInt, that.nestedInt) + && Objects.equals(nestedRepeatedEnum, that.nestedRepeatedEnum) + && Objects.equals(nestedRepeatedString, that.nestedRepeatedString) + && Objects.equals(nestedRequiredBool, that.nestedRequiredBool) + && Objects.equals(nestedRequiredInt, that.nestedRequiredInt) + && Objects.equals(nestedString, that.nestedString); + } + + @Override + public int hashCode() { + return Objects.hash( + mapValue, + nestedBool, + nestedEnum, + nestedInt, + nestedRepeatedEnum, + nestedRepeatedString, + nestedRequiredBool, + nestedRequiredInt, + nestedString); + } + + @Override + public String toString() { + return new ToStringer(TestNestedMessage.class) + .add("mapValue", mapValue) + .add("nestedBool", nestedBool) + 
.add("nestedEnum", nestedEnum) + .add("nestedInt", nestedInt) + .add("nestedRepeatedEnum", nestedRepeatedEnum) + .add("nestedRepeatedString", nestedRepeatedString) + .add("nestedRequiredBool", nestedRequiredBool) + .add("nestedRequiredInt", nestedRequiredInt) + .add("nestedString", nestedString) + .toString(); + } + + TestNestedMessagePb toPb() { + TestNestedMessagePb pb = new TestNestedMessagePb(); + pb.setMap(mapValue); + pb.setNestedBool(nestedBool); + pb.setNestedEnum(nestedEnum); + pb.setNestedInt(nestedInt); + pb.setNestedRepeatedEnum(nestedRepeatedEnum); + pb.setNestedRepeatedString(nestedRepeatedString); + pb.setNestedRequiredBool(nestedRequiredBool); + pb.setNestedRequiredInt(nestedRequiredInt); + pb.setNestedString(nestedString); + + return pb; + } + + static TestNestedMessage fromPb(TestNestedMessagePb pb) { + TestNestedMessage model = new TestNestedMessage(); + model.setMap(pb.getMap()); + model.setNestedBool(pb.getNestedBool()); + model.setNestedEnum(pb.getNestedEnum()); + model.setNestedInt(pb.getNestedInt()); + model.setNestedRepeatedEnum(pb.getNestedRepeatedEnum()); + model.setNestedRepeatedString(pb.getNestedRepeatedString()); + model.setNestedRequiredBool(pb.getNestedRequiredBool()); + model.setNestedRequiredInt(pb.getNestedRequiredInt()); + model.setNestedString(pb.getNestedString()); + + return model; + } + + public static class TestNestedMessageSerializer extends JsonSerializer { + @Override + public void serialize(TestNestedMessage value, JsonGenerator gen, SerializerProvider provider) + throws IOException { + TestNestedMessagePb pb = value.toPb(); + provider.defaultSerializeValue(pb, gen); + } + } + + public static class TestNestedMessageDeserializer extends JsonDeserializer { + @Override + public TestNestedMessage deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException { + ObjectMapper mapper = (ObjectMapper) p.getCodec(); + TestNestedMessagePb pb = mapper.readValue(p, TestNestedMessagePb.class); + return 
TestNestedMessage.fromPb(pb); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessagePb.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessagePb.java new file mode 100755 index 000000000..7f649bed4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/after/TestNestedMessagePb.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.after; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +class TestNestedMessagePb { + @JsonProperty("map") + private Map mapValue; + + @JsonProperty("nested_bool") + private Boolean nestedBool; + + @JsonProperty("nested_enum") + private TestEnum nestedEnum; + + @JsonProperty("nested_int") + private Long nestedInt; + + @JsonProperty("nested_repeated_enum") + private Collection nestedRepeatedEnum; + + @JsonProperty("nested_repeated_string") + private Collection nestedRepeatedString; + + @JsonProperty("nested_required_bool") + private Boolean nestedRequiredBool; + + @JsonProperty("nested_required_int") + private Long nestedRequiredInt; + + @JsonProperty("nested_string") + private String nestedString; + + public TestNestedMessagePb setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestNestedMessagePb setNestedBool(Boolean nestedBool) { + this.nestedBool = nestedBool; + return this; + } + + public Boolean getNestedBool() { + return nestedBool; + } + + public TestNestedMessagePb setNestedEnum(TestEnum nestedEnum) { + this.nestedEnum = nestedEnum; + return this; + } + + public TestEnum getNestedEnum() { + return nestedEnum; + } + + public 
TestNestedMessagePb setNestedInt(Long nestedInt) { + this.nestedInt = nestedInt; + return this; + } + + public Long getNestedInt() { + return nestedInt; + } + + public TestNestedMessagePb setNestedRepeatedEnum(Collection nestedRepeatedEnum) { + this.nestedRepeatedEnum = nestedRepeatedEnum; + return this; + } + + public Collection getNestedRepeatedEnum() { + return nestedRepeatedEnum; + } + + public TestNestedMessagePb setNestedRepeatedString(Collection nestedRepeatedString) { + this.nestedRepeatedString = nestedRepeatedString; + return this; + } + + public Collection getNestedRepeatedString() { + return nestedRepeatedString; + } + + public TestNestedMessagePb setNestedRequiredBool(Boolean nestedRequiredBool) { + this.nestedRequiredBool = nestedRequiredBool; + return this; + } + + public Boolean getNestedRequiredBool() { + return nestedRequiredBool; + } + + public TestNestedMessagePb setNestedRequiredInt(Long nestedRequiredInt) { + this.nestedRequiredInt = nestedRequiredInt; + return this; + } + + public Long getNestedRequiredInt() { + return nestedRequiredInt; + } + + public TestNestedMessagePb setNestedString(String nestedString) { + this.nestedString = nestedString; + return this; + } + + public String getNestedString() { + return nestedString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestNestedMessagePb that = (TestNestedMessagePb) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(nestedBool, that.nestedBool) + && Objects.equals(nestedEnum, that.nestedEnum) + && Objects.equals(nestedInt, that.nestedInt) + && Objects.equals(nestedRepeatedEnum, that.nestedRepeatedEnum) + && Objects.equals(nestedRepeatedString, that.nestedRepeatedString) + && Objects.equals(nestedRequiredBool, that.nestedRequiredBool) + && Objects.equals(nestedRequiredInt, that.nestedRequiredInt) + && Objects.equals(nestedString, that.nestedString); + } + + @Override + 
public int hashCode() { + return Objects.hash( + mapValue, + nestedBool, + nestedEnum, + nestedInt, + nestedRepeatedEnum, + nestedRepeatedString, + nestedRequiredBool, + nestedRequiredInt, + nestedString); + } + + @Override + public String toString() { + return new ToStringer(TestNestedMessagePb.class) + .add("mapValue", mapValue) + .add("nestedBool", nestedBool) + .add("nestedEnum", nestedEnum) + .add("nestedInt", nestedInt) + .add("nestedRepeatedEnum", nestedRepeatedEnum) + .add("nestedRepeatedString", nestedRepeatedString) + .add("nestedRequiredBool", nestedRequiredBool) + .add("nestedRequiredInt", nestedRequiredInt) + .add("nestedString", nestedString) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestEnum.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestEnum.java new file mode 100755 index 000000000..2379fc977 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestEnum.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.before; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum TestEnum { + TEST_ENUM_FIVE, + TEST_ENUM_FOUR, + TEST_ENUM_ONE, + TEST_ENUM_THREE, + TEST_ENUM_TWO, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestMarshallMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestMarshallMessage.java new file mode 100755 index 000000000..23de82eb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestMarshallMessage.java @@ -0,0 +1,304 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.testmarshall.before; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class TestMarshallMessage { + /** */ + @JsonProperty("map") + private Map mapValue; + + /** */ + @JsonProperty("test_enum") + private TestEnum testEnum; + + /** */ + @JsonProperty("test_nested_message") + private TestNestedMessage testNestedMessage; + + /** */ + @JsonProperty("test_optional_bool") + private Boolean testOptionalBool; + + /** */ + @JsonProperty("test_optional_int") + private Long testOptionalInt; + + /** */ + @JsonProperty("test_optional_int64") + private Long testOptionalInt64; + + /** */ + @JsonProperty("test_optional_string") + private String testOptionalString; + + /** */ + @JsonProperty("test_repeated_bool") + private Collection testRepeatedBool; + + /** */ + @JsonProperty("test_repeated_enum") + private Collection testRepeatedEnum; + + /** */ + @JsonProperty("test_repeated_int") + private Collection testRepeatedInt; + + /** */ + @JsonProperty("test_repeated_nested_message") + private Collection testRepeatedNestedMessage; + + /** */ + @JsonProperty("test_repeated_string") + private Collection testRepeatedString; + + /** */ + @JsonProperty("test_required_bool") + private Boolean testRequiredBool; + + /** */ + @JsonProperty("test_required_enum") + private TestEnum testRequiredEnum; + + /** */ + @JsonProperty("test_required_int") + private Long testRequiredInt; + + /** */ + @JsonProperty("test_required_int64") + private Long testRequiredInt64; + + /** */ + @JsonProperty("test_required_string") + private String testRequiredString; + + public TestMarshallMessage setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestMarshallMessage setTestEnum(TestEnum testEnum) { + 
this.testEnum = testEnum; + return this; + } + + public TestEnum getTestEnum() { + return testEnum; + } + + public TestMarshallMessage setTestNestedMessage(TestNestedMessage testNestedMessage) { + this.testNestedMessage = testNestedMessage; + return this; + } + + public TestNestedMessage getTestNestedMessage() { + return testNestedMessage; + } + + public TestMarshallMessage setTestOptionalBool(Boolean testOptionalBool) { + this.testOptionalBool = testOptionalBool; + return this; + } + + public Boolean getTestOptionalBool() { + return testOptionalBool; + } + + public TestMarshallMessage setTestOptionalInt(Long testOptionalInt) { + this.testOptionalInt = testOptionalInt; + return this; + } + + public Long getTestOptionalInt() { + return testOptionalInt; + } + + public TestMarshallMessage setTestOptionalInt64(Long testOptionalInt64) { + this.testOptionalInt64 = testOptionalInt64; + return this; + } + + public Long getTestOptionalInt64() { + return testOptionalInt64; + } + + public TestMarshallMessage setTestOptionalString(String testOptionalString) { + this.testOptionalString = testOptionalString; + return this; + } + + public String getTestOptionalString() { + return testOptionalString; + } + + public TestMarshallMessage setTestRepeatedBool(Collection testRepeatedBool) { + this.testRepeatedBool = testRepeatedBool; + return this; + } + + public Collection getTestRepeatedBool() { + return testRepeatedBool; + } + + public TestMarshallMessage setTestRepeatedEnum(Collection testRepeatedEnum) { + this.testRepeatedEnum = testRepeatedEnum; + return this; + } + + public Collection getTestRepeatedEnum() { + return testRepeatedEnum; + } + + public TestMarshallMessage setTestRepeatedInt(Collection testRepeatedInt) { + this.testRepeatedInt = testRepeatedInt; + return this; + } + + public Collection getTestRepeatedInt() { + return testRepeatedInt; + } + + public TestMarshallMessage setTestRepeatedNestedMessage( + Collection testRepeatedNestedMessage) { + 
this.testRepeatedNestedMessage = testRepeatedNestedMessage; + return this; + } + + public Collection getTestRepeatedNestedMessage() { + return testRepeatedNestedMessage; + } + + public TestMarshallMessage setTestRepeatedString(Collection testRepeatedString) { + this.testRepeatedString = testRepeatedString; + return this; + } + + public Collection getTestRepeatedString() { + return testRepeatedString; + } + + public TestMarshallMessage setTestRequiredBool(Boolean testRequiredBool) { + this.testRequiredBool = testRequiredBool; + return this; + } + + public Boolean getTestRequiredBool() { + return testRequiredBool; + } + + public TestMarshallMessage setTestRequiredEnum(TestEnum testRequiredEnum) { + this.testRequiredEnum = testRequiredEnum; + return this; + } + + public TestEnum getTestRequiredEnum() { + return testRequiredEnum; + } + + public TestMarshallMessage setTestRequiredInt(Long testRequiredInt) { + this.testRequiredInt = testRequiredInt; + return this; + } + + public Long getTestRequiredInt() { + return testRequiredInt; + } + + public TestMarshallMessage setTestRequiredInt64(Long testRequiredInt64) { + this.testRequiredInt64 = testRequiredInt64; + return this; + } + + public Long getTestRequiredInt64() { + return testRequiredInt64; + } + + public TestMarshallMessage setTestRequiredString(String testRequiredString) { + this.testRequiredString = testRequiredString; + return this; + } + + public String getTestRequiredString() { + return testRequiredString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestMarshallMessage that = (TestMarshallMessage) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(testEnum, that.testEnum) + && Objects.equals(testNestedMessage, that.testNestedMessage) + && Objects.equals(testOptionalBool, that.testOptionalBool) + && Objects.equals(testOptionalInt, that.testOptionalInt) + && Objects.equals(testOptionalInt64, 
that.testOptionalInt64) + && Objects.equals(testOptionalString, that.testOptionalString) + && Objects.equals(testRepeatedBool, that.testRepeatedBool) + && Objects.equals(testRepeatedEnum, that.testRepeatedEnum) + && Objects.equals(testRepeatedInt, that.testRepeatedInt) + && Objects.equals(testRepeatedNestedMessage, that.testRepeatedNestedMessage) + && Objects.equals(testRepeatedString, that.testRepeatedString) + && Objects.equals(testRequiredBool, that.testRequiredBool) + && Objects.equals(testRequiredEnum, that.testRequiredEnum) + && Objects.equals(testRequiredInt, that.testRequiredInt) + && Objects.equals(testRequiredInt64, that.testRequiredInt64) + && Objects.equals(testRequiredString, that.testRequiredString); + } + + @Override + public int hashCode() { + return Objects.hash( + mapValue, + testEnum, + testNestedMessage, + testOptionalBool, + testOptionalInt, + testOptionalInt64, + testOptionalString, + testRepeatedBool, + testRepeatedEnum, + testRepeatedInt, + testRepeatedNestedMessage, + testRepeatedString, + testRequiredBool, + testRequiredEnum, + testRequiredInt, + testRequiredInt64, + testRequiredString); + } + + @Override + public String toString() { + return new ToStringer(TestMarshallMessage.class) + .add("mapValue", mapValue) + .add("testEnum", testEnum) + .add("testNestedMessage", testNestedMessage) + .add("testOptionalBool", testOptionalBool) + .add("testOptionalInt", testOptionalInt) + .add("testOptionalInt64", testOptionalInt64) + .add("testOptionalString", testOptionalString) + .add("testRepeatedBool", testRepeatedBool) + .add("testRepeatedEnum", testRepeatedEnum) + .add("testRepeatedInt", testRepeatedInt) + .add("testRepeatedNestedMessage", testRepeatedNestedMessage) + .add("testRepeatedString", testRepeatedString) + .add("testRequiredBool", testRequiredBool) + .add("testRequiredEnum", testRequiredEnum) + .add("testRequiredInt", testRequiredInt) + .add("testRequiredInt64", testRequiredInt64) + .add("testRequiredString", testRequiredString) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestNestedMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestNestedMessage.java new file mode 100755 index 000000000..b1d2fa36a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/testmarshall/before/TestNestedMessage.java @@ -0,0 +1,175 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.testmarshall.before; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class TestNestedMessage { + /** */ + @JsonProperty("map") + private Map mapValue; + + /** */ + @JsonProperty("nested_bool") + private Boolean nestedBool; + + /** */ + @JsonProperty("nested_enum") + private TestEnum nestedEnum; + + /** */ + @JsonProperty("nested_int") + private Long nestedInt; + + /** */ + @JsonProperty("nested_repeated_enum") + private Collection nestedRepeatedEnum; + + /** */ + @JsonProperty("nested_repeated_string") + private Collection nestedRepeatedString; + + /** */ + @JsonProperty("nested_required_bool") + private Boolean nestedRequiredBool; + + /** */ + @JsonProperty("nested_required_int") + private Long nestedRequiredInt; + + /** */ + @JsonProperty("nested_string") + private String nestedString; + + public TestNestedMessage setMap(Map mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map getMap() { + return mapValue; + } + + public TestNestedMessage setNestedBool(Boolean nestedBool) { + this.nestedBool = nestedBool; + return this; + } + + public Boolean getNestedBool() { + return nestedBool; + } + + public TestNestedMessage setNestedEnum(TestEnum nestedEnum) { + this.nestedEnum = nestedEnum; + return this; + } + + public TestEnum 
getNestedEnum() { + return nestedEnum; + } + + public TestNestedMessage setNestedInt(Long nestedInt) { + this.nestedInt = nestedInt; + return this; + } + + public Long getNestedInt() { + return nestedInt; + } + + public TestNestedMessage setNestedRepeatedEnum(Collection nestedRepeatedEnum) { + this.nestedRepeatedEnum = nestedRepeatedEnum; + return this; + } + + public Collection getNestedRepeatedEnum() { + return nestedRepeatedEnum; + } + + public TestNestedMessage setNestedRepeatedString(Collection nestedRepeatedString) { + this.nestedRepeatedString = nestedRepeatedString; + return this; + } + + public Collection getNestedRepeatedString() { + return nestedRepeatedString; + } + + public TestNestedMessage setNestedRequiredBool(Boolean nestedRequiredBool) { + this.nestedRequiredBool = nestedRequiredBool; + return this; + } + + public Boolean getNestedRequiredBool() { + return nestedRequiredBool; + } + + public TestNestedMessage setNestedRequiredInt(Long nestedRequiredInt) { + this.nestedRequiredInt = nestedRequiredInt; + return this; + } + + public Long getNestedRequiredInt() { + return nestedRequiredInt; + } + + public TestNestedMessage setNestedString(String nestedString) { + this.nestedString = nestedString; + return this; + } + + public String getNestedString() { + return nestedString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestNestedMessage that = (TestNestedMessage) o; + return Objects.equals(mapValue, that.mapValue) + && Objects.equals(nestedBool, that.nestedBool) + && Objects.equals(nestedEnum, that.nestedEnum) + && Objects.equals(nestedInt, that.nestedInt) + && Objects.equals(nestedRepeatedEnum, that.nestedRepeatedEnum) + && Objects.equals(nestedRepeatedString, that.nestedRepeatedString) + && Objects.equals(nestedRequiredBool, that.nestedRequiredBool) + && Objects.equals(nestedRequiredInt, that.nestedRequiredInt) + && Objects.equals(nestedString, 
that.nestedString); + } + + @Override + public int hashCode() { + return Objects.hash( + mapValue, + nestedBool, + nestedEnum, + nestedInt, + nestedRepeatedEnum, + nestedRepeatedString, + nestedRequiredBool, + nestedRequiredInt, + nestedString); + } + + @Override + public String toString() { + return new ToStringer(TestNestedMessage.class) + .add("mapValue", mapValue) + .add("nestedBool", nestedBool) + .add("nestedEnum", nestedEnum) + .add("nestedInt", nestedInt) + .add("nestedRepeatedEnum", nestedRepeatedEnum) + .add("nestedRepeatedString", nestedRepeatedString) + .add("nestedRequiredBool", nestedRequiredBool) + .add("nestedRequiredInt", nestedRequiredInt) + .add("nestedString", nestedString) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/testmarshall/before/TestNestedMessageTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/testmarshall/before/TestNestedMessageTest.java new file mode 100644 index 000000000..27d2b992e --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/testmarshall/before/TestNestedMessageTest.java @@ -0,0 +1,106 @@ +package com.databricks.sdk.testmarshall.before; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.*; +import org.junit.jupiter.api.Test; + +class TestNestedMessageTest { + @Test + void testMarshallMessageSerializationEquality() throws Exception { + // Prepare nested message values + Map map = new HashMap<>(); + map.put("key1", "value1"); + map.put("key2", "value2"); + List repeatedStrings = Arrays.asList("a", "b"); + List repeatedEnumsBefore = + Arrays.asList( + com.databricks.sdk.testmarshall.before.TestEnum.TEST_ENUM_ONE, + com.databricks.sdk.testmarshall.before.TestEnum.TEST_ENUM_TWO); + List repeatedEnumsAfter = + Arrays.asList( + com.databricks.sdk.testmarshall.after.TestEnum.TEST_ENUM_ONE, + com.databricks.sdk.testmarshall.after.TestEnum.TEST_ENUM_TWO); + + // Create nested 
messages + com.databricks.sdk.testmarshall.before.TestNestedMessage beforeNested = + new com.databricks.sdk.testmarshall.before.TestNestedMessage() + .setMap(map) + .setNestedBool(true) + .setNestedEnum(com.databricks.sdk.testmarshall.before.TestEnum.TEST_ENUM_ONE) + .setNestedInt(123L) + .setNestedRepeatedEnum(repeatedEnumsBefore) + .setNestedRepeatedString(repeatedStrings) + .setNestedRequiredBool(true) + .setNestedRequiredInt(456L) + .setNestedString("nested"); + + com.databricks.sdk.testmarshall.after.TestNestedMessage afterNested = + new com.databricks.sdk.testmarshall.after.TestNestedMessage() + .setMap(map) + .setNestedBool(true) + .setNestedEnum(com.databricks.sdk.testmarshall.after.TestEnum.TEST_ENUM_ONE) + .setNestedInt(123L) + .setNestedRepeatedEnum(repeatedEnumsAfter) + .setNestedRepeatedString(repeatedStrings) + .setNestedRequiredBool(true) + .setNestedRequiredInt(456L) + .setNestedString("nested"); + + // Prepare repeated fields for main message + List repeatedBools = Arrays.asList(true, false); + List repeatedInts = Arrays.asList(1L, 2L); + List repeatedNestedBefore = + Arrays.asList(beforeNested); + List repeatedNestedAfter = + Arrays.asList(afterNested); + + // Create main messages + com.databricks.sdk.testmarshall.before.TestMarshallMessage beforeMsg = + new com.databricks.sdk.testmarshall.before.TestMarshallMessage() + .setMap(map) + .setTestEnum(com.databricks.sdk.testmarshall.before.TestEnum.TEST_ENUM_ONE) + .setTestNestedMessage(beforeNested) + .setTestOptionalBool(false) + .setTestOptionalInt(10L) + .setTestOptionalInt64(20L) + .setTestOptionalString("optional") + .setTestRepeatedBool(repeatedBools) + .setTestRepeatedEnum(repeatedEnumsBefore) + .setTestRepeatedInt(repeatedInts) + .setTestRepeatedNestedMessage(repeatedNestedBefore) + .setTestRepeatedString(repeatedStrings) + .setTestRequiredBool(true) + .setTestRequiredEnum(com.databricks.sdk.testmarshall.before.TestEnum.TEST_ENUM_TWO) + .setTestRequiredInt(100L) + .setTestRequiredInt64(200L) 
+ .setTestRequiredString("required"); + + com.databricks.sdk.testmarshall.after.TestMarshallMessage afterMsg = + new com.databricks.sdk.testmarshall.after.TestMarshallMessage() + .setMap(map) + .setTestEnum(com.databricks.sdk.testmarshall.after.TestEnum.TEST_ENUM_ONE) + .setTestNestedMessage(afterNested) + .setTestOptionalBool(false) + .setTestOptionalInt(10L) + .setTestOptionalInt64(20L) + .setTestOptionalString("optional") + .setTestRepeatedBool(repeatedBools) + .setTestRepeatedEnum(repeatedEnumsAfter) + .setTestRepeatedInt(repeatedInts) + .setTestRepeatedNestedMessage(repeatedNestedAfter) + .setTestRepeatedString(repeatedStrings) + .setTestRequiredBool(true) + .setTestRequiredEnum(com.databricks.sdk.testmarshall.after.TestEnum.TEST_ENUM_TWO) + .setTestRequiredInt(100L) + .setTestRequiredInt64(200L) + .setTestRequiredString("required"); + + ObjectMapper mapper = new ObjectMapper(); + String beforeJson = mapper.writeValueAsString(beforeMsg); + String afterJson = mapper.writeValueAsString(afterMsg); + + assertEquals(beforeJson, afterJson, "Serialized JSON should be the same for before and after"); + } +}